├── .github ├── CODEOWNERS ├── ISSUE_TEMPLATE │ ├── bug_report.md │ └── feature_request.md ├── dependabot.yml ├── dictionary.txt ├── queries │ ├── asssociated-pr.query.yml │ └── pr-labels.query.yml └── workflows │ ├── changelog.yml │ ├── check-links.yml │ ├── create-jira-issue.yml │ ├── deploy_to_github_io.yml │ ├── labeled-pr.yml │ ├── release-template-comment.yml │ ├── release.yml │ ├── spellcheck.yml │ ├── static-analysis.yml │ ├── tag-release.yml │ ├── test-build.yml │ ├── update_asf_tools_version.yml │ └── update_sdk_version.yml ├── .gitignore ├── .gitleaks.toml ├── .lycheeignore ├── CHANGELOG.md ├── CITATION.cff ├── LICENSE ├── README.md ├── docs ├── CNAME ├── ViewerJS │ ├── compatibility.js │ ├── example.local.css │ ├── images │ │ ├── kogmbh.png │ │ ├── nlnet.png │ │ ├── texture.png │ │ ├── toolbarButton-download.png │ │ ├── toolbarButton-fullscreen.png │ │ ├── toolbarButton-menuArrows.png │ │ ├── toolbarButton-pageDown.png │ │ ├── toolbarButton-pageUp.png │ │ ├── toolbarButton-presentation.png │ │ ├── toolbarButton-zoomIn.png │ │ └── toolbarButton-zoomOut.png │ ├── index.html │ ├── pdf.js │ ├── pdf.worker.js │ ├── pdfjsversion.js │ ├── text_layer_builder.js │ ├── ui_utils.js │ └── webodf.js ├── citing-snippet.md ├── contact-snippet.md ├── contact.md ├── contributing.md ├── dems.md ├── guides │ ├── Sentinel_RTC_ATBD_v3.1.pdf │ ├── Sentinel_RTC_Product_Guide.pdf │ │ └── index.html │ ├── burst_insar_product_guide.md │ ├── gunw_product_guide.md │ ├── insar_product_guide.md │ ├── insar_product_guide_template.md │ ├── introduction_to_sar.md │ ├── opera_rtc_product_guide.md │ ├── rtc_atbd.md │ └── rtc_product_guide.md ├── how_it_works.md ├── images │ ├── HyP3-graphic-only.png │ ├── SAR_band_types.png │ ├── api-401-unauthorized.png │ ├── asf_burst_insar_names.png │ ├── asf_gunw_names.png │ ├── asf_insar_names.png │ ├── baseline_asf.png │ ├── burst-contiguity.png │ ├── cop-coverage-map.png │ ├── cop-missing-100.png │ ├── dem-coverage-map.png │ ├── favicon.ico │ ├── frame_granule_overlap.png │ ├── get_jobs_query.png │ ├── get_user_execute.png │ ├── get_user_try.png │ ├── insar-tutorial.png │ ├── landsat-false-color-composite.jpg │ ├── log-difference-raster.png │ ├── microwave-emr.png │ ├── opera-browse-download.png │ ├── opera-mask-compare.png │ ├── opera-rtc-static-layer-coverage.png │ ├── opera-rtc-vertex-results.png │ ├── opera-rtc-vertex-search.png │ ├── opera-rtc-vertex-static-id.png │ ├── opera-rtc-vertex-static.png │ ├── orbit_in_name.png │ ├── phase_diff.png │ ├── pixel-spacing-compare.png │ ├── polarizations_ASF_dashed.png │ ├── post_jobs_execute.png │ ├── rtc-tutorial.png │ ├── s1b_hole_alaska.png │ ├── sar-optical-fusion.jpg │ ├── sar_distortions.png │ ├── scattering_types.png │ ├── seasonal-change-example.jpg │ ├── sentinel-1-rtc-image.jpg │ ├── slc_jitter.png │ ├── three_rader_backscatter_convention.jpg │ ├── vertex-GUNW-dataset-selection.png │ ├── vertex-dataset-selection.png │ ├── vertex-sign-in.png │ ├── vertex.png │ ├── water-histogram.png │ ├── water-mask.png │ ├── watermask-tutorial.png │ └── wavelength_vs_roughness.png ├── index.md ├── javascripts │ └── mathjax.js ├── plugins.md ├── products.md ├── sentinel1.md ├── tools │ ├── arcgis_toolbox.md │ ├── asf_tools.md │ └── asf_tools_api.md ├── tutorials.md ├── tutorials │ ├── hyp3_insar_stack_for_ts_analysis.ipynb │ ├── hyp3_isce2_burst_merge.ipynb │ ├── hyp3_isce2_burst_stack_for_ts_analysis.ipynb │ ├── new-insar-jobs.ipynb │ ├── new-rtc-jobs.ipynb │ └── process-new-granules-for-search-parameters.md ├── usage_guidelines.md 
├── using-snippet.md ├── using.md ├── using │ ├── api.md │ ├── credits.md │ ├── requesting_access.md │ ├── sdk.md │ ├── sdk_api.md │ ├── subscriptions.md │ └── vertex.md ├── v2-transition.md ├── water_masking.md └── whats_new.md ├── environment.yml ├── macros.py ├── mkdocs.yml ├── overrides └── main.html └── requirements.txt /.github/CODEOWNERS: -------------------------------------------------------------------------------- 1 | # These owners will be requested for review when someone opens a pull request. 2 | * @ASFHyP3/Tools @ASFHyP3/SciDev @ASFHyP3/Services 3 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Bug report 3 | about: Create a report to help us improve 4 | title: '' 5 | labels: bug 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Describe the bug** 11 | A clear and concise description of what the bug is. 12 | 13 | **To Reproduce** 14 | Steps to reproduce the behavior: 15 | 1. Go to '...' 16 | 2. Click on '....' 17 | 3. Scroll down to '....' 18 | 4. See error 19 | 20 | **Expected behavior** 21 | A clear and concise description of what you expected to happen. 22 | 23 | 24 | **Additional context** 25 | Add any other context about the problem here. 26 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.md: -------------------------------------------------------------------------------- 1 | --- 2 | name: Feature request 3 | about: Suggest an idea for this project 4 | title: '' 5 | labels: enhancement 6 | assignees: '' 7 | 8 | --- 9 | 10 | **Is your feature request related to a problem? Please describe.** 11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...] 12 | 13 | **Describe the solution you'd like** 14 | A clear and concise description of what you want to happen. 15 | 16 | **Describe alternatives you've considered** 17 | A clear and concise description of any alternative solutions or features you've considered. 18 | 19 | **Additional context** 20 | Add any other context or screenshots about the feature request here. 21 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # To get started with Dependabot version updates, you'll need to specify which 2 | # package ecosystems to update and where the package manifests are located. 
3 | # Please see the documentation for all configuration options: 4 | # https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file 5 | 6 | version: 2 7 | updates: 8 | - package-ecosystem: pip 9 | directory: / 10 | schedule: 11 | interval: weekly 12 | labels: 13 | - bumpless 14 | - package-ecosystem: github-actions 15 | directory: / 16 | schedule: 17 | interval: weekly 18 | labels: 19 | - bumpless 20 | -------------------------------------------------------------------------------- /.github/dictionary.txt: -------------------------------------------------------------------------------- 1 | -0 2 | .github 3 | .kmz 4 | .lycheeignore 5 | .md-button 6 | .png 7 | .png.aux.xml 8 | .png.xml 9 | .tif.xml 10 | .txt 11 | .xml 12 | 5x1 13 | 5x1-look 14 | 10x 15 | 10x2 16 | 10x2-look 17 | 20250418T141628Z 18 | 20250419T010229Z 19 | 20x4 20 | 20x4-look 21 | 160m 22 | 80m 23 | 40m 24 | 30m 25 | 20m 26 | 10m 27 | 2π 28 | 4π 29 | 5x1 30 | 7x7 31 | 2D 32 | 2D/3D 33 | 3D 34 | a- 35 | aaaaaaaa 36 | acknowledgement 37 | adaptively 38 | adf 39 | al. 40 | ANF 41 | antimeridian 42 | AOI 43 | APD 44 | APIs 45 | ArcCatalog 46 | ArcGIS 47 | ArcGIS-Compatible 48 | ArcGIS-compatible 49 | ArcGIS-compliant 50 | ArcGrid 51 | ArcMap 52 | area.tif 53 | ARIA-S1-GUNW 54 | ARIA-S1-GUNWs 55 | ARIA_S1_GUNW 56 | ASF 57 | ASF's 58 | ASF-specific 59 | asf-tools 60 | asf_search 61 | asf_tools 62 | ASFHyP3 63 | AutoRIFT 64 | autoRIFT 65 | azimuthAngle 66 | Aγ 67 | Aσ 68 | Backscatter 69 | backscatter 70 | bandpass 71 | base_name 72 | base_name_mb 73 | basename 74 | bbbbbb 75 | Bekaert 76 | beta0 77 | burst2safe 78 | BurstID 79 | Buzzanga 80 | cccc 81 | Changelog 82 | Clauss 83 | CLI 84 | CloudFormation 85 | CNAME 86 | conda 87 | Conda 88 | conda-forge 89 | connectedComponents 90 | CONUS 91 | cookiecutter 92 | copernicus 93 | coregister 94 | coregistered 95 | Coregisters 96 | coregisters 97 | Coregistration 98 | coregistration 99 | cross-polarizations 100 | customizable 101 | DAAC 102 | date1 103 | date2 104 | Decompositions 105 | Decorrelation 106 | decorrelation 107 | DEM 108 | DEM's 109 | dem.tif 110 | dem_matching 111 | dem_name 112 | DEMs 113 | differenced 114 | DockerizedTopsApp 115 | docstring 116 | DOI 117 | DOIs 118 | Downloader 119 | downsampled 120 | DV 121 | DynamoDB 122 | E053_1 123 | E054_1 124 | Earthdata 125 | EarthScope 126 | EDL 127 | EGM2008 128 | EGM96 129 | electro-magnetic 130 | endblock 131 | entrypoint 132 | EPSG:4326 133 | ESA 134 | ESA's 135 | ESD 136 | ESRI 137 | et 138 | EW 139 | existingproducts 140 | FD6A 141 | framingtip 142 | front-end 143 | full-fledged 144 | Galápagos 145 | gamma-nought 146 | gamma0 147 | Geocode 148 | Geocoded 149 | geocoded 150 | Geocoding 151 | geocoding 152 | geoid 153 | geoid_adjust 154 | geojson 155 | geolocated 156 | Geolocation 157 | geolocation 158 | Geolocations 159 | geolocations 160 | geoprocessing 161 | georeferenced 162 | georeferencing 163 | GEOSCIENCE 164 | Geoscience 165 | geoscience 166 | Geospatial 167 | geospatial 168 | GeoTIFF 169 | GeoTIFF's 170 | GeoTIFFs 171 | GIS 172 | GitOps 173 | Gitter 174 | GL1 175 | GLO-30 176 | GLO-90 177 | GNSS 178 | Goldstein 179 | Goldstein-Werner 180 | Grayscale 181 | grayscale 182 | GRD 183 | GRD-H 184 | grey 185 | Gruber 186 | GSHHG 187 | GUNW 188 | GUNWs 189 | HDF5 190 | HH 191 | HRRR 192 | HV 193 | hydrostatic 194 | HyP3 195 | hyp3 196 | hyp3-docs 197 | hyp3-gamma 198 | HyP3-ISCE2 199 | hyp3_sdk 200 | IaC 201 | Iain 202 | IFG 203 | IGARSS 204 | IHS 205 | 
imagingGeometry 206 | impactful 207 | inc_map.tif 208 | incidenceAngle 209 | include_dem 210 | include_rgb 211 | InSAR 212 | INSAR_ISCE_BURST 213 | INSAR_ISCE_MULTI_BURST 214 | Interferogram 215 | interferogram 216 | Interferograms 217 | interferograms 218 | Interferometric 219 | interferometric 220 | Interferometry 221 | interferometry 222 | ionosphereBurstRamps 223 | ISCE2 224 | ISCE2's 225 | ISCE3 226 | iteratively 227 | IW 228 | Jinja2 229 | jitter 230 | JOSS 231 | JPL 232 | JPL's 233 | JS 234 | JSON 235 | Jupyter 236 | KMZ 237 | kmz 238 | km² 239 | Kuenzer 240 | L1 241 | L1/L5 242 | L2 243 | L3 244 | L4 245 | L5 246 | L7 247 | L8 248 | L9 249 | lal_f 250 | landfast 251 | landuse 252 | lau_f 253 | LayerName 254 | Liang 255 | lifecycle 256 | Log10 257 | lon 258 | lonl_f 259 | lonu_f 260 | lookAngle 261 | ls_map.tif 262 | lv_phi 263 | lv_theta 264 | LVθ 265 | LVφ 266 | map-centric 267 | MapReady 268 | Markert 269 | Mathjax 270 | mathjax 271 | MCF 272 | mcf 273 | Menz 274 | Miniconda 275 | MintPy 276 | mintpy 277 | MkDocs 278 | mosaicked 279 | mosaicking 280 | MTI 281 | multiband 282 | Multilook 283 | multilooking 284 | Multitemporal 285 | Muro 286 | MWE 287 | N27_3 288 | N27_8 289 | NAVD88 290 | NBViewer 291 | NED1 292 | NED13 293 | NED2 294 | netCDF 295 | NetCDF 296 | NetCDF4 297 | NISAR 298 | NOAA 299 | NoData 300 | non-geocoded 301 | NoSQL 302 | novertex 303 | OpenAPI 304 | OpenStreetMap 305 | OPERA_L2_RTC-S1 306 | OPERA_L2_RTC-S1_V1 307 | OPERA-RTC-S1 308 | OPERA-S1 309 | orthometric 310 | OSM 311 | Ottinger 312 | par_s1_slc 313 | parallelBaseline 314 | Pavia 315 | PEP8 316 | perpendicularBaseline 317 | phenology 318 | pip 319 | PixelSpacing 320 | Pluggable 321 | PNG 322 | png 323 | Polarimetry 324 | polarimetry 325 | Polarization 326 | polarization 327 | Polarizations 328 | polarizations 329 | PR 330 | pre- 331 | pre-generated 332 | pre-processed 333 | Pre-Processing 334 | Pre-processing 335 | pre-processing 336 | ProductGenerationDateTime 337 | ProductVersion 338 | programmatically 339 | Programmatically 340 | PRs 341 | PySolid 342 | QGIS 343 | Radiometrically 344 | radiometrically 345 | RAiDER 346 | rainforest 347 | range-doppler 348 | raster 349 | rasters 350 | Raytracing 351 | README 352 | readme 353 | repo 354 | reproject 355 | reprojected 356 | reprojecting 357 | Restituted 358 | restituted 359 | RGB 360 | rgb 361 | rgb.kmz 362 | rgb.png 363 | rgb.tif 364 | Ridgecrest 365 | rrr 366 | RTC 367 | RTC-based 368 | RTC-S1 369 | RTC-STATIC 370 | Rüetschi 371 | S1 372 | S1_136231_IW2_20200604T022312_VV_7C85-BURST 373 | S1_136231_IW2_20200616T022313_VV_5D11-BURST 374 | S1_rrr__yyyymmdd_yyyymmdd_pp_INTzz_cccc 375 | S1A 376 | S1A_064_E053_1_N27_3_E054_1_N27_8_20200604_20200616_VV_INT80_7EB5 377 | S1A_IW_SLC__1SDV_20200116T032559_20200116T032627_030820_038928_F5DC 378 | S1B 379 | S1B_IW_SLC__1SDV_20200128T032559_20200128T032627_030995_038F51_7D4F 380 | S1s 381 | S1s_rrr_lonl_f_lal_f_lonu_f_lau_f_yyyymmdd_yyyymmdd_pp_INTzz_cccc 382 | S2 383 | S3 384 | SAR 385 | SBAS 386 | scalable 387 | scatterers 388 | SDH 389 | SDK 390 | SDS 391 | SDV 392 | Sentinel-1A 393 | Sentinel-1B 394 | Sentinel-1C 395 | Sentinel-1D 396 | Sentinel-2 397 | Shapefile 398 | shapefile 399 | Shiroma 400 | show_submodules 401 | sidelobe 402 | sidelobes 403 | sigma-nought 404 | sigma0 405 | Sinergise 406 | SLC 407 | SLCs 408 | SLCs. 
409 | SNAPHU 410 | SNAPHU's 411 | snaphu_mcf 412 | socio-economic 413 | solidEarthTide 414 | Specular 415 | specular 416 | SRTM 417 | SRTMGL1 418 | ssss 419 | SSV 420 | StartDateTime 421 | Stereographic 422 | StoryMap 423 | StoryMaps 424 | subfolder 425 | subfolders 426 | submit_aria_s1_gunw_job 427 | subpackage 428 | superpixel 429 | T115-245714-IW1 430 | TBD 431 | tif 432 | TODO 433 | TopsApp 434 | tradeoffs 435 | TRE 436 | tropospheric 437 | Tropospheric 438 | troposphereHydrostatic 439 | troposphereWet 440 | Twele 441 | UAF 442 | UI 443 | unbuffered 444 | uncomment 445 | unfilteredCoherence 446 | unintuitive 447 | unitless 448 | unwrappedPhase 449 | upsampled 450 | URL 451 | USGS 452 | UTC 453 | UTCTime 454 | UTM 455 | UTM-projected 456 | v1.0 457 | v2 458 | VH 459 | VV 460 | waterbodies 461 | waterbody 462 | Watermap 463 | watermasking 464 | webpage 465 | Werner 466 | Werner's 467 | WGS84 468 | whitespace 469 | Woodhouse 470 | WorldCover 471 | wr 472 | XML 473 | xx 474 | Yukon-Kuskokwim 475 | Yunjun 476 | yy 477 | yyyymmdd 478 | YYYYMMDDTHHMMSSZ 479 | zz 480 | ⅓ 481 | ½ 482 | ½π 483 | Δ 484 | ΔΨ 485 | ΔΩ 486 | Δψref 487 | Δω 488 | Δωref 489 | Δϒ 490 | γ0 491 | θ 492 | λ 493 | π 494 | π/2 495 | σ0 496 | φ 497 | -------------------------------------------------------------------------------- /.github/queries/asssociated-pr.query.yml: -------------------------------------------------------------------------------- 1 | query: ' 2 | query($owner:String!, $name:String!, $sha:String!) { 3 | repository(owner:$owner, name:$name) { 4 | commit: object(expression:$sha) { 5 | ... on Commit { 6 | associatedPullRequests(first:1, orderBy:{field: UPDATED_AT, direction: DESC}){ 7 | edges{ 8 | node{ 9 | title 10 | number 11 | body 12 | } 13 | } 14 | } 15 | } 16 | } 17 | } 18 | }' 19 | variables: 20 | owner: 21 | type: arg 22 | name: owner 23 | name: 24 | type: arg 25 | name: name 26 | sha: 27 | type: arg 28 | name: sha 29 | -------------------------------------------------------------------------------- /.github/queries/pr-labels.query.yml: -------------------------------------------------------------------------------- 1 | query: ' 2 | query($owner:String!, $name:String!, $pr:Int!) { 3 | repository(owner:$owner, name:$name) { 4 | pullRequest(number:$pr) { 5 | labels(first:100) { 6 | nodes { 7 | name 8 | } 9 | } 10 | } 11 | } 12 | }' 13 | variables: 14 | owner: 15 | type: arg 16 | name: owner 17 | name: 18 | type: arg 19 | name: name 20 | pr: 21 | type: jq 22 | file: pr.json 23 | query: '.data.repository.commit.associatedPullRequests.edges[0].node.number' 24 | cast: Int 25 | -------------------------------------------------------------------------------- /.github/workflows/changelog.yml: -------------------------------------------------------------------------------- 1 | name: Changelog updated? 
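# Calls ASFHyP3's shared changelog check, which fails the PR status unless CHANGELOG.md was updated (presumably waived for PRs carrying the `bumpless` label that dependabot applies above).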
2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - labeled 8 | - unlabeled 9 | - synchronize 10 | branches: 11 | - main 12 | - develop 13 | 14 | jobs: 15 | call-changelog-check-workflow: 16 | uses: ASFHyP3/actions/.github/workflows/reusable-changelog-check.yml@v0.20.0 17 | -------------------------------------------------------------------------------- /.github/workflows/check-links.yml: -------------------------------------------------------------------------------- 1 | name: Check links 2 | 3 | # TODO: switch back to the push trigger if/when this workflow gets fixed; see https://github.com/ASFHyP3/hyp3-docs/issues/340 4 | # on: push 5 | on: 6 | workflow_dispatch: 7 | 8 | jobs: 9 | linkChecker: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: actions/checkout@v4 13 | 14 | - name: Link Checker 15 | id: lychee 16 | uses: lycheeverse/lychee-action@v2.4.1 17 | env: 18 | GITHUB_TOKEN: ${{secrets.TOOLS_BOT_PAK}} 19 | with: 20 | args: --insecure "docs/**/*.md" "README.md" 21 | fail: true 22 | -------------------------------------------------------------------------------- /.github/workflows/create-jira-issue.yml: -------------------------------------------------------------------------------- 1 | name: Create Jira issue 2 | 3 | on: 4 | issues: 5 | types: [labeled] 6 | 7 | jobs: 8 | call-create-jira-issue-workflow: 9 | uses: ASFHyP3/actions/.github/workflows/reusable-create-jira-issue.yml@v0.20.0 10 | secrets: 11 | JIRA_BASE_URL: ${{ secrets.JIRA_BASE_URL }} 12 | JIRA_USER_EMAIL: ${{ secrets.JIRA_USER_EMAIL }} 13 | JIRA_API_TOKEN: ${{ secrets.JIRA_API_TOKEN }} 14 | JIRA_PROJECT: ${{ secrets.JIRA_PROJECT }} 15 | JIRA_FIELDS: ${{ secrets.JIRA_FIELDS }} 16 | -------------------------------------------------------------------------------- /.github/workflows/deploy_to_github_io.yml: -------------------------------------------------------------------------------- 1 | name: Deploy to Github.io 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | - develop 8 | 9 | jobs: 10 | build_and_deploy: 11 | name: Build site and deploy 12 | runs-on: "ubuntu-latest" 13 | steps: 14 | - uses: actions/checkout@v4 15 | with: 16 | path: hyp3-docs 17 | 18 | - uses: conda-incubator/setup-miniconda@v3 19 | with: 20 | auto-update-conda: true 21 | python-version: '3.10' 22 | activate-environment: hyp3-docs 23 | environment-file: hyp3-docs/environment.yml 24 | 25 | - name: Deploy test website 26 | if: github.ref == 'refs/heads/develop' 27 | shell: bash -l {0} 28 | run: | 29 | cd hyp3-docs 30 | # Remove conflicting CNAME for project site 31 | rm docs/CNAME 32 | # Set site URL for dev site 33 | sed -i 's|site_url: https://hyp3-docs.asf.alaska.edu/|site_url: https://hyp3-docs.asf.alaska.edu/hyp3-docs/|' mkdocs.yml 34 | mkdocs gh-deploy --force 35 | 36 | - uses: actions/checkout@v4 37 | if: github.ref == 'refs/heads/main' 38 | with: 39 | repository: ASFHyP3/ASFHyP3.github.io 40 | path: ASFHyP3.github.io 41 | token: ${{ secrets.TOOLS_BOT_PAK }} 42 | 43 | - name: deploy to github.io 44 | if: github.ref == 'refs/heads/main' 45 | shell: bash -l {0} 46 | run: | 47 | cd ASFHyP3.github.io 48 | mkdocs gh-deploy --force --config-file ../hyp3-docs/mkdocs.yml --remote-branch main 49 | -------------------------------------------------------------------------------- /.github/workflows/labeled-pr.yml: -------------------------------------------------------------------------------- 1 | name: Is PR labeled? 
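# Checks that PRs into main carry a release label (presumably the major/minor/patch/bumpless set that tag-release.yml below uses to compute the next version).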
2 | 3 | on: 4 | pull_request: 5 | types: 6 | - opened 7 | - labeled 8 | - unlabeled 9 | - synchronize 10 | branches: 11 | - main 12 | 13 | jobs: 14 | call-labeled-pr-check-workflow: 15 | uses: ASFHyP3/actions/.github/workflows/reusable-labeled-pr-check.yml@v0.20.0 16 | -------------------------------------------------------------------------------- /.github/workflows/release-template-comment.yml: -------------------------------------------------------------------------------- 1 | on: 2 | pull_request: 3 | types: 4 | - opened 5 | branches: 6 | - main 7 | 8 | jobs: 9 | call-release-checklist-workflow: 10 | uses: ASFHyP3/actions/.github/workflows/reusable-release-checklist-comment.yml@v0.20.0 11 | secrets: 12 | USER_TOKEN: ${{ secrets.GITHUB_TOKEN }} 13 | -------------------------------------------------------------------------------- /.github/workflows/release.yml: -------------------------------------------------------------------------------- 1 | name: Create Release 2 | 3 | on: 4 | push: 5 | tags: 6 | - 'v*' 7 | 8 | jobs: 9 | call-release-workflow: 10 | uses: ASFHyP3/actions/.github/workflows/reusable-release.yml@v0.20.0 11 | with: 12 | release_prefix: HyP3 Docs 13 | secrets: 14 | USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }} 15 | -------------------------------------------------------------------------------- /.github/workflows/spellcheck.yml: -------------------------------------------------------------------------------- 1 | name: Spell Check .md 2 | on: push 3 | 4 | jobs: 5 | spellcheck: 6 | runs-on: ubuntu-latest 7 | name: Spellcheck 8 | steps: 9 | - name: Checkout 10 | uses: actions/checkout@v4 11 | - name: Spellcheck 12 | uses: tbroadley/spellchecker-cli-action@v1 13 | with: 14 | files: "docs/**/*.md **/*.md *.md" 15 | quiet: true 16 | noSuggestions: true 17 | dictionaries: ".github/dictionary.txt" 18 | reports: "report.json" 19 | plugins: "spell repeated-words syntax-mentions syntax-urls frontmatter" 20 | - name: Report 21 | if: ${{ failure() }} 22 | uses: actions/upload-artifact@v4 23 | with: 24 | name: report 25 | path: report.json 26 | -------------------------------------------------------------------------------- /.github/workflows/static-analysis.yml: -------------------------------------------------------------------------------- 1 | name: Static analysis 2 | 3 | on: push 4 | 5 | jobs: 6 | call-secrets-analysis-workflow: 7 | uses: ASFHyP3/actions/.github/workflows/reusable-secrets-analysis.yml@v0.20.0 8 | -------------------------------------------------------------------------------- /.github/workflows/tag-release.yml: -------------------------------------------------------------------------------- 1 | name: Tag New Version 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | call-bump-version-workflow: 10 | uses: ASFHyP3/actions/.github/workflows/reusable-bump-version.yml@v0.20.0 11 | secrets: 12 | USER_TOKEN: ${{ secrets.TOOLS_BOT_PAK }} 13 | -------------------------------------------------------------------------------- /.github/workflows/test-build.yml: -------------------------------------------------------------------------------- 1 | name: Test build of website 2 | 3 | on: 4 | pull_request: 5 | branches: 6 | - main 7 | - develop 8 | 9 | jobs: 10 | test_build: 11 | name: Test build of site 12 | runs-on: "ubuntu-latest" 13 | steps: 14 | - uses: actions/checkout@v4 15 | 16 | - uses: conda-incubator/setup-miniconda@v3 17 | with: 18 | auto-update-conda: true 19 | python-version: '3.10' 20 | activate-environment: hyp3-docs 21 | environment-file: environment.yml 22 
| 23 | - name: Test build of website 24 | shell: bash -l {0} 25 | run: | 26 | mkdocs build 27 | -------------------------------------------------------------------------------- /.github/workflows/update_asf_tools_version.yml: -------------------------------------------------------------------------------- 1 | name: Update ASF Tools version 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | asf_tools_version: 7 | description: 'The new version of ASF Tools' 8 | required: true 9 | 10 | jobs: 11 | bump_sdk_version: 12 | name: Bump the ASF Tools version 13 | runs-on: "ubuntu-latest" 14 | steps: 15 | - uses: actions/checkout@v4 16 | with: 17 | token: ${{ secrets.TOOLS_BOT_PAK }} 18 | 19 | - name: create patch branch 20 | id: patch 21 | env: 22 | ASF_TOOLS_VERSION: ${{ github.event.inputs.asf_tools_version }} 23 | PATCH_BRANCH: bump-asf-tools-${{ github.event.inputs.asf_tools_version }} 24 | run: | 25 | git config user.name "tools-bot" 26 | git config user.email "UAF-asf-apd@alaska.edu" 27 | echo "::set-output name=branch::${PATCH_BRANCH}" 28 | git checkout -b ${PATCH_BRANCH} 29 | sed -i -r "s|asf-tools/v[0-9]+\.[0-9]+\.[0-9]+|asf-tools/${ASF_TOOLS_VERSION}|" docs/tools/asf_tools.md 30 | sed -i -r "s|\`asf_tools\` \*v[0-9]+\.[0-9]+\.[0-9]+\*|\`asf_tools\` \*${ASF_TOOLS_VERSION}\*|" docs/tools/asf_tools_api.md 31 | sed -i -r "s|asf_tools=[0-9]+\.[0-9]+\.[0-9]+|asf_tools=${ASF_TOOLS_VERSION#v}|" environment.yml 32 | git commit -am "Bump ASF Tools version to ${ASF_TOOLS_VERSION}" 33 | git push origin ${PATCH_BRANCH} 34 | 35 | - name: open PR 36 | env: 37 | PR_TITLE: Update ASF Tools version to ${{ github.event.inputs.asf_tools_version }} 38 | PR_BODY: PR created by a `workflow_dispatch` event 39 | GH_TOKEN: ${{ secrets.TOOLS_BOT_PAK }} 40 | run: | 41 | gh pr create --title "${PR_TITLE}" \ 42 | --body "${PR_BODY}" \ 43 | --label tools-bot \ 44 | --head ${{ steps.patch.outputs.branch }} \ 45 | --base main 46 | -------------------------------------------------------------------------------- /.github/workflows/update_sdk_version.yml: -------------------------------------------------------------------------------- 1 | name: Update HyP3 SDK version 2 | 3 | on: 4 | workflow_dispatch: 5 | inputs: 6 | sdk_version: 7 | description: 'The new version of the SDK' 8 | required: true 9 | 10 | jobs: 11 | bump_sdk_version: 12 | name: Bump the SDK version 13 | runs-on: "ubuntu-latest" 14 | steps: 15 | - uses: actions/checkout@v4 16 | with: 17 | token: ${{ secrets.TOOLS_BOT_PAK }} 18 | 19 | - name: create patch branch 20 | id: patch 21 | env: 22 | SDK_VERSION: ${{ github.event.inputs.sdk_version }} 23 | PATCH_BRANCH: bump-sdk-${{ github.event.inputs.sdk_version }} 24 | run: | 25 | git config user.name "tools-bot" 26 | git config user.email "UAF-asf-apd@alaska.edu" 27 | echo "::set-output name=branch::${PATCH_BRANCH}" 28 | git checkout -b ${PATCH_BRANCH} 29 | sed -i -r "s|hyp3-sdk/v[0-9]+\.[0-9]+\.[0-9]+|hyp3-sdk/${SDK_VERSION}|" docs/using/sdk.md 30 | sed -i -r "s|\`hyp3_sdk\` \*v[0-9]+\.[0-9]+\.[0-9]+\*|\`hyp3_sdk\` \*${SDK_VERSION}\*|" docs/using/sdk_api.md 31 | sed -i -r "s|hyp3_sdk=[0-9]+\.[0-9]+\.[0-9]+|hyp3_sdk=${SDK_VERSION#v}|" environment.yml 32 | git commit -am "Bump SDK version to ${SDK_VERSION}" 33 | git push origin ${PATCH_BRANCH} 34 | 35 | - name: open PR 36 | env: 37 | PR_TITLE: Update HyP3 SDK version to ${{ github.event.inputs.sdk_version }} 38 | PR_BODY: PR created by a `workflow_dispatch` event 39 | GH_TOKEN: ${{ secrets.TOOLS_BOT_PAK }} 40 | run: | 41 | gh pr create --title "${PR_TITLE}" \ 42 | 
--body "${PR_BODY}" \ 43 | --label tools-bot \ 44 | --head ${{ steps.patch.outputs.branch }} \ 45 | --base main 46 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | #### joe made this: http://goel.io/joe 2 | 3 | #### python #### 4 | # Byte-compiled / optimized / DLL files 5 | __pycache__/ 6 | *.py[cod] 7 | *$py.class 8 | 9 | # C extensions 10 | *.so 11 | 12 | # Distribution / packaging 13 | .Python 14 | build/ 15 | develop-eggs/ 16 | dist/ 17 | downloads/ 18 | eggs/ 19 | .eggs/ 20 | lib/ 21 | lib64/ 22 | parts/ 23 | sdist/ 24 | var/ 25 | wheels/ 26 | share/python-wheels/ 27 | *.egg-info/ 28 | .installed.cfg 29 | *.egg 30 | MANIFEST 31 | 32 | # PyInstaller 33 | # Usually these files are written by a python script from a template 34 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 35 | *.manifest 36 | *.spec 37 | 38 | # Installer logs 39 | pip-log.txt 40 | pip-delete-this-directory.txt 41 | 42 | # Unit test / coverage reports 43 | htmlcov/ 44 | .tox/ 45 | .nox/ 46 | .coverage 47 | .coverage.* 48 | .cache 49 | nosetests.xml 50 | coverage.xml 51 | *.cover 52 | *.py,cover 53 | .hypothesis/ 54 | .pytest_cache/ 55 | cover/ 56 | 57 | # Translations 58 | *.mo 59 | *.pot 60 | 61 | # Django stuff: 62 | *.log 63 | local_settings.py 64 | db.sqlite3 65 | db.sqlite3-journal 66 | 67 | # Flask stuff: 68 | instance/ 69 | .webassets-cache 70 | 71 | # Scrapy stuff: 72 | .scrapy 73 | 74 | # Sphinx documentation 75 | docs/_build/ 76 | 77 | # PyBuilder 78 | .pybuilder/ 79 | target/ 80 | 81 | # Jupyter Notebook 82 | .ipynb_checkpoints 83 | 84 | # IPython 85 | profile_default/ 86 | ipython_config.py 87 | 88 | # pyenv 89 | # For a library or package, you might want to ignore these files since the code is 90 | # intended to run in multiple environments; otherwise, check them in: 91 | # .python-version 92 | 93 | # pipenv 94 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 95 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 96 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 97 | # install all needed dependencies. 98 | #Pipfile.lock 99 | 100 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 101 | __pypackages__/ 102 | 103 | # Celery stuff 104 | celerybeat-schedule 105 | celerybeat.pid 106 | 107 | # SageMath parsed files 108 | *.sage.py 109 | 110 | # Environments 111 | .env 112 | .venv 113 | env/ 114 | venv/ 115 | ENV/ 116 | env.bak/ 117 | venv.bak/ 118 | 119 | # Spyder project settings 120 | .spyderproject 121 | .spyproject 122 | 123 | # Rope project settings 124 | .ropeproject 125 | 126 | # mkdocs documentation 127 | /site 128 | 129 | # mypy 130 | .mypy_cache/ 131 | .dmypy.json 132 | dmypy.json 133 | 134 | # Pyre type checker 135 | .pyre/ 136 | 137 | # pytype static type analyzer 138 | .pytype/ 139 | 140 | # Cython debug symbols 141 | cython_debug/ 142 | 143 | 144 | #### jetbrains #### 145 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider 146 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839 147 | 148 | # User-specific stuff 149 | .idea/**/workspace.xml 150 | .idea/**/tasks.xml 151 | .idea/**/usage.statistics.xml 152 | .idea/**/dictionaries 153 | .idea/**/shelf 154 | 155 | # AWS User-specific 156 | .idea/**/aws.xml 157 | 158 | # Generated files 159 | .idea/**/contentModel.xml 160 | 161 | # Sensitive or high-churn files 162 | .idea/**/dataSources/ 163 | .idea/**/dataSources.ids 164 | .idea/**/dataSources.local.xml 165 | .idea/**/sqlDataSources.xml 166 | .idea/**/dynamic.xml 167 | .idea/**/uiDesigner.xml 168 | .idea/**/dbnavigator.xml 169 | 170 | # Gradle 171 | .idea/**/gradle.xml 172 | .idea/**/libraries 173 | 174 | # Gradle and Maven with auto-import 175 | # When using Gradle or Maven with auto-import, you should exclude module files, 176 | # since they will be recreated, and may cause churn. Uncomment if using 177 | # auto-import. 
178 | # .idea/artifacts 179 | # .idea/compiler.xml 180 | # .idea/jarRepositories.xml 181 | # .idea/modules.xml 182 | # .idea/*.iml 183 | # .idea/modules 184 | # *.iml 185 | # *.ipr 186 | 187 | # CMake 188 | cmake-build-*/ 189 | 190 | # Mongo Explorer plugin 191 | .idea/**/mongoSettings.xml 192 | 193 | # File-based project format 194 | *.iws 195 | 196 | # IntelliJ 197 | out/ 198 | 199 | # mpeltonen/sbt-idea plugin 200 | .idea_modules/ 201 | 202 | # JIRA plugin 203 | atlassian-ide-plugin.xml 204 | 205 | # Cursive Clojure plugin 206 | .idea/replstate.xml 207 | 208 | # Crashlytics plugin (for Android Studio and IntelliJ) 209 | com_crashlytics_export_strings.xml 210 | crashlytics.properties 211 | crashlytics-build.properties 212 | fabric.properties 213 | 214 | # Editor-based Rest Client 215 | .idea/httpRequests 216 | 217 | # Android studio 3.1+ serialized cache file 218 | .idea/caches/build_file_checksums.ser 219 | 220 | 221 | #### vim #### 222 | # Swap 223 | [._]*.s[a-v][a-z] 224 | !*.svg # comment out if you don't need vector files 225 | [._]*.sw[a-p] 226 | [._]s[a-rt-v][a-z] 227 | [._]ss[a-gi-z] 228 | [._]sw[a-p] 229 | 230 | # Session 231 | Session.vim 232 | Sessionx.vim 233 | 234 | # Temporary 235 | .netrwhist 236 | *~ 237 | # Auto-generated tag files 238 | tags 239 | # Persistent undo 240 | [._]*.un~ 241 | -------------------------------------------------------------------------------- /.gitleaks.toml: -------------------------------------------------------------------------------- 1 | title = "gitleaks config" 2 | [[rules]] 3 | description = "AWS Manager ID" 4 | regex = '''(A3T[A-Z0-9]|AKIA|AGPA|AIDA|AROA|AIPA|ANPA|ANVA|ASIA)[A-Z0-9]{16}''' 5 | tags = ["key", "AWS"] 6 | [[rules]] 7 | description = "AWS Secret Key" 8 | regex = '''(?i)aws(.{0,20})?(?-i)[0-9a-zA-Z\/+]{40}''' 9 | tags = ["key", "AWS"] 10 | [[rules]] 11 | description = "AWS MWS key" 12 | regex = '''amzn\.mws\.[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}''' 13 | tags = ["key", "AWS", "MWS"] 14 | [[rules]] 15 | description = "Github" 16 | regex = '''(?i)github(.{0,20})?(?-i)[0-9a-zA-Z]{35,40}''' 17 | tags = ["key", "Github"] 18 | [[rules]] 19 | description = "Asymmetric Private Key" 20 | regex = '''-----BEGIN ((EC|PGP|DSA|RSA|OPENSSH) )?PRIVATE KEY( BLOCK)?-----''' 21 | tags = ["key", "AsymmetricPrivateKey"] 22 | [[rules]] 23 | description = "Generic Credential" 24 | regex = '''(?i)(api_key|apikey|secret|password|pass|pw|key)(.{0,20})?[0-9a-zA-Z]{16,45}''' 25 | tags = ["key", "API", "generic"] 26 | [[rules.whitelist]] 27 | regex = '''KeyChecking.no.*''' 28 | description = "Ignore ssh settings for GitLab tools-bot" 29 | [[rules]] 30 | description = "Google API key" 31 | regex = '''AIza[0-9A-Za-z\\-_]{35}''' 32 | tags = ["key", "Google"] 33 | [[rules]] 34 | description = "Google (GCP) Service Account" 35 | regex = '''"type": "service_account"''' 36 | tags = ["key", "Google"] 37 | [[rules]] 38 | description = "Heroku API key" 39 | regex = '''(?i)heroku(.{0,20})?[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}''' 40 | tags = ["key", "Heroku"] 41 | [[rules]] 42 | description = "MailChimp API key" 43 | regex = '''(?i)(mailchimp|mc)(.{0,20})?[0-9a-f]{32}-us[0-9]{1,2}''' 44 | tags = ["key", "Mailchimp"] 45 | [[rules]] 46 | description = "Mailgun API key" 47 | regex = '''((?i)(mailgun|mg)(.{0,20})?)?key-[0-9a-z]{32}''' 48 | tags = ["key", "Mailgun"] 49 | [[rules]] 50 | description = "Slack Webhook" 51 | regex = '''https://hooks.slack.com/services/T[a-zA-Z0-9_]{8}/B[a-zA-Z0-9_]{8}/[a-zA-Z0-9_]{24}''' 52 | tags = ["key", 
"slack"] 53 | [whitelist] 54 | description = "Whitelisted files" 55 | files = ['''(^.*gitleaks.toml$|(.*?)(jpg|gif|doc|pdf|bin)$)'''] 56 | -------------------------------------------------------------------------------- /.lycheeignore: -------------------------------------------------------------------------------- 1 | https://twitter.com/ASFHyP3 2 | http://127.0.0.1:8000/ 3 | https://github.com/issues?q=is%3Aopen+is%3Aissue+org%3AASFHyP3 4 | https://github.com/issues 5 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: If you use this software, please cite it as below. 3 | authors: 4 | - family-names: Hogenson 5 | given-names: Kirk 6 | - family-names: Kristenson 7 | given-names: Heidi 8 | orcid: https://orcid.org/0000-0002-2130-4527 9 | - family-names: Kennedy 10 | given-names: Joseph 11 | orcid: https://orcid.org/0000-0002-9348-693X 12 | - family-names: Johnston 13 | given-names: Andrew 14 | - family-names: Rine 15 | given-names: James 16 | - family-names: Logan 17 | given-names: Thomas 18 | orcid: https://orcid.org/0000-0002-6982-0344 19 | - family-names: Zhu 20 | given-names: Jiang 21 | orcid: https://orcid.org/0000-0001-6833-5518 22 | - family-names: Williams 23 | given-names: Forrest 24 | orcid: https://orcid.org/0000-0001-8721-6020 25 | - family-names: Herrmann 26 | given-names: Jake 27 | - family-names: Smale 28 | given-names: Jacquelyn 29 | orcid: https://orcid.org/0000-0002-2749-5010 30 | - family-names: Meyer 31 | given-names: Franz 32 | orcid: https://orcid.org/0000-0002-2491-526X 33 | date-released: 2020-10-20 34 | repository-code: https://github.com/ASFHyP3/hyp3-docs 35 | title: "Hybrid Pluggable Processing Pipeline (HyP3): A cloud-native infrastructure for generic processing of SAR data" 36 | doi: 10.5281/zenodo.4646138 37 | license: BSD-3-Clause 38 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | BSD 3-Clause License 2 | 3 | Copyright (c) 2021, Alaska Satellite Facility 4 | All rights reserved. 5 | 6 | Redistribution and use in source and binary forms, with or without 7 | modification, are permitted provided that the following conditions are met: 8 | 9 | * Redistributions of source code must retain the above copyright notice, this 10 | list of conditions and the following disclaimer. 11 | 12 | * Redistributions in binary form must reproduce the above copyright notice, 13 | this list of conditions and the following disclaimer in the documentation 14 | and/or other materials provided with the distribution. 15 | 16 | * Neither the name of the copyright holder nor the names of its 17 | contributors may be used to endorse or promote products derived from 18 | this software without specific prior written permission. 19 | 20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" 21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE 22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 23 | DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 30 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # HyP3 documentation 2 | 3 | [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.4646138.svg)](https://doi.org/10.5281/zenodo.4646138) 4 | 5 | HyP3 documentation is built using [MkDocs](https://www.mkdocs.org/) and the 6 | [ASF Theme](https://github.com/ASFHyP3/mkdocs-asf-theme). 7 | 8 | ## How to 9 | 10 | ### Setting up a development environment 11 | 12 | In order to automatically document some of our APIs, we use a `conda` environment 13 | with our APIs installed. You can get Miniconda (recommended) here: 14 | 15 | <https://docs.conda.io/en/latest/miniconda.html> 16 | 17 | Once conda is installed, from the repository root, you can create and activate a 18 | conda environment with all the necessary dependencies: 19 | 20 | ``` 21 | conda env create -f environment.yml 22 | conda activate hyp3-docs 23 | ``` 24 | 25 | Later, you can update the environment's dependencies with 26 | 27 | ``` 28 | conda env update -f environment.yml 29 | ``` 30 | 31 | ### Build and view the documentation site 32 | 33 | With the `hyp3-docs` conda environment activated, run 34 | 35 | ``` 36 | mkdocs serve 37 | ``` 38 | 39 | to generate the documentation. This will allow you to view it at <http://127.0.0.1:8000/>. 40 | MkDocs will automatically watch for new/changed files in this directory and 41 | rebuild the website so you can see your changes live (just refresh the webpage!). 42 | 43 | *Note: `mkdocs serve` captures your terminal; use `ctrl+c` to exit. It is recommended you 44 | use a second/dedicated terminal so you can keep this command running.* 45 | 46 | ### Deploy 47 | 48 | This documentation site is deployed as a GitHub Organization website with a CNAME 49 | so that it's viewable at <https://hyp3-docs.asf.alaska.edu>. The website is served 50 | out of the special <https://github.com/ASFHyP3/ASFHyP3.github.io> repository. Deployment 51 | is handled automatically with the `.github/workflows/deploy_to_github_io.yml` GitHub 52 | Action for any merge to `main`. 53 | 54 | There is also a test site deployed to <https://hyp3-docs.asf.alaska.edu/hyp3-docs/>, which 55 | tracks the `develop` branch of this repo and is served out of the `gh-pages` branch 56 | of this repo. 57 | 58 | ### Enable or disable the announcement banner 59 | 60 | We can display a site-wide banner for important announcements. 61 | The content of this banner is specified in [`overrides/main.html`](overrides/main.html), 62 | which should contain the following placeholder text when the banner is not in use: 63 | 64 | ```html 65 | {% extends "partials/main.html" %} 66 | 67 | {# Uncomment this block to enable the announcement banner: 68 | {% block announce %} 69 | <div>
70 | ⚠️ TODO: Your announcement here. <br>
71 | <a href="TODO">Read the full announcement.</a> 72 | </div>
73 | {% endblock %} 74 | #} 75 | ``` 76 | 77 | In order to enable the banner, uncomment the `announce` block and fill in the `TODO`s. 78 | Below is an example of an enabled announcement banner 79 | (taken from [here](https://github.com/ASFHyP3/hyp3-docs/blob/99c0d2294f1be1249e23880b7a849f13fa99a021/overrides/main.html)): 80 | 81 | ```html 82 | {% extends "partials/main.html" %} 83 | 84 | {% block announce %} 85 | <div>
86 | ⚠️ Monthly processing quotas were replaced by a credit system on April 1st. <br>
87 | <a href="...">Read the full announcement.</a> 88 | </div>
89 | {% endblock %} 90 | ``` 91 | 92 | When the announcement is no longer needed, restore the file to the placeholder text in order to disable the banner. 93 | 94 | If you are [building and viewing the site locally](#build-and-view-the-documentation-site), 95 | you will need to exit with `ctrl+c` and then re-run `mkdocs serve` 96 | in order to re-render any changes you make to this file. 97 | 98 | ## Markdown formatting 99 | 100 | MkDocs and GitHub parse markdown documents slightly differently. Some compatibility tips: 101 | 102 | * Raw links should be wrapped in angle brackets: `<https://hyp3-docs.asf.alaska.edu>` 103 | * MkDocs is pickier about whitespace between types (e.g., headers, paragraphs, lists) and seems to 104 | expect indents to be 4 spaces. So to get a representation like: 105 | 106 |
107 | 108 | - A list item 109 | 110 | ##### A sub list heading 111 | - A sub-list item 112 | 113 |
114 | 115 | in MkDocs, you'll want to write it like: 116 | 117 | ### Good 118 | ``` 119 | - A list item 120 | 121 | ##### A sub list heading 122 | - A sub-list item 123 | ``` 124 | 125 | ### Bad 126 | ``` 127 | - A list item 128 | ##### A sub list heading 129 | - A sub-list item 130 | ``` 131 | 132 | ``` 133 | - A list item 134 | ##### A sub list heading 135 | - A sub-list item 136 | ``` 137 | 138 | ``` 139 | - A list item 140 | 141 | ##### A sub list heading 142 | - A sub-list item 143 | ``` 144 | -------------------------------------------------------------------------------- /docs/CNAME: -------------------------------------------------------------------------------- 1 | hyp3-docs.asf.alaska.edu -------------------------------------------------------------------------------- /docs/ViewerJS/example.local.css: -------------------------------------------------------------------------------- 1 | /* This is just a sample file with CSS rules. You should write your own @font-face declarations 2 | * to add support for your desired fonts. 3 | */ 4 | 5 | @font-face { 6 | font-family: 'Novecentowide Book'; 7 | src: url("/ViewerJS/fonts/Novecentowide-Bold-webfont.eot"); 8 | src: url("/ViewerJS/fonts/Novecentowide-Bold-webfont.eot?#iefix") format("embedded-opentype"), 9 | url("/ViewerJS/fonts/Novecentowide-Bold-webfont.woff") format("woff"), 10 | url("/fonts/Novecentowide-Bold-webfont.ttf") format("truetype"), 11 | url("/fonts/Novecentowide-Bold-webfont.svg#NovecentowideBookBold") format("svg"); 12 | font-weight: normal; 13 | font-style: normal; 14 | } 15 | 16 | @font-face { 17 | font-family: 'exotica'; 18 | src: url('/ViewerJS/fonts/Exotica-webfont.eot'); 19 | src: url('/ViewerJS/fonts/Exotica-webfont.eot?#iefix') format('embedded-opentype'), 20 | url('/ViewerJS/fonts/Exotica-webfont.woff') format('woff'), 21 | url('/ViewerJS/fonts/Exotica-webfont.ttf') format('truetype'), 22 | url('/ViewerJS/fonts/Exotica-webfont.svg#exoticamedium') format('svg'); 23 | font-weight: normal; 24 | font-style: normal; 25 | 26 | } 27 | 28 | -------------------------------------------------------------------------------- /docs/ViewerJS/images/kogmbh.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/kogmbh.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/nlnet.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/nlnet.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/texture.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/texture.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-download.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-download.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-fullscreen.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-fullscreen.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-menuArrows.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-menuArrows.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-pageDown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-pageDown.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-pageUp.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-pageUp.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-presentation.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-presentation.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-zoomIn.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-zoomIn.png -------------------------------------------------------------------------------- /docs/ViewerJS/images/toolbarButton-zoomOut.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/ViewerJS/images/toolbarButton-zoomOut.png -------------------------------------------------------------------------------- /docs/ViewerJS/pdfjsversion.js: -------------------------------------------------------------------------------- 1 | var /**@const{!string}*/pdfjs_version = "v1.1.114"; 2 | -------------------------------------------------------------------------------- /docs/ViewerJS/text_layer_builder.js: -------------------------------------------------------------------------------- 1 | /* -*- Mode: Java; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ 2 | /* Copyright 2012 Mozilla Foundation 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 
15 | */ 16 | /* globals CustomStyle, PDFJS */ 17 | 18 | 'use strict'; 19 | 20 | var MAX_TEXT_DIVS_TO_RENDER = 100000; 21 | 22 | var NonWhitespaceRegexp = /\S/; 23 | 24 | function isAllWhitespace(str) { 25 | return !NonWhitespaceRegexp.test(str); 26 | } 27 | 28 | /** 29 | * @typedef {Object} TextLayerBuilderOptions 30 | * @property {HTMLDivElement} textLayerDiv - The text layer container. 31 | * @property {number} pageIndex - The page index. 32 | * @property {PageViewport} viewport - The viewport of the text layer. 33 | * @property {PDFFindController} findController 34 | */ 35 | 36 | /** 37 | * TextLayerBuilder provides text-selection functionality for the PDF. 38 | * It does this by creating overlay divs over the PDF text. These divs 39 | * contain text that matches the PDF text they are overlaying. This object 40 | * also provides a way to highlight text that is being searched for. 41 | * @class 42 | */ 43 | var TextLayerBuilder = (function TextLayerBuilderClosure() { 44 | function TextLayerBuilder(options) { 45 | this.textLayerDiv = options.textLayerDiv; 46 | this.renderingDone = false; 47 | this.divContentDone = false; 48 | this.pageIdx = options.pageIndex; 49 | this.pageNumber = this.pageIdx + 1; 50 | this.matches = []; 51 | this.viewport = options.viewport; 52 | this.textDivs = []; 53 | this.findController = options.findController || null; 54 | } 55 | 56 | TextLayerBuilder.prototype = { 57 | _finishRendering: function TextLayerBuilder_finishRendering() { 58 | this.renderingDone = true; 59 | 60 | var event = document.createEvent('CustomEvent'); 61 | event.initCustomEvent('textlayerrendered', true, true, { 62 | pageNumber: this.pageNumber 63 | }); 64 | this.textLayerDiv.dispatchEvent(event); 65 | }, 66 | 67 | renderLayer: function TextLayerBuilder_renderLayer() { 68 | var textLayerFrag = document.createDocumentFragment(); 69 | var textDivs = this.textDivs; 70 | var textDivsLength = textDivs.length; 71 | var canvas = document.createElement('canvas'); 72 | var ctx = canvas.getContext('2d'); 73 | 74 | // No point in rendering many divs as it would make the browser 75 | // unusable even after the divs are rendered. 76 | if (textDivsLength > MAX_TEXT_DIVS_TO_RENDER) { 77 | this._finishRendering(); 78 | return; 79 | } 80 | 81 | var lastFontSize; 82 | var lastFontFamily; 83 | for (var i = 0; i < textDivsLength; i++) { 84 | var textDiv = textDivs[i]; 85 | if (textDiv.dataset.isWhitespace !== undefined) { 86 | continue; 87 | } 88 | 89 | var fontSize = textDiv.style.fontSize; 90 | var fontFamily = textDiv.style.fontFamily; 91 | 92 | // Only build font string and set to context if different from last. 93 | if (fontSize !== lastFontSize || fontFamily !== lastFontFamily) { 94 | ctx.font = fontSize + ' ' + fontFamily; 95 | lastFontSize = fontSize; 96 | lastFontFamily = fontFamily; 97 | } 98 | 99 | var width = ctx.measureText(textDiv.textContent).width; 100 | if (width > 0) { 101 | textLayerFrag.appendChild(textDiv); 102 | var transform; 103 | if (textDiv.dataset.canvasWidth !== undefined) { 104 | // Dataset values come of type string. 
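// (The division below relies on JS coercing that string back to a number; the resulting scaleX factor stretches the overlay div so its measured DOM text width matches the width drawn on the canvas, keeping text selection aligned with the rendered glyphs.)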
105 | var textScale = textDiv.dataset.canvasWidth / width; 106 | transform = 'scaleX(' + textScale + ')'; 107 | } else { 108 | transform = ''; 109 | } 110 | var rotation = textDiv.dataset.angle; 111 | if (rotation) { 112 | transform = 'rotate(' + rotation + 'deg) ' + transform; 113 | } 114 | if (transform) { 115 | CustomStyle.setProp('transform' , textDiv, transform); 116 | } 117 | } 118 | } 119 | 120 | this.textLayerDiv.appendChild(textLayerFrag); 121 | this._finishRendering(); 122 | this.updateMatches(); 123 | }, 124 | 125 | /** 126 | * Renders the text layer. 127 | * @param {number} timeout (optional) if specified, the rendering waits 128 | * for specified amount of ms. 129 | */ 130 | render: function TextLayerBuilder_render(timeout) { 131 | if (!this.divContentDone || this.renderingDone) { 132 | return; 133 | } 134 | 135 | if (this.renderTimer) { 136 | clearTimeout(this.renderTimer); 137 | this.renderTimer = null; 138 | } 139 | 140 | if (!timeout) { // Render right away 141 | this.renderLayer(); 142 | } else { // Schedule 143 | var self = this; 144 | this.renderTimer = setTimeout(function() { 145 | self.renderLayer(); 146 | self.renderTimer = null; 147 | }, timeout); 148 | } 149 | }, 150 | 151 | appendText: function TextLayerBuilder_appendText(geom, styles) { 152 | var style = styles[geom.fontName]; 153 | var textDiv = document.createElement('div'); 154 | this.textDivs.push(textDiv); 155 | if (isAllWhitespace(geom.str)) { 156 | textDiv.dataset.isWhitespace = true; 157 | return; 158 | } 159 | var tx = PDFJS.Util.transform(this.viewport.transform, geom.transform); 160 | var angle = Math.atan2(tx[1], tx[0]); 161 | if (style.vertical) { 162 | angle += Math.PI / 2; 163 | } 164 | var fontHeight = Math.sqrt((tx[2] * tx[2]) + (tx[3] * tx[3])); 165 | var fontAscent = fontHeight; 166 | if (style.ascent) { 167 | fontAscent = style.ascent * fontAscent; 168 | } else if (style.descent) { 169 | fontAscent = (1 + style.descent) * fontAscent; 170 | } 171 | 172 | var left; 173 | var top; 174 | if (angle === 0) { 175 | left = tx[4]; 176 | top = tx[5] - fontAscent; 177 | } else { 178 | left = tx[4] + (fontAscent * Math.sin(angle)); 179 | top = tx[5] - (fontAscent * Math.cos(angle)); 180 | } 181 | textDiv.style.left = left + 'px'; 182 | textDiv.style.top = top + 'px'; 183 | textDiv.style.fontSize = fontHeight + 'px'; 184 | textDiv.style.fontFamily = style.fontFamily; 185 | 186 | textDiv.textContent = geom.str; 187 | // |fontName| is only used by the Font Inspector. This test will succeed 188 | // when e.g. the Font Inspector is off but the Stepper is on, but it's 189 | // not worth the effort to do a more accurate test. 190 | if (PDFJS.pdfBug) { 191 | textDiv.dataset.fontName = geom.fontName; 192 | } 193 | // Storing into dataset will convert number into string. 194 | if (angle !== 0) { 195 | textDiv.dataset.angle = angle * (180 / Math.PI); 196 | } 197 | // We don't bother scaling single-char text divs, because it has very 198 | // little effect on text highlighting. This makes scrolling on docs with 199 | // lots of such divs a lot faster. 
200 | if (textDiv.textContent.length > 1) { 201 | if (style.vertical) { 202 | textDiv.dataset.canvasWidth = geom.height * this.viewport.scale; 203 | } else { 204 | textDiv.dataset.canvasWidth = geom.width * this.viewport.scale; 205 | } 206 | } 207 | }, 208 | 209 | setTextContent: function TextLayerBuilder_setTextContent(textContent) { 210 | this.textContent = textContent; 211 | 212 | var textItems = textContent.items; 213 | for (var i = 0, len = textItems.length; i < len; i++) { 214 | this.appendText(textItems[i], textContent.styles); 215 | } 216 | this.divContentDone = true; 217 | }, 218 | 219 | convertMatches: function TextLayerBuilder_convertMatches(matches) { 220 | var i = 0; 221 | var iIndex = 0; 222 | var bidiTexts = this.textContent.items; 223 | var end = bidiTexts.length - 1; 224 | var queryLen = (this.findController === null ? 225 | 0 : this.findController.state.query.length); 226 | var ret = []; 227 | 228 | for (var m = 0, len = matches.length; m < len; m++) { 229 | // Calculate the start position. 230 | var matchIdx = matches[m]; 231 | 232 | // Loop over the divIdxs. 233 | while (i !== end && matchIdx >= (iIndex + bidiTexts[i].str.length)) { 234 | iIndex += bidiTexts[i].str.length; 235 | i++; 236 | } 237 | 238 | if (i === bidiTexts.length) { 239 | console.error('Could not find a matching mapping'); 240 | } 241 | 242 | var match = { 243 | begin: { 244 | divIdx: i, 245 | offset: matchIdx - iIndex 246 | } 247 | }; 248 | 249 | // Calculate the end position. 250 | matchIdx += queryLen; 251 | 252 | // Somewhat the same array as above, but use > instead of >= to get 253 | // the end position right. 254 | while (i !== end && matchIdx > (iIndex + bidiTexts[i].str.length)) { 255 | iIndex += bidiTexts[i].str.length; 256 | i++; 257 | } 258 | 259 | match.end = { 260 | divIdx: i, 261 | offset: matchIdx - iIndex 262 | }; 263 | ret.push(match); 264 | } 265 | 266 | return ret; 267 | }, 268 | 269 | renderMatches: function TextLayerBuilder_renderMatches(matches) { 270 | // Early exit if there is nothing to render. 271 | if (matches.length === 0) { 272 | return; 273 | } 274 | 275 | var bidiTexts = this.textContent.items; 276 | var textDivs = this.textDivs; 277 | var prevEnd = null; 278 | var pageIdx = this.pageIdx; 279 | var isSelectedPage = (this.findController === null ? 280 | false : (pageIdx === this.findController.selected.pageIdx)); 281 | var selectedMatchIdx = (this.findController === null ? 282 | -1 : this.findController.selected.matchIdx); 283 | var highlightAll = (this.findController === null ? 
284 | false : this.findController.state.highlightAll); 285 | var infinity = { 286 | divIdx: -1, 287 | offset: undefined 288 | }; 289 | 290 | function beginText(begin, className) { 291 | var divIdx = begin.divIdx; 292 | textDivs[divIdx].textContent = ''; 293 | appendTextToDiv(divIdx, 0, begin.offset, className); 294 | } 295 | 296 | function appendTextToDiv(divIdx, fromOffset, toOffset, className) { 297 | var div = textDivs[divIdx]; 298 | var content = bidiTexts[divIdx].str.substring(fromOffset, toOffset); 299 | var node = document.createTextNode(content); 300 | if (className) { 301 | var span = document.createElement('span'); 302 | span.className = className; 303 | span.appendChild(node); 304 | div.appendChild(span); 305 | return; 306 | } 307 | div.appendChild(node); 308 | } 309 | 310 | var i0 = selectedMatchIdx, i1 = i0 + 1; 311 | if (highlightAll) { 312 | i0 = 0; 313 | i1 = matches.length; 314 | } else if (!isSelectedPage) { 315 | // Not highlighting all and this isn't the selected page, so do nothing. 316 | return; 317 | } 318 | 319 | for (var i = i0; i < i1; i++) { 320 | var match = matches[i]; 321 | var begin = match.begin; 322 | var end = match.end; 323 | var isSelected = (isSelectedPage && i === selectedMatchIdx); 324 | var highlightSuffix = (isSelected ? ' selected' : ''); 325 | 326 | if (this.findController) { 327 | this.findController.updateMatchPosition(pageIdx, i, textDivs, 328 | begin.divIdx, end.divIdx); 329 | } 330 | 331 | // Match inside new div. 332 | if (!prevEnd || begin.divIdx !== prevEnd.divIdx) { 333 | // If there was a previous div, then add the text at the end. 334 | if (prevEnd !== null) { 335 | appendTextToDiv(prevEnd.divIdx, prevEnd.offset, infinity.offset); 336 | } 337 | // Clear the divs and set the content until the starting point. 338 | beginText(begin); 339 | } else { 340 | appendTextToDiv(prevEnd.divIdx, prevEnd.offset, begin.offset); 341 | } 342 | 343 | if (begin.divIdx === end.divIdx) { 344 | appendTextToDiv(begin.divIdx, begin.offset, end.offset, 345 | 'highlight' + highlightSuffix); 346 | } else { 347 | appendTextToDiv(begin.divIdx, begin.offset, infinity.offset, 348 | 'highlight begin' + highlightSuffix); 349 | for (var n0 = begin.divIdx + 1, n1 = end.divIdx; n0 < n1; n0++) { 350 | textDivs[n0].className = 'highlight middle' + highlightSuffix; 351 | } 352 | beginText(end, 'highlight end' + highlightSuffix); 353 | } 354 | prevEnd = end; 355 | } 356 | 357 | if (prevEnd) { 358 | appendTextToDiv(prevEnd.divIdx, prevEnd.offset, infinity.offset); 359 | } 360 | }, 361 | 362 | updateMatches: function TextLayerBuilder_updateMatches() { 363 | // Only show matches when all rendering is done. 364 | if (!this.renderingDone) { 365 | return; 366 | } 367 | 368 | // Clear all matches. 369 | var matches = this.matches; 370 | var textDivs = this.textDivs; 371 | var bidiTexts = this.textContent.items; 372 | var clearedUntilDivIdx = -1; 373 | 374 | // Clear all current matches. 375 | for (var i = 0, len = matches.length; i < len; i++) { 376 | var match = matches[i]; 377 | var begin = Math.max(clearedUntilDivIdx, match.begin.divIdx); 378 | for (var n = begin, end = match.end.divIdx; n <= end; n++) { 379 | var div = textDivs[n]; 380 | div.textContent = bidiTexts[n].str; 381 | div.className = ''; 382 | } 383 | clearedUntilDivIdx = match.end.divIdx + 1; 384 | } 385 | 386 | if (this.findController === null || !this.findController.active) { 387 | return; 388 | } 389 | 390 | // Convert the matches on the page controller into the match format 391 | // used for the textLayer. 
392 | this.matches = this.convertMatches(this.findController === null ? 393 | [] : (this.findController.pageMatches[this.pageIdx] || [])); 394 | this.renderMatches(this.matches); 395 | } 396 | }; 397 | return TextLayerBuilder; 398 | })(); 399 | 400 | /** 401 | * @constructor 402 | * @implements IPDFTextLayerFactory 403 | */ 404 | function DefaultTextLayerFactory() {} 405 | DefaultTextLayerFactory.prototype = { 406 | /** 407 | * @param {HTMLDivElement} textLayerDiv 408 | * @param {number} pageIndex 409 | * @param {PageViewport} viewport 410 | * @returns {TextLayerBuilder} 411 | */ 412 | createTextLayerBuilder: function (textLayerDiv, pageIndex, viewport) { 413 | return new TextLayerBuilder({ 414 | textLayerDiv: textLayerDiv, 415 | pageIndex: pageIndex, 416 | viewport: viewport 417 | }); 418 | } 419 | }; 420 | -------------------------------------------------------------------------------- /docs/ViewerJS/ui_utils.js: -------------------------------------------------------------------------------- 1 | /* -*- Mode: Java; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */ 2 | /* Copyright 2012 Mozilla Foundation 3 | * 4 | * Licensed under the Apache License, Version 2.0 (the "License"); 5 | * you may not use this file except in compliance with the License. 6 | * You may obtain a copy of the License at 7 | * 8 | * http://www.apache.org/licenses/LICENSE-2.0 9 | * 10 | * Unless required by applicable law or agreed to in writing, software 11 | * distributed under the License is distributed on an "AS IS" BASIS, 12 | * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 13 | * See the License for the specific language governing permissions and 14 | * limitations under the License. 15 | */ 16 | 17 | 'use strict'; 18 | 19 | var CSS_UNITS = 96.0 / 72.0; 20 | var DEFAULT_SCALE = 'auto'; 21 | var UNKNOWN_SCALE = 0; 22 | var MAX_AUTO_SCALE = 1.25; 23 | var SCROLLBAR_PADDING = 40; 24 | var VERTICAL_PADDING = 5; 25 | 26 | // optimised CSS custom property getter/setter 27 | var CustomStyle = (function CustomStyleClosure() { 28 | 29 | // As noted on: http://www.zachstronaut.com/posts/2009/02/17/ 30 | // animate-css-transforms-firefox-webkit.html 31 | // in some versions of IE9 it is critical that ms appear in this list 32 | // before Moz 33 | var prefixes = ['ms', 'Moz', 'Webkit', 'O']; 34 | var _cache = {}; 35 | 36 | function CustomStyle() {} 37 | 38 | CustomStyle.getProp = function get(propName, element) { 39 | // check cache only when no element is given 40 | if (arguments.length === 1 && typeof _cache[propName] === 'string') { 41 | return _cache[propName]; 42 | } 43 | 44 | element = element || document.documentElement; 45 | var style = element.style, prefixed, uPropName; 46 | 47 | // test standard property first 48 | if (typeof style[propName] === 'string') { 49 | return (_cache[propName] = propName); 50 | } 51 | 52 | // capitalize 53 | uPropName = propName.charAt(0).toUpperCase() + propName.slice(1); 54 | 55 | // test vendor specific properties 56 | for (var i = 0, l = prefixes.length; i < l; i++) { 57 | prefixed = prefixes[i] + uPropName; 58 | if (typeof style[prefixed] === 'string') { 59 | return (_cache[propName] = prefixed); 60 | } 61 | } 62 | 63 | //if all fails then set to undefined 64 | return (_cache[propName] = 'undefined'); 65 | }; 66 | 67 | CustomStyle.setProp = function set(propName, element, str) { 68 | var prop = this.getProp(propName); 69 | if (prop !== 'undefined') { 70 | element.style[prop] = str; 71 | } 72 | }; 73 | 74 | return CustomStyle; 75 | })(); 76 | 
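// Usage sketch (illustrative note; `div` stands for any styled HTMLElement):
//   CustomStyle.setProp('transform', div, 'rotate(90deg)');
//   var prop = CustomStyle.getProp('transform'); // e.g. 'WebkitTransform', or
//   the string 'undefined' when no supported form of the property exists.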
77 | function getFileName(url) { 78 | var anchor = url.indexOf('#'); 79 | var query = url.indexOf('?'); 80 | var end = Math.min( 81 | anchor > 0 ? anchor : url.length, 82 | query > 0 ? query : url.length); 83 | return url.substring(url.lastIndexOf('/', end) + 1, end); 84 | } 85 | 86 | /** 87 | * Returns scale factor for the canvas. It makes sense for the HiDPI displays. 88 | * @return {Object} The object with horizontal (sx) and vertical (sy) 89 | scales. The scaled property is set to false if scaling is 90 | not required, true otherwise. 91 | */ 92 | function getOutputScale(ctx) { 93 | var devicePixelRatio = window.devicePixelRatio || 1; 94 | var backingStoreRatio = ctx.webkitBackingStorePixelRatio || 95 | ctx.mozBackingStorePixelRatio || 96 | ctx.msBackingStorePixelRatio || 97 | ctx.oBackingStorePixelRatio || 98 | ctx.backingStorePixelRatio || 1; 99 | var pixelRatio = devicePixelRatio / backingStoreRatio; 100 | return { 101 | sx: pixelRatio, 102 | sy: pixelRatio, 103 | scaled: pixelRatio !== 1 104 | }; 105 | } 106 | 107 | /** 108 | * Scrolls specified element into view of its parent. 109 | * element {Object} The element to be visible. 110 | * spot {Object} An object with optional top and left properties, 111 | * specifying the offset from the top left edge. 112 | */ 113 | function scrollIntoView(element, spot) { 114 | // Assuming offsetParent is available (it's not available when viewer is in 115 | // hidden iframe or object). We have to scroll: if the offsetParent is not set 116 | // producing the error. See also animationStartedClosure. 117 | var parent = element.offsetParent; 118 | var offsetY = element.offsetTop + element.clientTop; 119 | var offsetX = element.offsetLeft + element.clientLeft; 120 | if (!parent) { 121 | console.error('offsetParent is not set -- cannot scroll'); 122 | return; 123 | } 124 | while (parent.clientHeight === parent.scrollHeight) { 125 | if (parent.dataset._scaleY) { 126 | offsetY /= parent.dataset._scaleY; 127 | offsetX /= parent.dataset._scaleX; 128 | } 129 | offsetY += parent.offsetTop; 130 | offsetX += parent.offsetLeft; 131 | parent = parent.offsetParent; 132 | if (!parent) { 133 | return; // no need to scroll 134 | } 135 | } 136 | if (spot) { 137 | if (spot.top !== undefined) { 138 | offsetY += spot.top; 139 | } 140 | if (spot.left !== undefined) { 141 | offsetX += spot.left; 142 | parent.scrollLeft = offsetX; 143 | } 144 | } 145 | parent.scrollTop = offsetY; 146 | } 147 | 148 | /** 149 | * Helper function to start monitoring the scroll event and converting them into 150 | * PDF.js friendly one: with scroll debounce and scroll direction. 151 | */ 152 | function watchScroll(viewAreaElement, callback) { 153 | var debounceScroll = function debounceScroll(evt) { 154 | if (rAF) { 155 | return; 156 | } 157 | // schedule an invocation of scroll for next animation frame. 
158 | rAF = window.requestAnimationFrame(function viewAreaElementScrolled() { 159 | rAF = null; 160 | 161 | var currentY = viewAreaElement.scrollTop; 162 | var lastY = state.lastY; 163 | if (currentY !== lastY) { 164 | state.down = currentY > lastY; 165 | } 166 | state.lastY = currentY; 167 | callback(state); 168 | }); 169 | }; 170 | 171 | var state = { 172 | down: true, 173 | lastY: viewAreaElement.scrollTop, 174 | _eventHandler: debounceScroll 175 | }; 176 | 177 | var rAF = null; 178 | viewAreaElement.addEventListener('scroll', debounceScroll, true); 179 | return state; 180 | } 181 | 182 | /** 183 | * Use binary search to find the index of the first item in a given array which 184 | * passes a given condition. The items are expected to be sorted in the sense 185 | * that if the condition is true for one item in the array, then it is also true 186 | * for all following items. 187 | * 188 | * @returns {Number} Index of the first array element to pass the test, 189 | * or |items.length| if no such element exists. 190 | */ 191 | function binarySearchFirstItem(items, condition) { 192 | var minIndex = 0; 193 | var maxIndex = items.length - 1; 194 | 195 | if (items.length === 0 || !condition(items[maxIndex])) { 196 | return items.length; 197 | } 198 | if (condition(items[minIndex])) { 199 | return minIndex; 200 | } 201 | 202 | while (minIndex < maxIndex) { 203 | var currentIndex = (minIndex + maxIndex) >> 1; 204 | var currentItem = items[currentIndex]; 205 | if (condition(currentItem)) { 206 | maxIndex = currentIndex; 207 | } else { 208 | minIndex = currentIndex + 1; 209 | } 210 | } 211 | return minIndex; /* === maxIndex */ 212 | } 213 | 214 | /** 215 | * Generic helper to find out what elements are visible within a scroll pane. 216 | */ 217 | function getVisibleElements(scrollEl, views, sortByVisibility) { 218 | var top = scrollEl.scrollTop, bottom = top + scrollEl.clientHeight; 219 | var left = scrollEl.scrollLeft, right = left + scrollEl.clientWidth; 220 | 221 | function isElementBottomBelowViewTop(view) { 222 | var element = view.div; 223 | var elementBottom = 224 | element.offsetTop + element.clientTop + element.clientHeight; 225 | return elementBottom > top; 226 | } 227 | 228 | var visible = [], view, element; 229 | var currentHeight, viewHeight, hiddenHeight, percentHeight; 230 | var currentWidth, viewWidth; 231 | var firstVisibleElementInd = (views.length === 0) ? 
0 : 232 | binarySearchFirstItem(views, isElementBottomBelowViewTop); 233 | 234 | for (var i = firstVisibleElementInd, ii = views.length; i < ii; i++) { 235 | view = views[i]; 236 | element = view.div; 237 | currentHeight = element.offsetTop + element.clientTop; 238 | viewHeight = element.clientHeight; 239 | 240 | if (currentHeight > bottom) { 241 | break; 242 | } 243 | 244 | currentWidth = element.offsetLeft + element.clientLeft; 245 | viewWidth = element.clientWidth; 246 | if (currentWidth + viewWidth < left || currentWidth > right) { 247 | continue; 248 | } 249 | hiddenHeight = Math.max(0, top - currentHeight) + 250 | Math.max(0, currentHeight + viewHeight - bottom); 251 | percentHeight = ((viewHeight - hiddenHeight) * 100 / viewHeight) | 0; 252 | 253 | visible.push({ 254 | id: view.id, 255 | x: currentWidth, 256 | y: currentHeight, 257 | view: view, 258 | percent: percentHeight 259 | }); 260 | } 261 | 262 | var first = visible[0]; 263 | var last = visible[visible.length - 1]; 264 | 265 | if (sortByVisibility) { 266 | visible.sort(function(a, b) { 267 | var pc = a.percent - b.percent; 268 | if (Math.abs(pc) > 0.001) { 269 | return -pc; 270 | } 271 | return a.id - b.id; // ensure stability 272 | }); 273 | } 274 | return {first: first, last: last, views: visible}; 275 | } 276 | 277 | /** 278 | * Event handler to suppress context menu. 279 | */ 280 | function noContextMenuHandler(e) { 281 | e.preventDefault(); 282 | } 283 | 284 | /** 285 | * Returns the filename or guessed filename from the url (see issue 3455). 286 | * url {String} The original PDF location. 287 | * @return {String} Guessed PDF file name. 288 | */ 289 | function getPDFFileNameFromURL(url) { 290 | var reURI = /^(?:([^:]+:)?\/\/[^\/]+)?([^?#]*)(\?[^#]*)?(#.*)?$/; 291 | // SCHEME HOST 1.PATH 2.QUERY 3.REF 292 | // Pattern to get last matching NAME.pdf 293 | var reFilename = /[^\/?#=]+\.pdf\b(?!.*\.pdf\b)/i; 294 | var splitURI = reURI.exec(url); 295 | var suggestedFilename = reFilename.exec(splitURI[1]) || 296 | reFilename.exec(splitURI[2]) || 297 | reFilename.exec(splitURI[3]); 298 | if (suggestedFilename) { 299 | suggestedFilename = suggestedFilename[0]; 300 | if (suggestedFilename.indexOf('%') !== -1) { 301 | // URL-encoded %2Fpath%2Fto%2Ffile.pdf should be file.pdf 302 | try { 303 | suggestedFilename = 304 | reFilename.exec(decodeURIComponent(suggestedFilename))[0]; 305 | } catch(e) { // Possible (extremely rare) errors: 306 | // URIError "Malformed URI", e.g. for "%AA.pdf" 307 | // TypeError "null has no properties", e.g. for "%2F.pdf" 308 | } 309 | } 310 | } 311 | return suggestedFilename || 'document.pdf'; 312 | } 313 | 314 | var ProgressBar = (function ProgressBarClosure() { 315 | 316 | function clamp(v, min, max) { 317 | return Math.min(Math.max(v, min), max); 318 | } 319 | 320 | function ProgressBar(id, opts) { 321 | this.visible = true; 322 | 323 | // Fetch the sub-elements for later. 324 | this.div = document.querySelector(id + ' .progress'); 325 | 326 | // Get the loading bar element, so it can be resized to fit the viewer. 327 | this.bar = this.div.parentNode; 328 | 329 | // Get options, with sensible defaults. 330 | this.height = opts.height || 100; 331 | this.width = opts.width || 100; 332 | this.units = opts.units || '%'; 333 | 334 | // Initialize heights. 
335 | this.div.style.height = this.height + this.units; 336 | this.percent = 0; 337 | } 338 | 339 | ProgressBar.prototype = { 340 | 341 | updateBar: function ProgressBar_updateBar() { 342 | if (this._indeterminate) { 343 | this.div.classList.add('indeterminate'); 344 | this.div.style.width = this.width + this.units; 345 | return; 346 | } 347 | 348 | this.div.classList.remove('indeterminate'); 349 | var progressSize = this.width * this._percent / 100; 350 | this.div.style.width = progressSize + this.units; 351 | }, 352 | 353 | get percent() { 354 | return this._percent; 355 | }, 356 | 357 | set percent(val) { 358 | this._indeterminate = isNaN(val); 359 | this._percent = clamp(val, 0, 100); 360 | this.updateBar(); 361 | }, 362 | 363 | setWidth: function ProgressBar_setWidth(viewer) { 364 | if (viewer) { 365 | var container = viewer.parentNode; 366 | var scrollbarWidth = container.offsetWidth - viewer.offsetWidth; 367 | if (scrollbarWidth > 0) { 368 | this.bar.setAttribute('style', 'width: calc(100% - ' + 369 | scrollbarWidth + 'px);'); 370 | } 371 | } 372 | }, 373 | 374 | hide: function ProgressBar_hide() { 375 | if (!this.visible) { 376 | return; 377 | } 378 | this.visible = false; 379 | this.bar.classList.add('hidden'); 380 | document.body.classList.remove('loadingInProgress'); 381 | }, 382 | 383 | show: function ProgressBar_show() { 384 | if (this.visible) { 385 | return; 386 | } 387 | this.visible = true; 388 | document.body.classList.add('loadingInProgress'); 389 | this.bar.classList.remove('hidden'); 390 | } 391 | }; 392 | 393 | return ProgressBar; 394 | })(); 395 | -------------------------------------------------------------------------------- /docs/citing-snippet.md: -------------------------------------------------------------------------------- 1 | To reference HyP3 in manuscripts, cite our documentation available at [ASF's hyp3-docs GitHub repository](https://github.com/ASFHyP3/hyp3-docs "https://github.com/ASFHyP3/hyp3-docs" ){target=_blank}: 2 | 3 | !!! note "" 4 | 5 | Hogenson, K., Kristenson, H., Kennedy, J., Johnston, A., Rine, J., Logan, T., Zhu, J., Williams, F., Herrmann, J., Smale, J., & Meyer, F. (2020). Hybrid Pluggable Processing Pipeline (HyP3): A cloud-native infrastructure for generic processing of SAR data [Computer software]. https://doi.org/10.5281/zenodo.4646138 6 | -------------------------------------------------------------------------------- /docs/contact-snippet.md: -------------------------------------------------------------------------------- 1 | Want to talk about HyP3? We would love to hear from you! 2 | 3 | Found a bug? Want to request a feature? 4 | [Open an issue](https://github.com/ASFHyP3/ASFHyP3.github.io/issues/new "Open new ASFHyP3 GitHub Issue" ){target=_blank} 5 | 6 | General questions? Suggestions? Or just want to talk to the team? 
7 | [Chat with us on Gitter](https://gitter.im/ASFHyP3/community "ASF HyP3 Gitter Community" ){target=_blank} 8 | 9 | You can also reach us by email through ASF User Services: [Email ASF User Services](mailto:uso@asf.alaska.edu "uso@asf.alaska.edu") 10 | -------------------------------------------------------------------------------- /docs/contact.md: -------------------------------------------------------------------------------- 1 | # Contact Us 2 | 3 | {% include 'contact-snippet.md' %} 4 | -------------------------------------------------------------------------------- /docs/contributing.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | 3 | Thank you for your interest in helping make custom on-demand SAR processing accessible! 4 | 5 | We're excited you would like to contribute to HyP3! Whether you're finding bugs, 6 | adding new features, fixing anything broken, or improving documentation, get 7 | started by submitting an issue or pull request! 8 | 9 | Please read our [Code of Conduct](https://github.com/ASFHyP3/.github/blob/main/CODE_OF_CONDUCT.md "HyP3 Code of Conduct" ){target=_blank} before contributing. 10 | 11 | ## Issues and Pull Requests are welcome 12 | 13 | If you have any questions or ideas, or notice any problems or bugs, and want to open an issue, great! 14 | We recommend first [searching our open issues](https://github.com/issues?q=is%3Aopen+is%3Aissue+org%3AASFHyP3 "https://github.com/issues" ){target=_blank} 15 | to see if the issue has already been submitted (we may already be working on it!). If you think your 16 | issue is new, you're welcome to [create a new issue](https://github.com/ASFHyP3/ASFHyP3.github.io/issues/new "https://github.com/ASFHyP3/ASFHyP3.github.io/issues/new" ){target=_blank} in our 17 | general issues tracker. If you know the specific [repository](https://github.com/orgs/ASFHyP3/repositories "https://github.com/orgs/ASFHyP3/repositories" ){target=_blank} that your issue pertains to, you can use its issues tracker. 18 | 19 | Found a typo, know how to fix a bug, want to update the docs, want to add a new feature? Even better! 20 | The smaller the PR, the easier it is to review and test, and the more likely it is to be successful. 21 | For major contributions, consider opening an issue describing the contribution, so we can help guide 22 | and break up the work into digestible pieces. 23 | 24 | 25 | ### Pull Request Guidelines 26 | 27 | We ask that you follow these guidelines with your contributions: 28 | 29 | #### Style 30 | 31 | We generally follow Python community standards ([PEP8](https://pep8.org/ "https://pep8.org/" ){target=_blank}), except we allow line 32 | lengths up to 120 characters. We recommend trying to keep lines 80--100 characters long, but allow 33 | up to 120 when it improves readability. 34 | 35 | #### Documentation 36 | 37 | We are working to improve our documentation! 38 | 39 | For all public-facing functions/methods (not 40 | [marked internal use](https://www.python.org/dev/peps/pep-0008/#naming-conventions "https://www.python.org/dev/peps/pep-0008/#naming-conventions" ){target=_blank}), 41 | please include [type hints](https://google.github.io/styleguide/pyguide.html#221-type-annotated-code "https://google.github.io/styleguide/pyguide.html#221-type-annotated-code" ){target=_blank} 42 | (when reasonable) and a [docstring](https://www.python.org/dev/peps/pep-0257/ "https://www.python.org/dev/peps/pep-0257" ){target=_blank} 43 | formatted [Google style](https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings "https://google.github.io/styleguide/pyguide.html#38-comments-and-docstrings" ){target=_blank}.
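For example, a documented function following these conventions might look like this (a purely illustrative sketch; `utm_zone` is a hypothetical helper, not part of any HyP3 package):

```python
def utm_zone(longitude: float) -> int:
    """Determine the UTM zone number for a given longitude.

    Args:
        longitude: Longitude in decimal degrees, in the range [-180, 180).

    Returns:
        The UTM zone number, from 1 to 60.
    """
    return int((longitude + 180) // 6) + 1
```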
44 | 45 | #### Tests 46 | 47 | All of the automated tests for the project need to pass before your submission will be accepted. 48 | 49 | If you add new functionality, please consider adding tests for that functionality as well. 50 | 51 | #### Commits 52 | 53 | * Make small commits that show the individual changes you are making 54 | * Write descriptive commit messages that explain your changes 55 | 56 | Example of a good commit message: 57 | 58 | ``` 59 | Improve contributing guidelines. Fixes #10 60 | 61 | Improve contributing docs and consolidate them in the standard location 62 | https://help.github.com/articles/setting-guidelines-for-repository-contributors/ 63 | ``` 64 | -------------------------------------------------------------------------------- /docs/dems.md: -------------------------------------------------------------------------------- 1 | # Digital Elevation Models 2 | Digital Elevation Models (DEMs) are required when processing SAR data into higher-level products, such as the [Radiometric Terrain Correction (RTC)](products.md#rtc "RTC Products" ){target=_blank} and [Interferometric SAR (InSAR)](products.md#insar "InSAR Products" ){target=_blank} products available [On Demand](https://search.asf.alaska.edu/#/?topic=onDemand "Vertex On Demand Documentation" ){target=_blank} from ASF. 3 | 4 | In the past, ASF maintained a collection of DEMs that were pre-processed as appropriate for SAR workflows, and applied a preference hierarchy so that the best available DEM in any given area would be automatically selected for processing. With the public release of the [GLO-30 Copernicus DEM](https://dataspace.copernicus.eu/explore-data/data-collections/copernicus-contributing-missions/collections-description/COP-DEM "Copernicus DEM" ){target=_blank}, we have changed our DEM strategy to leverage a [cloud-hosted copy](https://registry.opendata.aws/copernicus-dem/ "https://registry.opendata.aws/copernicus-dem" ){target=_blank} of the global Copernicus DEM. This is now the only DEM option available for processing RTC and InSAR products. 5 | 6 | !!! tip "Removal of option to use Legacy DEMs for RTC Processing" 7 | 8 | Users no longer have the option to use legacy DEMs when processing RTC jobs [On Demand in Vertex](https://search.asf.alaska.edu/#/?topic=onDemand "Vertex On Demand Documentation" ){target=_blank} or when using the [API](https://hyp3-docs.asf.alaska.edu/using/api/ "https://hyp3-docs.asf.alaska.edu/using/api" ){target=_blank} or [SDK](https://hyp3-docs.asf.alaska.edu/using/sdk/ "https://hyp3-docs.asf.alaska.edu/using/sdk" ){target=_blank}.
The [Copernicus GLO-30 DEM](https://dataspace.copernicus.eu/explore-data/data-collections/copernicus-contributing-missions/collections-description/COP-DEM "Copernicus DEM" ){target=_blank} is now used for all RTC processing. 9 | 10 | We use the 2022 Release of the [Copernicus GLO-30 Public DEM](https://dataspace.copernicus.eu/explore-data/data-collections/copernicus-contributing-missions/collections-description/COP-DEM "Copernicus DEM" ){target=_blank}, [available on AWS](https://registry.opendata.aws/copernicus-dem/ "Registry of Open Data on AWS - Copernicus DEM" ){target=_blank}. 11 | 12 | !!! tip "Coverage gaps in Copernicus DEM GLO-30 filled using GLO-90" 13 | 14 | The 2022 release of the Copernicus DEM GLO-30 dataset does not provide coverage over Armenia and Azerbaijan. In the past, we have not supported On Demand product generation over those areas, due to the lack of DEM coverage. We now use the Copernicus DEM GLO-90 to fill those gaps. 15 | 16 | The GLO-90 dataset has a pixel spacing of 90 meters, which is not as detailed as the 30-m pixel spacing in the GLO-30 DEM, but it does allow us to provide On Demand products in these regions, where they were previously unavailable. 17 | 18 | Table 1 summarizes ASF's source DEM. The Copernicus GLO-30 DEM is now the only option available for both RTC and InSAR processing. Note that the DEM is reprojected to the UTM Zone (WGS84) appropriate for the granule location, and a geoid correction is applied before the DEM is used for processing. For RTC processing, the DEM is resampled to the pixel spacing of the output product. For InSAR processing, the DEM is resampled to twice the pixel spacing of the output InSAR product (160 m for 20x4 looks, 80 m for 10x2 looks). 19 | 20 | | Resolution | DEM | Vertical Datum | Area | Posting | Priority | 21 | |------------|---------|----------------|--------------------------------|-----------------|----------| 22 | | Medium | GLO-30 | EGM2008 | Global | 1 arc second | Default | 23 | 24 | *Table 1: DEM used for On Demand processing. The Copernicus DEM is the only option available when processing RTC and InSAR products.* 25 | 26 | When ordering On Demand products, you can choose to include a copy of the DEM used for processing in the output product package. For RTC products, this DEM copy is converted to 16-bit signed integer format, but is otherwise the same as the DEM used in the RTC process. For InSAR products, the DEM copy is output in 32-bit float format, and is upsampled from the DEM resolution used for processing to match the pixel spacing of the output InSAR products. 27 | 28 | Note that the height values will differ from the original source DEM in all cases, due to the geoid correction applied to prepare the DEM for use in SAR processing. 29 |
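For example, when submitting an RTC job with the [HyP3 SDK](using/sdk.md "HyP3 SDK" ){target=_blank}, a copy of the DEM can be requested with the `include_dem` option. A minimal sketch (the granule name is a placeholder, and Earthdata Login credentials are assumed):

```python
import hyp3_sdk as sdk

hyp3 = sdk.HyP3(username='myusername', password='mypassword')

# Request an RTC product that also packages the geoid-corrected DEM
# used for processing (granule name shortened here for readability)
job = hyp3.submit_rtc_job(granule='S1A_IW_SLC__1SDV_...', name='rtc-with-dem',
                          include_dem=True)
```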
30 | ## Copernicus DEM 31 | 32 | The [GLO-30 Copernicus DEM](https://dataspace.copernicus.eu/explore-data/data-collections/copernicus-contributing-missions/collections-description/COP-DEM "Copernicus DEM" ){target=_blank} provides global coverage at 30-m pixel spacing (with the current exception of an area covering Armenia and Azerbaijan, see Figure 2). 33 | 34 | When an On Demand job is requested, we download the required DEM tiles from the Copernicus Digital Elevation Model (DEM) GLO-30 Public dataset available in the [Registry of Open Data on AWS](https://registry.opendata.aws/copernicus-dem/ "https://registry.opendata.aws/copernicus-dem" ){target=_blank}, managed by [Sinergise](https://www.sinergise.com/ "https://www.sinergise.com" ){target=_blank}. We mosaic the tiles and reproject them to the appropriate UTM Zone for the location of the SAR granule to be processed, resampling them as required for processing. A geoid correction is applied before the DEM is used for On Demand processing. 35 | 36 | For the area that does not have coverage with the GLO-30 DEM, we use the Copernicus DEM GLO-90 dataset, which provides elevation data at 90-meter pixel spacing. Users ordering products over this area should be aware that a lower-resolution DEM is used for processing. 37 | 38 | Figure 1 shows the coverage of the Copernicus DEM GLO-30 Public dataset, and Figure 2 details the land area currently covered only by the GLO-90 DEM, at 90-m pixel spacing. 39 | 40 | ![Figure 1](images/cop-coverage-map.png "Copernicus DEM GLO-30 coverage map") 41 | 42 | *Figure 1: Copernicus DEM GLO-30 coverage map* 43 | 44 | ![Figure 2](images/cop-missing-100.png "Detail of area currently not covered by Copernicus DEM GLO-30. Jobs over this area will be filled with GLO-90.") 45 | 46 | *Figure 2: Detail of area currently not covered by Copernicus DEM GLO-30. On Demand jobs requested over this area will use the Copernicus DEM GLO-90.* 47 | 48 | ## Special Use DEMs 49 | 50 | [AutoRIFT](products.md#autorift "AutoRIFT Products" ){target=_blank}, a process developed by the [NASA MEaSUREs ITS_LIVE](https://its-live.jpl.nasa.gov/ "https://its-live.jpl.nasa.gov" ){target=_blank} project, uses custom Greenland and Antarctica DEMs with 240-m resolution. The DEM, associated process input files, and their details are available on the [ITS_LIVE project website](https://its-live.jpl.nasa.gov/ "https://its-live.jpl.nasa.gov" ){target=_blank}. 51 | -------------------------------------------------------------------------------- /docs/guides/Sentinel_RTC_ATBD_v3.1.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/guides/Sentinel_RTC_ATBD_v3.1.pdf -------------------------------------------------------------------------------- /docs/guides/Sentinel_RTC_Product_Guide.pdf/index.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | Redirecting... 6 | 7 | 8 | 9 | 10 | 11 | 12 | Redirecting... 13 | 14 | 15 | -------------------------------------------------------------------------------- /docs/guides/introduction_to_sar.md: -------------------------------------------------------------------------------- 1 | # Introduction to SAR 2 | --------------------- 3 | 4 | ## How SAR Operates 5 | 6 | SAR is an active sensor that transmits pulses and listens for echoes, called backscatter. The backscatter is recorded in both phase and amplitude. The phase is used to determine the distance from the sensor to a target, and amplitude indicates the amount of the sent signal that returns to the sensor. Amplitude measurements provide information about the roughness, geometry, wetness, and dielectric constant of that target, while phase measurements are used for SAR interferometry. 7 | 8 | ### Propagation of EM Waves 9 | 10 | At the most fundamental level, SAR transmits an encoded burst, called a chirp, of electromagnetic energy (Figure 1) and then listens for the return signals, called echoes. The wavelength of this chirp is in the centimeter range, with X-band (~3 cm), C-band (~6 cm), and L-band (~23 cm) all in common use. 11 |
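As a quick worked example relating frequency to wavelength (a sketch; the ~5.405 GHz value is Sentinel-1's C-band center frequency):

```python
SPEED_OF_LIGHT = 299_792_458  # m/s

frequency_hz = 5.405e9  # Sentinel-1 C-band center frequency
wavelength_cm = SPEED_OF_LIGHT / frequency_hz * 100

print(f'{wavelength_cm:.1f} cm')  # ~5.5 cm, i.e. C-band
```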
12 | ![Figure 1](../images/microwave-emr.png "The spectrum of electromagnetic radiation. SAR is imaged using microwave wavelengths") 13 | 14 | *Figure 1: The spectrum of electromagnetic radiation. SAR is imaged using microwave wavelengths. The microwave range extends from about 1 mm to 1 m in wavelength, with most radar applications using bands within the 3 mm to 30 cm range.* 15 | 16 | ### Polarizations 17 | 18 | Polarization refers to the orientation in which an electromagnetic wave oscillates as it travels. A horizontal wave is transmitted so that it oscillates in a plane parallel to the surface imaged, while a vertical wave oscillates in a plane perpendicular to the surface imaged. 19 | 20 | There are four different polarization combinations commonly used by SAR sensors: VV, VH, HV and HH, as listed in Table 1. The first letter indicates the polarization used to transmit the signal, and the second letter indicates the polarization of the measured return, as illustrated in Figure 2. 21 | 22 | *Table 1: SAR Polarizations* 23 | 24 | | Polarization Code | Transmit Signal Polarization | Return Signal Polarization | 25 | |-------------------|------------------------------|----------------------------| 26 | | VV | Vertical | Vertical | 27 | | VH | Vertical | Horizontal | 28 | | HV | Horizontal | Vertical | 29 | | HH | Horizontal | Horizontal | 30 | 31 | 32 | ![Figure 2](../images/polarizations_ASF_dashed.png "SAR signals are transmitted either vertically or horizontally. Likewise, the sensor can listen for either horizontally or vertically polarized returns.") 33 | 34 | *Figure 2: SAR signals are transmitted and received either vertically (V) or horizontally (H). This gives the potential for four different polarization combinations (transmit listed first, receive second): VV, VH, HH, and HV. Credit: ASF* 35 | 36 | Different SAR sensors have different polarization capabilities. Single-pol sensors send out a signal in one polarization and can only measure returns that are in that same polarization (VV or HH). Dual-pol sensors send out a signal in one polarization, but can measure returns that are in that same polarization (co-pol: VV or HH) as well as returns that are in the other polarization (cross-pol: VH or HV). Some SAR systems can transmit chirps in both horizontal and vertical polarizations and listen for both horizontal and vertical returns, giving full quad-pol capabilities (VV, VH, HV, HH). 37 | 38 | Polarimetry is an emerging field of SAR processing that is used in a number of applications, such as measuring vegetation properties and changes in vegetation over time. Additional applications include oceanography, geology, and disaster response. 39 | 40 | ------- 41 | 42 | ## Backscatter Contributors 43 | 44 | Many factors influence the backscatter received by the SAR sensor. The wavelength used by the SAR influences the signal's penetration, and, thus, what is being imaged. Surface roughness will modulate the backscatter returns from nothing up to a strong return, decreasing or increasing the brightness of the resulting pixel. Scattering mechanisms like volume scattering or double bounce can strongly influence the brightness of the SAR image as well, sometimes resulting in total saturation by the received signal. 45 | 46 | ### Wavelength 47 | 48 | The wavelength of the SAR system influences the amount of ground penetration that occurs. As shown in Figure 3, X-band has the least penetration, scattering from the top of the canopy in vegetated areas. All three bands will penetrate dry sand, with stronger returns from both C-band and L-band.
L-band has the most penetration overall, with returns from the ground in vegetated areas, strong returns from substances under dry alluvium, and deep penetration of ice and snow. 49 | 50 | ![Figure 3](../images/SAR_band_types.png "Effects of SAR band on penetration of surfaces. The longer the wavelength, the deeper the penetration through most land types.") 51 | 52 | *Figure 3: Effects of the SAR band on penetration of surfaces. The longer the wavelength, the deeper the penetration through most land types. Credit: [The SAR Handbook](https://gis1.servirglobal.net/TrainingMaterials/SAR/Chp2Content.pdf "The SAR Handbook Chapter 2" ){target=_blank}* 53 | 54 | ### Surface Roughness 55 | 56 | The strength of the return, or backscatter, is partially based on the relative roughness of the surface imaged. The smoother the surface, the more reflection away from the sensor, while rough surfaces give a much stronger return towards the imaging platform. As can be seen in Figure 4, if the height of the surface's roughness is less than 1/32 of the wavelength, mostly specular reflection occurs. If the height of the surface's roughness is greater than 1/2 the wavelength used, the echoes are scattered in all directions, giving a strong return back to the sensor. 57 | 58 | ![Figure 4](../images/wavelength_vs_roughness.png "The amount of backscatter from a surface depends largely on the surface's roughness") 59 | 60 | *Figure 4: The amount of backscatter from a surface depends largely on the surface's roughness, with smooth surfaces getting the least returns and rough surfaces getting the strongest returns. Credit: [The SAR Handbook](https://gis1.servirglobal.net/TrainingMaterials/SAR/Chp2Content.pdf "The SAR Handbook Chapter 2" ){target=_blank}* 61 | 62 | ### Types of Scattering 63 | 64 | ![Figure 5](../images/scattering_types.png "Scattering mechanisms. Rough surfaces give bright returns due to the wide scattering. Vegetated surfaces cause volumetric scattering, which has a darker return to the imaging platform. Double bounce returns, found mostly in urban areas, give the brightest return, as the majority of the energy is re-directed back towards the sensor.") 65 | 66 | *Figure 5: Scattering mechanisms. Rough surfaces give bright returns due to the wide scattering. Vegetated surfaces cause volumetric scattering, which gives a darker return to the imaging platform. Double bounce returns, found mostly in urban areas, give the brightest return, as the majority of the energy is re-directed back towards the sensor. Credit: [The SAR Handbook](https://gis1.servirglobal.net/TrainingMaterials/SAR/Chp2Content.pdf "The SAR Handbook Chapter 2" ){target=_blank}* 67 | 68 | The resolution of Sentinel-1 SAR images is roughly 10 m. This means that a square of 10 meters on the ground is represented by a single pixel in the SAR image. The relative roughness of this patch of ground compared to the wavelength used will affect the backscatter strength (see Figure 4). However, there are additional types of bounce mechanisms beyond specular and diffuse, as shown in Figure 5. In vegetation, *volumetric* scattering occurs when signals bounce around inside the vegetation imaged. The *double bounce* mechanism, which occurs in urban areas and is exploited by corner reflectors, causes the chirp to be reflected directly back to the sensor, producing a very strong backscatter. Double bounce returns are so strong in some places that they cause oversaturation of the sensor, resulting in visible sidelobes. These sidelobes are evidenced by bright crosses surrounding the double bounce target.
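As a back-of-the-envelope illustration of the roughness criteria shown in Figure 4, the sketch below computes the smooth (< 1/32 wavelength) and rough (> 1/2 wavelength) height thresholds for the approximate band wavelengths given earlier:

```python
# Approximate band wavelengths from the Propagation of EM Waves section
wavelengths_cm = {'X-band': 3, 'C-band': 6, 'L-band': 23}

for band, wavelength in wavelengths_cm.items():
    smooth_below = wavelength / 32  # mostly specular reflection below this height
    rough_above = wavelength / 2    # strong diffuse scattering above this height
    print(f'{band}: smooth below {smooth_below:.2f} cm, rough above {rough_above:.1f} cm')
```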
69 | 70 | ------------------------ 71 | 72 | ## SAR Scale 73 | 74 | SAR backscatter is recorded in both return strength and phase. Each pixel in a single-look complex SAR image represents these values as a complex number (I,Q). To create the visible images we are used to looking at, the SAR image is *detected*. This process calculates the square root of the sum of the squares of the I and Q values found in an SLC image, creating a so-called intensity image. This image is real-valued, and, when calibrated, gives the absolute backscatter of the surface imaged. Detected images can be stored using several different scales, including power, amplitude, and dB. Note that the default scale of Sentinel-1 RTC products from HyP3 is power. However, in some cases, it may be desirable to convert the actual pixel values to a different scale. Two other scales commonly used for SAR data are amplitude and dB. 75 | 76 | ### Power Scale 77 | 78 | The values in this scale are generally very close to zero, so the dynamic range of the SAR image can be easily skewed by a few bright scatterers in the image. Power scale is appropriate for statistical analysis of the SAR dataset, but may not always be the best option for data visualization. 79 | 80 | When viewing a SAR image in power scale in a GIS environment, it may appear mostly or all black, and you may need to adjust the stretch to see features in the image. Often applying a stretch of 2 standard deviations, or setting the Min-Max stretch values to 0 and 0.3, will greatly improve the appearance of the image. You can adjust the stretch as desired to display your image to full advantage. Be aware that this does not change the actual pixel values. 81 | 82 | ### Amplitude Scale 83 | 84 | Amplitude scale is the square root of the power scale values. This brightens the darker pixels and darkens the brighter pixels, narrowing the dynamic range of the image. In many cases, amplitude scale presents a pleasing grayscale display of RTC images. Amplitude scale works well for calculating log difference ratios (see [ASF Sentinel-1 RTC Product Guide](../guides/rtc_product_guide.md#change-detection-using-rtc-data "Link to Change Detection section in ASF Sentinel-1 RTC Product Guide" ){target=_blank}). 85 | 86 | ### dB Scale 87 | 88 | The dB scale is calculated as 10 times the log10 of the power scale values. This scale brightens the pixels, allowing for better differentiation among very dark pixels. When identifying water on the landscape, this is often a good scale to use; the water pixels generally remain very dark, while the terrestrial pixels become brighter (see [Identifying Surface Water](../guides/rtc_product_guide.md#identifying-surface-water "Link to Identifying Surface Water section in ASF Sentinel-1 RTC Product Guide" ){target=_blank}). 89 | 90 | This scale is not always the best choice for general visualization of SAR products, as it can give a washed-out appearance, and because it is in a log scale, it is not appropriate for all types of statistical analyses. 91 |
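The three scales are simple transformations of one another. A minimal sketch of the conversions described above (hypothetical pixel values; `numpy` assumed):

```python
import numpy as np

# Hypothetical power-scale pixel values from an RTC product
power = np.array([0.0004, 0.0152, 0.2815])

amplitude = np.sqrt(power)   # amplitude scale: square root of power
db = 10 * np.log10(power)    # dB scale: 10 * log10(power)

print(amplitude)  # approx. [0.02, 0.123, 0.531]
print(db)         # approx. [-33.98, -18.18, -5.51]
```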
92 | ------------------------ 93 | 94 | ## Geometric Distortions 95 | 96 | There are a number of distortions inherent to SAR data due to the side-looking nature of the sensor, and these impacts will be more prevalent in areas with rugged terrain. The process of radiometric terrain correction addresses the geometric distortions that lead to geolocation errors in terrain features, and also normalizes the backscatter values based on the actual area contributing returns. This process generates an image that aligns well with other geospatial data and is suitable for GIS applications or time-series analysis. 97 | 98 | The key distortions present in SAR images are foreshortening, layover, and shadow (Figure 6). 99 | 100 | ![Figure 6](../images/sar_distortions.png "Distortions induced by side-looking SAR. Ground points a, b, c are ‘seen’ by radar as points a’, b’, c’ in the slant range.") 101 | 102 | *Figure 6: Distortions induced by side-looking SAR. Ground points a, b, c are ‘seen’ by radar as points a’, b’, c’ in the slant range. Credit: Franz J. Meyer* 103 | 104 | In the case of **foreshortening**, the backscatter from the front side of the mountain is compressed, with returns from a large area arriving back to the sensor at about the same time. This results in the front slope being displayed as a narrow, bright band. 105 | 106 | When **layover** occurs, returns from the front slope (and potentially even some of the area before the slope starts) are received at the same time as returns from the back slope. Thus, the area in front of the slope is projected onto the back side in the slant range image. In this case, the data from the front slope cannot be extracted from the returns. 107 | 108 | Another condition that results in missing data is radar **shadow**. In this case, the angle of the back slope is such that the sensor cannot image it at all. These areas with steep back slopes offer no information to the SAR sensor. 109 | 110 | When RTC is performed, foreshortened areas are corrected based on the DEM. Areas impacted by layover or shadow, however, do not actually have data returns to correct. In this case, the pixels in the resulting RTC image will have a value of No Data. We do not interpolate missing data; users who would like to fill holes with estimated values will need to do so as appropriate for their particular application. 111 | 112 | ## Speckle 113 | 114 | In most cases, the patch of ground illuminated by the SAR transmitter will not be homogeneous. Instead, it is composed of many different types of individual scatterers. The scatterers may interfere with each other, either strengthening or weakening the return. This creates a grainy (salt & pepper) appearance in SAR imagery. This is a result of the nature of SAR and, thus, occurs in all SAR scenes. Speckle in SAR images can be mitigated by multi-looking, which, in effect, uses averaging to smooth out the image, resulting in a more homogeneous appearance at the expense of resolution. 115 | -------------------------------------------------------------------------------- /docs/guides/rtc_atbd.md: -------------------------------------------------------------------------------- 1 | # RTC Algorithm Theoretical Basis 2 | 3 | 6 | -------------------------------------------------------------------------------- /docs/how_it_works.md: -------------------------------------------------------------------------------- 1 | # How it Works 2 | 3 | HyP3 is built around three core concepts: Platform, Plugins, and Products. 4 | 5 | ## Platform 6 | 7 | The HyP3 platform makes it easy for users to request processing, monitor their requests, and download processed 8 | products. The platform delegates each processing request to a plugin on the user's behalf. A deployment of the HyP3 9 | platform can be integrated with any number of plugins.
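For example, a minimal sketch of this request/monitor/download cycle using the [HyP3 SDK](using/sdk.md "HyP3 SDK" ){target=_blank} (the granule name is a placeholder; Earthdata Login credentials are assumed):

```python
import hyp3_sdk as sdk

hyp3 = sdk.HyP3(username='myusername', password='mypassword')

# Request processing; the platform delegates the job to the RTC plugin
job = hyp3.submit_rtc_job(granule='S1A_IW_SLC__1SDV_...', name='example-job')

# Monitor the request until it completes, then download the product
job = hyp3.watch(job)
job.download_files()
```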
10 | 11 | ## Plugins 12 | 13 | Plugins are the workhorses of HyP3. Each plugin implements a particular processing workflow and 14 | produces a product. At their most basic level, HyP3 plugins are 15 | [Docker containers](https://www.docker.com/resources/what-container "https://www.docker.com/resources/what-container" ){target=_blank} 16 | that handle the entire processing workflow for a single product, including: 17 | 18 | * marshaling the required input data 19 | * performing any needed transformations and computations on the data 20 | * creating the final product 21 | * uploading the product to an AWS S3 bucket for distribution 22 | 23 | Plugins only need to define a simple interface (entrypoint) that HyP3 understands and uses to run the container. 24 | By encapsulating the entire workflow for generating a single product, HyP3 can arbitrarily scale to meet user needs. 25 | 26 | ## Products 27 | 28 | Products are the end result of processing, typically one or more data files. For more information about 29 | our current products, see our [products page](products.md "HyP3 Products" ){target=_blank}. 30 | -------------------------------------------------------------------------------- /docs/images/HyP3-graphic-only.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/HyP3-graphic-only.png -------------------------------------------------------------------------------- /docs/images/SAR_band_types.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/SAR_band_types.png -------------------------------------------------------------------------------- /docs/images/api-401-unauthorized.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/api-401-unauthorized.png -------------------------------------------------------------------------------- /docs/images/asf_burst_insar_names.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/asf_burst_insar_names.png -------------------------------------------------------------------------------- /docs/images/asf_gunw_names.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/asf_gunw_names.png -------------------------------------------------------------------------------- /docs/images/asf_insar_names.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/asf_insar_names.png -------------------------------------------------------------------------------- /docs/images/baseline_asf.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/baseline_asf.png --------------------------------------------------------------------------------
/docs/images/burst-contiguity.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/burst-contiguity.png -------------------------------------------------------------------------------- /docs/images/cop-coverage-map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/cop-coverage-map.png -------------------------------------------------------------------------------- /docs/images/cop-missing-100.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/cop-missing-100.png -------------------------------------------------------------------------------- /docs/images/dem-coverage-map.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/dem-coverage-map.png -------------------------------------------------------------------------------- /docs/images/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/favicon.ico -------------------------------------------------------------------------------- /docs/images/frame_granule_overlap.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/frame_granule_overlap.png -------------------------------------------------------------------------------- /docs/images/get_jobs_query.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/get_jobs_query.png -------------------------------------------------------------------------------- /docs/images/get_user_execute.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/get_user_execute.png -------------------------------------------------------------------------------- /docs/images/get_user_try.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/get_user_try.png -------------------------------------------------------------------------------- /docs/images/insar-tutorial.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/insar-tutorial.png -------------------------------------------------------------------------------- /docs/images/landsat-false-color-composite.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/landsat-false-color-composite.jpg -------------------------------------------------------------------------------- 
/docs/images/log-difference-raster.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/log-difference-raster.png -------------------------------------------------------------------------------- /docs/images/microwave-emr.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/microwave-emr.png -------------------------------------------------------------------------------- /docs/images/opera-browse-download.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/opera-browse-download.png -------------------------------------------------------------------------------- /docs/images/opera-mask-compare.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/opera-mask-compare.png -------------------------------------------------------------------------------- /docs/images/opera-rtc-static-layer-coverage.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/opera-rtc-static-layer-coverage.png -------------------------------------------------------------------------------- /docs/images/opera-rtc-vertex-results.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/opera-rtc-vertex-results.png -------------------------------------------------------------------------------- /docs/images/opera-rtc-vertex-search.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/opera-rtc-vertex-search.png -------------------------------------------------------------------------------- /docs/images/opera-rtc-vertex-static-id.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/opera-rtc-vertex-static-id.png -------------------------------------------------------------------------------- /docs/images/opera-rtc-vertex-static.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/opera-rtc-vertex-static.png -------------------------------------------------------------------------------- /docs/images/orbit_in_name.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/orbit_in_name.png -------------------------------------------------------------------------------- /docs/images/phase_diff.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/phase_diff.png -------------------------------------------------------------------------------- /docs/images/pixel-spacing-compare.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/pixel-spacing-compare.png -------------------------------------------------------------------------------- /docs/images/polarizations_ASF_dashed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/polarizations_ASF_dashed.png -------------------------------------------------------------------------------- /docs/images/post_jobs_execute.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/post_jobs_execute.png -------------------------------------------------------------------------------- /docs/images/rtc-tutorial.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/rtc-tutorial.png -------------------------------------------------------------------------------- /docs/images/s1b_hole_alaska.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/s1b_hole_alaska.png -------------------------------------------------------------------------------- /docs/images/sar-optical-fusion.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/sar-optical-fusion.jpg -------------------------------------------------------------------------------- /docs/images/sar_distortions.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/sar_distortions.png -------------------------------------------------------------------------------- /docs/images/scattering_types.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/scattering_types.png -------------------------------------------------------------------------------- /docs/images/seasonal-change-example.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/seasonal-change-example.jpg -------------------------------------------------------------------------------- /docs/images/sentinel-1-rtc-image.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/sentinel-1-rtc-image.jpg -------------------------------------------------------------------------------- /docs/images/slc_jitter.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/slc_jitter.png -------------------------------------------------------------------------------- /docs/images/three_rader_backscatter_convention.jpg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/three_rader_backscatter_convention.jpg -------------------------------------------------------------------------------- /docs/images/vertex-GUNW-dataset-selection.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/vertex-GUNW-dataset-selection.png -------------------------------------------------------------------------------- /docs/images/vertex-dataset-selection.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/vertex-dataset-selection.png -------------------------------------------------------------------------------- /docs/images/vertex-sign-in.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/vertex-sign-in.png -------------------------------------------------------------------------------- /docs/images/vertex.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/vertex.png -------------------------------------------------------------------------------- /docs/images/water-histogram.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/water-histogram.png -------------------------------------------------------------------------------- /docs/images/water-mask.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/water-mask.png -------------------------------------------------------------------------------- /docs/images/watermask-tutorial.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/watermask-tutorial.png -------------------------------------------------------------------------------- /docs/images/wavelength_vs_roughness.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/ASFHyP3/hyp3-docs/ae52707a6f84926ec9ea751544a7621e1c44b1ee/docs/images/wavelength_vs_roughness.png -------------------------------------------------------------------------------- /docs/index.md: -------------------------------------------------------------------------------- 1 | # ASF HyP3 2 | 3 | ***Alaska Satellite Facility's Hybrid Pluggable Processing Pipeline*** 4 | 5 | HyP3 (pronounced *"hype"*) is a service for processing Synthetic Aperture Radar (SAR) imagery that addresses 
many common issues for users of 6 | SAR data: 7 | 8 | * Most SAR datasets require at least some processing to remove distortions before they are analysis-ready 9 | * SAR processing is computationally intensive 10 | * Software for SAR processing is complicated to use and/or prohibitively expensive 11 | * Producing analysis-ready SAR data has a steep learning curve that acts as a barrier to entry 12 | 13 | HyP3 solves these problems by providing a free service where people can request SAR processing on-demand. These 14 | processing requests are picked up by automated systems, which handle the complexity of SAR processing on behalf of the 15 | user. HyP3 doesn't require users to have much prior knowledge of SAR processing before getting started; users only need to 16 | submit the input data and set a few optional parameters if desired. With HyP3, analysis-ready products are just a few 17 | clicks away. 18 | 19 | ## Getting started 20 | 21 | {% include 'using-snippet.md' %} 22 | 23 | ## What's New 24 | 25 | Follow [@ASFHyP3](https://twitter.com/ASFHyP3 "https://twitter.com/ASFHyP3" ){target=_blank} on Twitter, or check our [What's New](whats_new.md "HyP3 What's New" ){target=_blank} page to keep up to date on all things HyP3! 26 | 27 | ## Contact Us 28 | 29 | {% include 'contact-snippet.md' %} 30 | -------------------------------------------------------------------------------- /docs/javascripts/mathjax.js: -------------------------------------------------------------------------------- 1 | window.MathJax = { 2 | tex: { 3 | inlineMath: [["\\(", "\\)"]], 4 | displayMath: [["\\[", "\\]"]], 5 | processEscapes: true, 6 | processEnvironments: true 7 | }, 8 | options: { 9 | ignoreHtmlClass: ".*|", 10 | processHtmlClass: "arithmatex" 11 | } 12 | }; 13 | 14 | document$.subscribe(() => { 15 | MathJax.typesetPromise() 16 | }) 17 | -------------------------------------------------------------------------------- /docs/plugins.md: -------------------------------------------------------------------------------- 1 | # Plugins 2 | Plugins are the science backbone of HyP3; they do all of the data processing and product generation. 3 | Plugins can be added to HyP3 to generate new science products, or support different 4 | tools/software/algorithms/options/etc. that are not currently supported by HyP3. 5 | 6 | ## How plugins work 7 | At their most basic level, HyP3 plugins are Docker containers with an interface (entrypoint) HyP3 understands. 8 | Plugins handle the entire processing workflow for a single product, including: 9 | 10 | * marshaling the required input data 11 | * performing any needed transformations and computations on the data 12 | * creating the final product 13 | * uploading the product to an AWS S3 bucket for distribution 14 | 15 | By encapsulating the entire workflow for generating a single product, HyP3 can arbitrarily scale to meet user demand. 16 | 17 | ## Developing a plugin 18 | To create a new HyP3 plugin, we recommend starting from a Minimal Working Example (MWE) that generates 19 | the product your plugin will produce. Importantly, the MWE should be entirely self-contained and 20 | include all the necessary data to generate the product. 21 | 22 | Once a MWE is developed, it's important to define your plugin's interface -- this is where HyP3 connects 23 | product generation and users. When designing the interface, you may find it helpful to ask yourself: 24 | 25 | * what options do I want to provide to users? 26 | * what's the *minimal* set of information I need to gather from users?
27 | * is this information easy for users to input? 28 | * is this information serializable? For example, can the information be written in a JSON file? 29 | * could I define this information more simply? 30 | 31 | Once a MWE is developed and an interface is defined, you can use our 32 | [HyP3 plugin cookiecutter](https://github.com/ASFHyP3/hyp3-cookiecutter "https://github.com/ASFHyP3/hyp3-cookiecutter" ){target=_blank} 33 | to help you build a plugin that conforms to the plugin requirements. 34 | 35 | ### Plugin requirements 36 | To be supported by HyP3, a plugin must meet a few requirements: 37 | 38 | * the plugin must be a Docker image that is hosted in a repository where HyP3 will be able to pull it 39 | * the plugin's entrypoint must minimally accept the following arguments: 40 | * `--bucket BUCKET-NAME` where `BUCKET-NAME` is the name of an AWS S3 bucket that output products will be uploaded to 41 | * `--bucket-prefix BUCKET-PREFIX` where `BUCKET-PREFIX` is a string appended to the key of any file uploaded to AWS S3 42 | (this is effectively a subfolder in AWS S3) 43 | * `--username USER` where `USER` is the username used to authenticate to Earthdata Login 44 | * `--password PASSWORD` where `PASSWORD` is the password used to authenticate to Earthdata Login 45 | * all necessary user input must be able to be provided through entrypoint arguments 46 | * when uploading files to the S3 bucket: 47 | * product files must be tagged with `filetype: product` 48 | * if you wish to upload thumbnails or browse images, they must be tagged `filetype: thumbnail` or `filetype: browse` 49 | respectively 50 | 51 | *Note: the `aws` subpackage of `hyp3lib` provides helper functions for tagging and uploading files* 52 | 53 | ### Add the plugin to HyP3 54 | Once the plugin itself is created, it can be added to the HyP3 system by... TBD. 55 | -------------------------------------------------------------------------------- /docs/products.md: -------------------------------------------------------------------------------- 1 | # Available HyP3 Products 2 | 3 | On-demand SAR products generated using HyP3 are currently available for the 4 | [Sentinel-1 mission](sentinel1.md "Sentinel-1 Mission") 5 | only. Unless otherwise noted, On-Demand products are available for 14 days after they have been processed. 6 | 7 | A Digital Elevation Model (DEM) is required to generate each of the On-Demand products offered by ASF, and we 8 | generally use the 9 | [GLO-30 Copernicus DEM](https://dataspace.copernicus.eu/explore-data/data-collections/copernicus-contributing-missions/collections-description/COP-DEM "Copernicus DEM" ){target=_blank} 10 | in our processing workflows. For more information, refer to our 11 | [Digital Elevation Models](dems.md "HyP3 DEM Documentation") 12 | documentation. 13 | 14 | ## RTC 15 | 16 | SAR datasets inherently contain geometric and radiometric distortions due to terrain 17 | being imaged by a side-looking instrument. Radiometric Terrain Correction (RTC) removes 18 | these distortions and creates analysis-ready data suitable for use in GIS applications. 19 | RTC processing is a required first step for many amplitude-based SAR applications. 20 | 21 | Sentinel-1 RTC products are generated from Level-1 Sentinel-1 IW acquisitions (either GRD or SLC files), leveraging 22 | [GAMMA Software](https://gamma-rs.ch/gamma-software){target=_blank}.
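RTC jobs can also be ordered programmatically. The snippet below is a minimal sketch using the HyP3 SDK: the
granule name is a placeholder, and the keyword arguments mirror the processing options described in the rest of
this section, so verify the currently supported names and values with `help(hyp3.submit_rtc_job)` before relying
on them.

```python
import hyp3_sdk as sdk

hyp3 = sdk.HyP3(prompt=True)  # prompts for NASA Earthdata Login credentials

# The granule name below is a placeholder; substitute a real Sentinel-1 IW GRD or SLC scene.
job = hyp3.submit_rtc_job(
    'S1A_IW_SLC__1SDV_20230806T232854_20230806T232921_049766_05FBCC_AFB8',
    name='rtc-example',
    resolution=30,         # pixel spacing in meters: 10, 20, or 30
    radiometry='gamma0',   # or 'sigma0'
    scale='power',         # or 'amplitude' or 'decibel'
    speckle_filter=False,  # optionally apply a speckle filter
)
job = hyp3.watch(job)  # block until processing completes
```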
23 | Products are distributed as GeoTIFFs projected to a UTM Zone, with a pixel spacing of 24 | [10, 20, or 30 meters](guides/rtc_product_guide.md#pixel-spacing "RTC Pixel Spacing Documentation"). 25 | Users can choose to output the products in 26 | [gamma-0 or sigma-0 radiometry](guides/rtc_product_guide.md#radiometry "RTC Radiometry Documentation"), 27 | and in 28 | [power, amplitude, or dB scale](guides/rtc_product_guide.md#scale "RTC Scale Documentation"). 29 | Users also have the option to 30 | [apply a speckle filter](guides/rtc_product_guide.md#speckle-filter "RTC Speckle Filter Documentation"). 31 | To learn more, refer to the [Sentinel-1 RTC Product Guide](guides/rtc_product_guide.md 32 | "Sentinel-1 RTC Product Guide"). 33 | 34 | For step-by-step instructions on searching for, ordering, downloading and using On-Demand RTC products, visit our 35 | [RTC On Demand!](https://storymaps.arcgis.com/stories/2ead3222d2294d1fae1d11d3f98d7c35 "RTC On Demand! StoryMap" ){target=_blank} 36 | tutorial. 37 | 38 | ## InSAR 39 | 40 | Interferometric SAR (InSAR) uses the phase differences from repeat passes over the 41 | same area to identify regions where the distance between the sensor and the Earth's 42 | surface has changed. This allows for the detection and quantification of surface 43 | deformation or ground movement. 44 | 45 | There are three different processing approaches available for generating On-Demand InSAR products from Sentinel-1: 46 | 47 | - [Full-scene processing using GAMMA software](#full-scene-insar-gamma) 48 | - [Burst-based processing using ISCE2 software](#burst-based-insar-isce2) 49 | - [ARIA Frame-based processing using ISCE2 software](#aria-sentinel-1-gunw-products-isce2) 50 | 51 | ### Full-scene InSAR (GAMMA) 52 | 53 | These products take Sentinel-1 IW SLC scene pairs as input, and processing is performed using 54 | [GAMMA Software](https://gamma-rs.ch/gamma-software){target=_blank}. 55 | Products are packaged as a collection of GeoTIFFs in a zip file. They are projected to the appropriate UTM Zone for 56 | the product location and can be generated at a pixel spacing of either 80 or 40 meters. To learn more, refer to the 57 | [Sentinel-1 InSAR Product Guide](guides/insar_product_guide.md "Sentinel-1 InSAR Product Guide"). 58 | 59 | For step-by-step instructions on searching for, ordering and downloading On Demand InSAR products, visit our [InSAR On Demand!](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3 "InSAR On Demand! StoryMap" ){target=_blank} tutorial. 60 | 61 | ### Burst-based InSAR (ISCE2) 62 | 63 | These products take sets of individual 64 | [SLC bursts](https://storymaps.arcgis.com/stories/88c8fe67933340779eddef212d76b8b8 "Sentinel-1 Bursts StoryMap" ){target=_blank} 65 | extracted from Sentinel-1 IW SLC products as input, and processing is performed using 66 | [ISCE2 software](https://github.com/isce-framework/isce2#readme "https://github.com/isce-framework/isce2" ){target=_blank}. Products are packaged as a collection of 67 | GeoTIFFs in a zip file. They are projected to the appropriate UTM Zone for the product 68 | location, and can be generated at a pixel spacing of 80, 40, or 20 meters. 69 | 70 | The advantage of using burst-based processing is that users have more control of the extent of the output 71 | interferogram, and the burst footprints always fully overlap from one acquisition to the next. Users can select 72 | sets of up to 15 contiguous along-track bursts to generate a single output interferogram. 
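Burst InSAR jobs can likewise be submitted programmatically. A minimal sketch using the HyP3 SDK, reusing the
example burst pair from our burst-merge tutorial notebook (the pixel-spacing mapping in the `looks` comment is our
reading of the available multilooking options; verify it with `help(hyp3.submit_insar_isce_burst_job)`):

```python
import hyp3_sdk as sdk

hyp3 = sdk.HyP3(prompt=True)  # prompts for NASA Earthdata Login credentials

job = hyp3.submit_insar_isce_burst_job(
    granule1='S1_136231_IW2_20200604T022312_VV_7C85-BURST',  # reference burst
    granule2='S1_136231_IW2_20200616T022313_VV_5D11-BURST',  # secondary burst
    name='burst-insar-example',
    apply_water_mask=False,
    looks='20x4',  # multilooking: '20x4' -> 80 m, '10x2' -> 40 m, '5x1' -> 20 m
)
job = hyp3.watch(job)  # block until processing completes
```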
Refer to the 73 | [Sentinel-1 Burst InSAR Product Guide](guides/burst_insar_product_guide.md "Sentinel-1 Burst InSAR Product Guide") 74 | for more information. 75 | 76 | For step-by-step instructions on searching for, ordering and downloading On Demand Burst InSAR products, visit our 77 | [Burst-Based InSAR for Sentinel-1 On Demand](https://storymaps.arcgis.com/stories/191bf1b6962c402086807390b3ce63b0 "Burst-Based InSAR for Sentinel-1 On Demand StoryMap" ){target=_blank} 78 | tutorial. 79 | 80 | ### ARIA Sentinel-1 GUNW Products (ISCE2) 81 | 82 | There is an extensive archive of 83 | [ARIA S1 GUNW](https://aria.jpl.nasa.gov/products/standard-displacement-products.html "https://aria.jpl.nasa.gov" ){target=_blank} 84 | (Geocoded Unwrapped Interferogram) products 85 | [available from ASF](https://search.asf.alaska.edu/#/?maxResults=1000&dataset=SENTINEL-1%20INTERFEROGRAM%20(BETA) "Vertex search for ARIA S1 GUNW" ){target=_blank}, 86 | but they are only generated in specific geographic locations. If the existing archive does not provide the 87 | products you need, you can generate ARIA GUNW products on demand. 88 | 89 | ARIA S1 GUNW products are delivered as netCDF files with 90-m pixel spacing. Products generated On Demand use the same 90 | [ISCE2](https://github.com/isce-framework/isce2#readme "https://github.com/isce-framework/isce2" ){target=_blank}-based 91 | code used to generate the archived products to ensure interoperability. 92 | 93 | The ARIA S1 GUNW products use a predefined [framing system](guides/gunw_product_guide.md#aria-frame-ids "ARIA Sentinel-1 GUNW 94 | Product Guide: ARIA Frame IDs") to select consistent bursts from input Sentinel-1 IW SLCs to generate interferograms. 95 | Refer to the 96 | [ARIA Sentinel-1 GUNW Product Guide](guides/gunw_product_guide.md "ARIA Sentinel-1 GUNW Product Guide") 97 | for more information. 98 | 99 | ## autoRIFT 100 | 101 | [AutoRIFT](https://github.com/leiyangleon/autoRIFT "https://github.com/leiyangleon/autoRIFT" ){target=_blank} 102 | produces a velocity map from observed motion using a feature tracking algorithm developed as part of the 103 | [NASA MEaSUREs ITS_LIVE](https://its-live.jpl.nasa.gov/ "https://its-live.jpl.nasa.gov" ){target=_blank} 104 | project. 105 | 106 | To learn more, visit the 107 | [ITS_LIVE project website](https://its-live.jpl.nasa.gov/ "https://its-live.jpl.nasa.gov" ){target=_blank}. 108 | -------------------------------------------------------------------------------- /docs/sentinel1.md: -------------------------------------------------------------------------------- 1 | # Sentinel-1 Mission 2 | 3 | The Sentinel-1 satellite constellation is part of the Copernicus Earth Observation program, coordinated by the European 4 | Space Agency (ESA) on behalf of the European Commission (EC). Sentinel-1 satellites carry C-band Synthetic Aperture 5 | Radar (SAR) instruments for global, around-the-clock imagery acquisition, even through cloud cover. 6 | 7 | The mission was designed to support surface deformation applications, and the stable orbits and consistent 8 | acquisition plans of the Sentinel-1 satellites make it easy to generate high-quality Interferometric SAR (InSAR) 9 | products. These products can measure deformation to the centimeter scale, though the 5.6-cm wavelength 10 | of the C-band SAR sensor limits the viability of InSAR in densely vegetated areas. 11 | 12 | The relatively short interval between acquisitions makes this SAR dataset a very useful tool for monitoring rapid or 13 | sudden landscape changes.
In addition, SAR can image the earth's surface through cloud or smoke cover and does not 14 | require sunlight, so valid imagery can be collected on every pass. This is particularly useful for monitoring 15 | conditions during natural disasters such as hurricanes or wildfires, or in areas that are prone to frequent cloud cover. 16 | 17 | More information about the mission is available from the [European Space Agency Sentinel-1 Mission website](https://sentiwiki.copernicus.eu/web/s1-mission "Sentinel-1 Mission" ){target=_blank}. 18 | 19 | ## The Sentinel-1 Constellation 20 | 21 | The Sentinel-1 mission was designed to be a two-satellite constellation, though there have been periods when 22 | only one satellite has been available for image acquisition. 23 | 24 | - Sentinel-1A was launched April 3, 2014, and is still actively acquiring imagery. 25 | - Sentinel-1B was launched April 25, 2016, but [ended its mission](https://www.esa.int/Applications/Observing_the_Earth/Copernicus/Sentinel-1/Mission_ends_for_Copernicus_Sentinel-1B_satellite "https://www.esa.int/Applications/Observing_the_Earth/Copernicus/Sentinel-1/Mission_ends_for_Copernicus_Sentinel-1B_satellite" ){target=_blank} on December 23, 2021. 26 | - Sentinel-1C was launched December 5, 2024, replacing Sentinel-1B in the constellation, and 27 | [has been acquiring imagery regularly since March 26, 2025](https://dataspace.copernicus.eu/news/2025-3-25-sentinel-1c-user-data-opening-26th-march "https://dataspace.copernicus.eu/news/2025-3-25-sentinel-1c-user-data-opening-26th-march" ){target=_blank}. 28 | 29 | Each Sentinel-1 satellite has a 12-day repeat cycle, and they all use the same orbit pattern. When there are two active 30 | sensors in the constellation, their orbits are offset 180 degrees to allow repeat passes every 6 days. In this 31 | scenario, most global landmasses are imaged every 12 days. However, some areas of particular interest to the EC, 32 | including Europe and areas undergoing rapid changes due to uplift or subsidence activity, are imaged every 6 days. 33 | 34 | Refer to the 35 | [Sentinel-1 Observation Scenario](https://sentinel.esa.int/web/sentinel/copernicus/sentinel-1/observation-scenario "https://sentinel.esa.int/web/sentinel/copernicus/sentinel-1/observation-scenario" ){target=_blank} 36 | for more information on the acquisition plans that have been used to meet mission goals under different 37 | constellation configurations. 38 | 39 | ## Transition from Sentinel-1B to Sentinel-1C 40 | 41 | As of December 23, 2021, Sentinel-1B was no longer able to acquire data. An anomaly related to the power supply 42 | could not be repaired, and the satellite has been decommissioned. Refer to 43 | [ESA documentation of the end of the Sentinel-1B mission](https://www.esa.int/Applications/Observing_the_Earth/Copernicus/Sentinel-1/Mission_ends_for_Copernicus_Sentinel-1B_satellite "Mission Ends for Copernicus Sentinel-1B Satellite" ){target=_blank} 44 | for more information. 45 | 46 | The loss of Sentinel-1B resulted in a significant reduction in the spatial and temporal coverage of the Sentinel-1 47 | mission. Refer to 48 | [this article by Iain Woodhouse](https://www.earthblox.io/blog/the-impact-of-the-sentinel-1b-failure-and-looking-forward-to-sentinel-1c "The impact of the Sentinel-1B failure and looking forward to Sentinel-1C" ){target=_blank} 49 | for an illustration of the global impact of the Sentinel-1B failure. The image below illustrates a gap in the 50 | acquisitions over Alaska. 
This area of the Yukon-Kuskokwim Delta did not have a Sentinel-1 acquisition during 51 | the summer of 2022 until August 15. 52 | 53 | ![Y-K Delta Gap](images/s1b_hole_alaska.png "Lack of acquisitions over the Yukon-Kuskokwim Delta, Alaska") 54 | 55 | The gaps in coverage were particularly noticeable the first few months after Sentinel-1B lost power, but some areas 56 | continued to have little or no coverage in the period from December 2021 to April 2025, when Sentinel-1C began 57 | acquiring data regularly. Keep this in mind as you search for data in your area of interest. If there are fewer 58 | results than you would expect, you can 59 | [download acquisition plans for the mission from ESA](https://sentinel.esa.int/web/sentinel/copernicus/sentinel-1/acquisition-plans "Sentinel-1 Acquisition Plans" ){target=_blank} to view the acquisition plan for your area and time period of interest. 60 | 61 | ## The Future of the Sentinel-1 Mission 62 | 63 | Now that Sentinel-1C has replaced Sentinel-1B, and Sentinel-1A continues to acquire data, the constellation has 64 | returned to the same observation scenario used when both Sentinel-1A and Sentinel-1B were active. The 65 | Sentinel-1A platform is approaching the end of its mission, however, and 66 | [plans are underway to launch Sentinel-1D to replace it](https://www.esa.int/Applications/Observing_the_Earth/Copernicus/Sentinel-1/Ariane_6_to_take_Sentinel-1D_into_orbit ){target=_blank}. 67 | -------------------------------------------------------------------------------- /docs/tools/arcgis_toolbox.md: -------------------------------------------------------------------------------- 1 | {{ get_content('https://raw.githubusercontent.com/ASFHyP3/asf-tools/v0.2.0/ArcGIS-toolbox/README.md') }} -------------------------------------------------------------------------------- /docs/tools/asf_tools.md: -------------------------------------------------------------------------------- 1 | {{ get_content('https://raw.githubusercontent.com/ASFHyP3/asf-tools/v0.8.3/src/asf_tools/README.md') }} 2 | -------------------------------------------------------------------------------- /docs/tools/asf_tools_api.md: -------------------------------------------------------------------------------- 1 | # `asf_tools` *v0.8.3* API Reference 2 | 3 | ::: asf_tools 4 | options: 5 | show_submodules: true 6 | -------------------------------------------------------------------------------- /docs/tutorials.md: -------------------------------------------------------------------------------- 1 | # HyP3 Tutorials 2 | 3 | ## Jupyter Notebooks 4 | 5 | We provide step-by-step tutorials for using HyP3 programmatically via Jupyter Notebooks. 6 | 7 | * [Using the HyP3 Python SDK](https://github.com/ASFHyP3/hyp3-sdk/blob/main/docs/sdk_example.ipynb "Using the HyP3 SDK Tutorial" ){target=_blank} 8 | -- This notebook walks through ordering and accessing RTC, InSAR, and autoRIFT products in Python using the HyP3 SDK. 9 | * [Using the HyP3 SDK to search for jobs run by another user](https://github.com/ASFHyP3/hyp3-sdk/blob/main/docs/search_other_user_jobs.ipynb "Using the HyP3 SDK to search for jobs run by another user" ){target=_blank} 10 | -- This notebook walks through using the HyP3 SDK to search for jobs run by another user. 
11 | * [Using the HyP3 SDK to update a job name](https://github.com/ASFHyP3/hyp3-sdk/blob/main/docs/hyp3_job_name_change.ipynb "Using the HyP3 SDK to update a job name"){target=_blank} 12 | -- This notebook walks through using the HyP3 SDK to rename one of your previously submitted jobs. 13 | * [Using the HyP3 SDK to process new granules for given search parameters](./tutorials/process-new-granules-for-search-parameters.md) 14 | -- These notebooks demonstrate how to process new granules that match particular search parameters, 15 | which is particularly useful for ongoing monitoring of a geographic area of interest. 16 | * [Time series analysis with HyP3 and MintPy](https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/hyp3_insar_stack_for_ts_analysis.ipynb "Time series analysis with HyP3 and MintPy Tutorial" ){target=_blank} 17 | -- This notebook walks through performing a time-series analysis of the 2019 18 | Ridgecrest, CA earthquake with HyP3 On Demand InSAR products and MintPy. 19 | * [Time series analysis with HyP3 ISCE2 burst InSAR products and MintPy](https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/hyp3_isce2_burst_stack_for_ts_analysis.ipynb "Time series analysis with HyP3 ISCE2 burst InSAR products and MintPy Tutorial" ){target=_blank} 20 | -- This notebook walks through performing a time-series analysis of the 2019 21 | Ridgecrest, CA earthquake with HyP3 On Demand Burst InSAR products and MintPy. 22 | * [Merging of burst InSAR products with HyP3 ISCE2](https://github.com/ASFHyP3/hyp3-docs/blob/develop/docs/tutorials/hyp3_isce2_burst_merge.ipynb "Merging of burst InSAR products with HyP3 ISCE2 Tutorial" ){target=_blank} 23 | -- This notebook walks through merging a pair of burst InSAR products with HyP3 ISCE2, using the deformation signal related to a 2020 Iranian earthquake as an example. 24 | 25 | 26 | ## StoryMaps 27 | 28 | ASF provides a variety of interactive StoryMap tutorials focused on accessing and using 29 | Synthetic Aperture Radar (SAR) data available from ASF. They can all be accessed here: 30 | 31 | * [StoryMap Tutorials](https://asf-daac.maps.arcgis.com/home/index.html "StoryMap Tutorials" ){target=_blank} 32 | 33 | The StoryMap collection includes step-by-step tutorials for ordering and accessing 34 | [RTC](https://storymaps.arcgis.com/stories/2ead3222d2294d1fae1d11d3f98d7c35 "RTC On Demand StoryMap" ){target=_blank} 35 | and [InSAR](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3 "InSAR On Demand StoryMap" ){target=_blank} 36 | products in Vertex. 37 | 38 | [![RTC On Demand Image](images/rtc-tutorial.png "Click to open RTC On Demand! tutorial")](https://storymaps.arcgis.com/stories/2ead3222d2294d1fae1d11d3f98d7c35 "RTC On Demand!" ){target=_blank} 39 | [![InSAR On Demand Image](images/insar-tutorial.png "Click to open InSAR On Demand! tutorial")](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3 "InSAR On Demand!" 
){target=_blank} 40 | -------------------------------------------------------------------------------- /docs/tutorials/hyp3_insar_stack_for_ts_analysis.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "d20494ef", 6 | "metadata": {}, 7 | "source": [ 8 | "# Time series analysis with HyP3 and MintPy\n", 9 | "\n", 10 | "This notebook walks through performing a time-series analysis of the 2019 Ridgecrest, CA earthquake with On Demand InSAR products from the Alaska Satellite Facility and MintPy. We'll:\n", 11 | "\n", 12 | "1. Use the [ASF Search Python package](https://docs.asf.alaska.edu/asf_search/basics/) to:\n", 13 | "    - Search ASF's catalog for Sentinel-1 SAR products covering the [Ridgecrest earthquake](https://earthquake.usgs.gov/storymap/index-ridgecrest.html)\n", 14 | "    - Select a reference scene to generate a baseline stack\n", 15 | "    - Select a [short baseline subset (SBAS)](https://docs.asf.alaska.edu/vertex/sbas/) of scene pairs for InSAR processing\n", 16 | "\n", 17 | "\n", 18 | "2. Use the [HyP3 Python SDK](https://hyp3-docs.asf.alaska.edu/using/sdk/) to:\n", 19 | "    - Request On Demand InSAR products from ASF HyP3\n", 20 | "    - Download the InSAR products when they are done processing\n", 21 | "\n", 22 | "\n", 23 | "3. Use [GDAL](https://gdal.org/api/index.html#python-api) and [MintPy](https://mintpy.readthedocs.io/en/latest/) to:\n", 24 | "    - Prepare the InSAR products for MintPy\n", 25 | "    - Perform a time-series analysis with MintPy\n", 26 | "    \n", 27 | "---\n", 28 | "\n", 29 | "**Note:** This notebook does assume you have some familiarity with InSAR processing with MintPy already, and is a minimal example without much context or explanations. If you're new to InSAR and MintPy, we suggest checking out:\n", 30 | "* our [InSAR on Demand Story Map](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3)\n", 31 | "\n", 32 | "\n", 33 | "* [OpenSARlab's](https://opensarlab-docs.asf.alaska.edu/) highly detailed walkthrough of using HyP3 + MintPy via these notebooks:\n", 34 | "  * [Prepare a HyP3 InSAR Stack for MintPy](https://github.com/ASFOpenSARlab/opensarlab-notebooks/blob/master/SAR_Training/English/Master/Prepare_HyP3_InSAR_Stack_for_MintPy.ipynb)\n", 35 | "  * [MintPy Time-series Analysis](https://github.com/ASFOpenSARlab/opensarlab-notebooks/blob/master/SAR_Training/English/Master/MintPy_Time_Series_From_Prepared_Data_Stack.ipynb)\n", 36 | "  \n", 37 | "  Note: While these notebooks assume you're working in OpenSARlab, you can run these \n", 38 | "  notebooks outside OpenSARlab by creating [this conda environment](https://github.com/ASFOpenSARlab/opensarlab-envs/blob/main/Environment_Configs/insar_analysis_env.yml)." 39 | ] 40 | }, 41 | { 42 | "cell_type": "markdown", 43 | "id": "b0a6e353", 44 | "metadata": {}, 45 | "source": [ 46 | "## 0. Initial Setup\n", 47 | "\n", 48 | "To run this notebook, you'll need a conda environment with the required dependencies.
You can set up a new environment (recommended) and start the Jupyter server as follows:\n", 49 | "```shell\n", 50 | "conda create -n hyp3-mintpy python=3.10 \"asf_search>=7.0.0\" hyp3_sdk \"mintpy>=1.5.2\" pandas jupyter ipympl\n", 51 | "\n", 52 | "conda activate hyp3-mintpy\n", 53 | "jupyter notebook hyp3_insar_stack_for_ts_analysis.ipynb\n", 54 | "```\n", 55 | "Or, install these dependencies into your own environment:\n", 56 | "```shell\n", 57 | "conda install python=3.10 \"asf_search>=7.0.0\" hyp3_sdk \"mintpy>=1.5.2\" pandas jupyter ipympl\n", 58 | "\n", 59 | "jupyter notebook hyp3_insar_stack_for_ts_analysis.ipynb\n", 60 | "```" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "id": "64e566f3", 67 | "metadata": {}, 68 | "outputs": [], 69 | "source": [ 70 | "from pathlib import Path\n", 71 | "\n", 72 | "from dateutil.parser import parse as parse_date" 73 | ] 74 | }, 75 | { 76 | "cell_type": "markdown", 77 | "id": "33543149", 78 | "metadata": {}, 79 | "source": [ 80 | "### Set parameters" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "id": "7fa17bd0", 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "project_name = '2019_ridgecrest'\n", 91 | "work_dir = Path.cwd() / project_name\n", 92 | "data_dir = work_dir / 'data'\n", 93 | "\n", 94 | "stack_start = parse_date('2019-06-09 00:00:00Z')\n", 95 | "stack_end = parse_date('2019-08-19 00:00:00Z')\n", 96 | "max_temporal_baseline = 24  # days\n", 97 | "\n", 98 | "data_dir.mkdir(parents=True, exist_ok=True)" 99 | ] 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "id": "0e62c544", 104 | "metadata": {}, 105 | "source": [ 106 | "## 1. Select InSAR pairs with ASF Search" 107 | ] 108 | }, 109 | { 110 | "cell_type": "code", 111 | "execution_count": null, 112 | "id": "4498c66c", 113 | "metadata": {}, 114 | "outputs": [], 115 | "source": [ 116 | "import asf_search as asf\n", 117 | "import pandas as pd\n", 118 | "\n", 119 | "search_results = asf.geo_search(\n", 120 | "    platform=asf.PLATFORM.SENTINEL1,\n", 121 | "    intersectsWith='POINT(-117.55 35.77)',\n", 122 | "    start='2019-06-09',\n", 123 | "    end='2019-08-19',\n", 124 | "    processingLevel=asf.PRODUCT_TYPE.SLC,\n", 125 | "    beamMode=asf.BEAMMODE.IW,\n", 126 | "    flightDirection=asf.FLIGHT_DIRECTION.DESCENDING,\n", 127 | "    relativeOrbit=71\n", 128 | "    )" 129 | ] 130 | }, 131 | { 132 | "cell_type": "code", 133 | "execution_count": null, 134 | "id": "9f7ccdcc", 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "baseline_results = asf.baseline_search.stack_from_product(search_results[-1])\n", 139 | "\n", 140 | "columns = list(baseline_results[0].properties.keys()) + ['geometry', ]\n", 141 | "data = [list(scene.properties.values()) + [scene.geometry, ] for scene in baseline_results]\n", 142 | "\n", 143 | "stack = pd.DataFrame(data, columns=columns)\n", 144 | "stack['startTime'] = stack.startTime.apply(parse_date)\n", 145 | "\n", 146 | "stack = stack.loc[(stack_start <= stack.startTime) & (stack.startTime <= stack_end)]" 147 | ] 148 | }, 149 | { 150 | "cell_type": "code", 151 | "execution_count": null, 152 | "id": "e8d9f38b", 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "sbas_pairs = set()\n", 157 | "\n", 158 | "for reference, rt in stack.loc[::-1, ['sceneName', 'temporalBaseline']].itertuples(index=False):\n", 159 | "    secondaries = stack.loc[\n", 160 | "        (stack.sceneName != reference)\n", 161 | "        & (stack.temporalBaseline - rt <= max_temporal_baseline)\n", 162 | "        & (stack.temporalBaseline - 
rt > 0)\n", 163 | " ]\n", 164 | " for secondary in secondaries.sceneName:\n", 165 | " sbas_pairs.add((reference, secondary))" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "id": "9e5c5b0b", 171 | "metadata": {}, 172 | "source": [ 173 | "## 2. Request On Demand InSAR products from ASF HyP3\n", 174 | "\n", 175 | "Use your [NASA Earthdata login](https://urs.earthdata.nasa.gov/) to connect to [ASF HyP3](https://hyp3-docs.asf.alaska.edu/)." 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": null, 181 | "id": "be78f415", 182 | "metadata": {}, 183 | "outputs": [], 184 | "source": [ 185 | "import hyp3_sdk as sdk\n", 186 | "\n", 187 | "hyp3 = sdk.HyP3(prompt=True)" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": null, 193 | "id": "e1dec3dd", 194 | "metadata": {}, 195 | "outputs": [], 196 | "source": [ 197 | "jobs = sdk.Batch()\n", 198 | "for reference, secondary in sbas_pairs:\n", 199 | " jobs += hyp3.submit_insar_job(reference, secondary, name=project_name,\n", 200 | " include_dem=True, include_look_vectors=True)" 201 | ] 202 | }, 203 | { 204 | "cell_type": "code", 205 | "execution_count": null, 206 | "id": "22b82d81", 207 | "metadata": {}, 208 | "outputs": [], 209 | "source": [ 210 | "jobs = hyp3.watch(jobs)" 211 | ] 212 | }, 213 | { 214 | "cell_type": "code", 215 | "execution_count": null, 216 | "id": "a3b3d2b3", 217 | "metadata": {}, 218 | "outputs": [], 219 | "source": [ 220 | "jobs = hyp3.find_jobs(name=project_name)" 221 | ] 222 | }, 223 | { 224 | "cell_type": "code", 225 | "execution_count": null, 226 | "id": "4b461cf0", 227 | "metadata": {}, 228 | "outputs": [], 229 | "source": [ 230 | "insar_products = jobs.download_files(data_dir)\n", 231 | "insar_products = [sdk.util.extract_zipped_product(ii) for ii in insar_products]" 232 | ] 233 | }, 234 | { 235 | "cell_type": "markdown", 236 | "id": "3181031f", 237 | "metadata": {}, 238 | "source": [ 239 | "## 3. 
Time-series Analysis with MintPy" 240 | ] 241 | }, 242 | { 243 | "cell_type": "markdown", 244 | "id": "3cd1b850", 245 | "metadata": {}, 246 | "source": [ 247 | "### 3.1 Subset all GeoTIFFs to their common overlap" 248 | ] 249 | }, 250 | { 251 | "cell_type": "code", 252 | "execution_count": null, 253 | "id": "31f75d5c", 254 | "metadata": {}, 255 | "outputs": [], 256 | "source": [ 257 | "from pathlib import Path\n", 258 | "from typing import List, Union\n", 259 | "from osgeo import gdal\n", 260 | "\n", 261 | "\n", 262 | "def get_common_overlap(file_list: List[Union[str, Path]]) -> List[float]:\n", 263 | "    \"\"\"Get the common overlap of  a list of GeoTIFF files\n", 264 | "    \n", 265 | "    Args:\n", 266 | "        file_list: a list of GeoTIFF files\n", 267 | "    \n", 268 | "    Returns:\n", 269 | "        [ulx, uly, lrx, lry], the upper-left x, upper-left y, lower-right x, and lower-right y\n", 270 | "        corner coordinates of the common overlap\n", 271 | "    \"\"\"\n", 272 | "    \n", 273 | "    corners = [gdal.Info(str(dem), format='json')['cornerCoordinates'] for dem in file_list]\n", 274 | "\n", 275 | "    ulx = max(corner['upperLeft'][0] for corner in corners)\n", 276 | "    uly = min(corner['upperLeft'][1] for corner in corners)\n", 277 | "    lrx = min(corner['lowerRight'][0] for corner in corners)\n", 278 | "    lry = max(corner['lowerRight'][1] for corner in corners)\n", 279 | "    return [ulx, uly, lrx, lry]" 280 | ] 281 | }, 282 | { 283 | "cell_type": "code", 284 | "execution_count": null, 285 | "id": "43c55f08", 286 | "metadata": {}, 287 | "outputs": [], 288 | "source": [ 289 | "files = data_dir.glob('*/*_dem.tif')\n", 290 | "\n", 291 | "overlap = get_common_overlap(files)" 292 | ] 293 | }, 294 | { 295 | "cell_type": "code", 296 | "execution_count": null, 297 | "id": "20d94460", 298 | "metadata": {}, 299 | "outputs": [], 300 | "source": [ 301 | "from pathlib import Path\n", 302 | "from typing import List, Union\n", 303 | "\n", 304 | "def clip_hyp3_products_to_common_overlap(data_dir: Union[str, Path], overlap: List[float]) -> None:\n", 305 | "    \"\"\"Clip all GeoTIFF files to their common overlap\n", 306 | "    \n", 307 | "    Args:\n", 308 | "        data_dir:\n", 309 | "            directory containing the GeoTIFF files to clip\n", 310 | "        overlap:\n", 311 | "            a list of the upper-left x, upper-left y, lower-right x, and lower-right y\n", 312 | "            corner coordinates of the common overlap\n", 313 | "    Returns: None\n", 314 | "    \"\"\"\n", 315 | "\n", 316 | "    \n", 317 | "    files_for_mintpy = ['_water_mask.tif', '_corr.tif', '_unw_phase.tif', '_dem.tif', '_lv_theta.tif', '_lv_phi.tif']\n", 318 | "\n", 319 | "    for extension in files_for_mintpy:\n", 320 | "\n", 321 | "        for file in data_dir.rglob(f'*{extension}'):\n", 322 | "\n", 323 | "            dst_file = file.parent / f'{file.stem}_clipped{file.suffix}'\n", 324 | "\n", 325 | "            gdal.Translate(destName=str(dst_file), srcDS=str(file), projWin=overlap)" 326 | ] 327 | }, 328 | { 329 | "cell_type": "code", 330 | "execution_count": null, 331 | "id": "023ca045", 332 | "metadata": {}, 333 | "outputs": [], 334 | "source": [ 335 | "clip_hyp3_products_to_common_overlap(data_dir, overlap)" 336 | ] 337 | }, 338 | { 339 | "cell_type": "markdown", 340 | "id": "92be356f", 341 | "metadata": {}, 342 | "source": [ 343 | "### 3.2 Create the MintPy config file" 344 | ] 345 | }, 346 | { 347 | "cell_type": "code", 348 | "execution_count": null, 349 | "id": "9c3cf17d", 350 | "metadata": {}, 351 | "outputs": [], 352 | "source": [ 353 | "mintpy_config = work_dir / 'mintpy_config.txt'\n", 354 | "mintpy_config.write_text(\n", 355 | "f\"\"\"\n", 
356 | "mintpy.load.processor = hyp3\n", 357 | "##---------interferogram datasets\n", 358 | "mintpy.load.unwFile = {data_dir}/*/*_unw_phase_clipped.tif\n", 359 | "mintpy.load.corFile = {data_dir}/*/*_corr_clipped.tif\n", 360 | "##---------geometry datasets:\n", 361 | "mintpy.load.demFile = {data_dir}/*/*_dem_clipped.tif\n", 362 | "mintpy.load.incAngleFile = {data_dir}/*/*_lv_theta_clipped.tif\n", 363 | "mintpy.load.azAngleFile = {data_dir}/*/*_lv_phi_clipped.tif\n", 364 | "mintpy.load.waterMaskFile = {data_dir}/*/*_water_mask_clipped.tif\n", 365 | "# mintpy.troposphericDelay.method = no\n", 366 | "\"\"\")" 367 | ] 368 | }, 369 | { 370 | "cell_type": "markdown", 371 | "id": "87385631", 372 | "metadata": {}, 373 | "source": [ 374 | "### 3.3 run MintPy to do the time series analysis" 375 | ] 376 | }, 377 | { 378 | "cell_type": "code", 379 | "execution_count": null, 380 | "id": "a012c642", 381 | "metadata": {}, 382 | "outputs": [], 383 | "source": [ 384 | "!smallbaselineApp.py --dir {work_dir} {mintpy_config}" 385 | ] 386 | }, 387 | { 388 | "cell_type": "code", 389 | "execution_count": null, 390 | "id": "3e866ac9", 391 | "metadata": {}, 392 | "outputs": [], 393 | "source": [ 394 | "%matplotlib widget\n", 395 | "from mintpy.cli import view, tsview" 396 | ] 397 | }, 398 | { 399 | "cell_type": "code", 400 | "execution_count": null, 401 | "id": "acf5cfdc", 402 | "metadata": {}, 403 | "outputs": [], 404 | "source": [ 405 | "view.main([f'{work_dir}/velocity.h5'])" 406 | ] 407 | }, 408 | { 409 | "cell_type": "code", 410 | "execution_count": null, 411 | "id": "b6172aa1", 412 | "metadata": {}, 413 | "outputs": [], 414 | "source": [ 415 | "tsview.main([f'{work_dir}/timeseries.h5'])" 416 | ] 417 | }, 418 | { 419 | "cell_type": "code", 420 | "execution_count": null, 421 | "id": "8a858c62", 422 | "metadata": {}, 423 | "outputs": [], 424 | "source": [] 425 | } 426 | ], 427 | "metadata": { 428 | "kernelspec": { 429 | "display_name": "Python 3 (ipykernel)", 430 | "language": "python", 431 | "name": "python3" 432 | }, 433 | "language_info": { 434 | "codemirror_mode": { 435 | "name": "ipython", 436 | "version": 3 437 | }, 438 | "file_extension": ".py", 439 | "mimetype": "text/x-python", 440 | "name": "python", 441 | "nbconvert_exporter": "python", 442 | "pygments_lexer": "ipython3", 443 | "version": "3.11.5" 444 | } 445 | }, 446 | "nbformat": 4, 447 | "nbformat_minor": 5 448 | } 449 | -------------------------------------------------------------------------------- /docs/tutorials/hyp3_isce2_burst_merge.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "d20494ef", 6 | "metadata": {}, 7 | "source": [ 8 | "# Merge HyP3 ISCE2 burst InSAR products\n", 9 | "\n", 10 | "This notebook demonstrates how to use the `merge_tops_burst` workflow of the HyP3-ISCE2 plugin. This workflow merges multiple burst InSAR Products and takes a directory that includes multiple HyP3-ISCE2 Burst InSAR Products as its input. These input products can be created by the HyP3-ISCE2 on-demand service. To learn how to create HyP3 burst InSAR Products, check out our [hyp3_isce2_burst_stack_for_ts_analysis](https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/hyp3_isce2_burst_stack_for_ts_analysis.ipynb) notebook.\n", 11 | "\n", 12 | "\n", 13 | "**Note:** This notebook does assume you have some familiarity with InSAR processing with HyP3 already, and is a minimal example without much context or explanations. 
If you're new to InSAR and HyP3, we suggest checking out the following resources. Note that some of these resources may be specific to our InSAR GAMMA products, so you may need to adapt them for use with our ISCE2-based burst InSAR products.\n", 14 | "\n", 15 | "* Our [Burst Data Download Story Map](https://storymaps.arcgis.com/stories/88c8fe67933340779eddef212d76b8b8)\n", 16 | "\n", 17 | "* Our [product guide](https://hyp3-docs.asf.alaska.edu/guides/burst_insar_product_guide/) for burst InSAR products\n", 18 | "\n", 19 | "* Our [GitHub repository](https://github.com/asfhyp3/hyp3-isce2) containing the workflow used to create burst InSAR products\n", 20 | "\n", 21 | "* Our [InSAR on Demand Story Map](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3)" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "id": "b0a6e353", 27 | "metadata": {}, 28 | "source": [ 29 | "## 0. Initial Setup\n", 30 | "To run this notebook, you will need a local copy of the HyP3-ISCE2 GitHub repository and to set up a conda environment with the required dependencies. In your terminal, you can do this with the following commands:\n", 31 | "\n", 32 | "```shell\n", 33 | "git clone https://github.com/ASFHyP3/hyp3-isce2.git\n", 34 | "cd hyp3-isce2\n", 35 | "mamba env create -f environment.yml\n", 36 | "mamba activate hyp3-isce2\n", 37 | "python -m pip install -e .\n", 38 | "mamba install -c conda-forge pandas jupyter ipympl\n", 39 | "jupyter notebook hyp3_isce2_burst_merge.ipynb\n", 40 | "```" 41 | ] 42 | }, 43 | { 44 | "cell_type": "markdown", 45 | "id": "7f093a40806f3ca5", 46 | "metadata": {}, 47 | "source": [ 48 | "This workflow requires a NASA Earthdata Login account. If you haven't yet, you can make an [account for free](https://urs.earthdata.nasa.gov/users/new) and set up a `.netrc` file in your home directory with your personal username and password.\n", 49 | "\n", 50 | "```shell\n", 51 | "echo 'machine urs.earthdata.nasa.gov login $USERNAME password $PASSWORD' >> ~/.netrc\n", 52 | "```" 53 | ] 54 | }, 55 | { 56 | "cell_type": "markdown", 57 | "id": "5cce7457-a179-448d-a139-1f9e4f72b2d3", 58 | "metadata": {}, 59 | "source": [ 60 | "## 1. Create burst InSAR products to merge\n", 61 | "\n", 62 | "Before using the [HyP3-ISCE2 merge burst workflow](https://hyp3-docs.asf.alaska.edu/guides/burst_insar_product_guide/#merge-sentinel-1-burst-insar-products), we must create burst InSAR products that are merge-compatible. This means that they must:\n", 63 | "- Have the same reference and secondary dates\n", 64 | "- Have the same polarization\n", 65 | "- Have the same multilooking\n", 66 | "- Be from the same relative orbit\n", 67 | "- Be contiguous\n", 68 | "\n", 69 | "In this section, we'll create two such burst InSAR products and download them to a local directory."
70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": null, 75 | "id": "7aac2af4-b703-4d21-8c1f-acd6cccfcc4c", 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "from pathlib import Path\n", 80 | "\n", 81 | "\n", 82 | "reference_granule1 = 'S1_136231_IW2_20200604T022312_VV_7C85-BURST'\n", 83 | "secondary_granule1 = 'S1_136231_IW2_20200616T022313_VV_5D11-BURST'\n", 84 | "\n", 85 | "reference_granule2 = 'S1_136232_IW2_20200604T022315_VV_7C85-BURST'\n", 86 | "secondary_granule2 = 'S1_136232_IW2_20200616T022316_VV_5D11-BURST'\n", 87 | "\n", 88 | "project_name = 'merge_demo'\n", 89 | "current_dir = Path.cwd()  # Make sure we preserve this in case we want to navigate back\n", 90 | "work_dir = Path.cwd() / project_name\n", 91 | "data_dir = work_dir / 'data'\n", 92 | "data_dir.mkdir(parents=True, exist_ok=True)" 93 | ] 94 | }, 95 | { 96 | "cell_type": "code", 97 | "execution_count": null, 98 | "id": "3f422b8c-3362-4bd2-a4e8-601797c1be15", 99 | "metadata": {}, 100 | "outputs": [], 101 | "source": [ 102 | "import hyp3_sdk as sdk\n", 103 | "\n", 104 | "hyp3 = sdk.HyP3(prompt=True)\n", 105 | "\n", 106 | "jobs = sdk.Batch()\n", 107 | "for reference, secondary in [(reference_granule1, secondary_granule1), (reference_granule2, secondary_granule2)]:\n", 108 | "    jobs += hyp3.submit_insar_isce_burst_job(\n", 109 | "        granule1 = reference,\n", 110 | "        granule2 = secondary, \n", 111 | "        apply_water_mask = False,\n", 112 | "        name = project_name,\n", 113 | "        looks = '20x4'\n", 114 | "    )" 115 | ] 116 | }, 117 | { 118 | "cell_type": "code", 119 | "execution_count": null, 120 | "id": "a1ea4c9a-f4af-46cd-a236-da6d4d9bc79e", 121 | "metadata": {}, 122 | "outputs": [], 123 | "source": [ 124 | "jobs = hyp3.watch(jobs)" 125 | ] 126 | }, 127 | { 128 | "cell_type": "code", 129 | "execution_count": null, 130 | "id": "3bfae6e2-909d-4c02-8934-80f7dd62bb5f", 131 | "metadata": {}, 132 | "outputs": [], 133 | "source": [ 134 | "from datetime import datetime\n", 135 | "\n", 136 | "now = datetime.now()\n", 137 | "start_of_today = datetime(now.year, now.month, now.day)\n", 138 | "\n", 139 | "jobs = hyp3.find_jobs(name=project_name, start=start_of_today)" 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": null, 145 | "id": "0ca16997-9159-490f-aa53-dda56dc7ec7e", 146 | "metadata": {}, 147 | "outputs": [], 148 | "source": [ 149 | "insar_products = jobs.download_files(data_dir)\n", 150 | "insar_products = [sdk.util.extract_zipped_product(ii) for ii in insar_products]" 151 | ] 152 | }, 153 | { 154 | "cell_type": "markdown", 155 | "id": "9e5c5b0b", 156 | "metadata": {}, 157 | "source": [ 158 | "## 2. Merge the products using the hyp3-isce2 merge_tops_bursts workflow\n", 159 | "\n", 160 | "Now we have all the data we need and can merge these two burst InSAR products! You would typically run the command below on the command line, but we'll run it through the Jupyter Notebook here."
161 | ] 162 | }, 163 | { 164 | "cell_type": "code", 165 | "execution_count": null, 166 | "id": "ec6920e4-7aec-423e-9951-72c3235b62ce", 167 | "metadata": {}, 168 | "outputs": [], 169 | "source": [ 170 | "cd $work_dir" 171 | ] 172 | }, 173 | { 174 | "cell_type": "code", 175 | "execution_count": null, 176 | "id": "4e02a4de-d4b4-43f9-b861-98c9d6c64d53", 177 | "metadata": {}, 178 | "outputs": [], 179 | "source": [ 180 | "# View the available options for the merge_tops_bursts workflow\n", 181 | "!python -m hyp3_isce2 ++process merge_tops_bursts --help" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": null, 187 | "id": "e1dec3dd", 188 | "metadata": { 189 | "scrolled": true 190 | }, 191 | "outputs": [], 192 | "source": [ 193 | "# Run the merge_tops_bursts workflow on the downloaded products\n", 194 | "!python -m hyp3_isce2 ++process merge_tops_bursts $data_dir" 195 | ] 196 | }, 197 | { 198 | "cell_type": "markdown", 199 | "id": "9b717cb5-2895-4c1b-abfb-6e597e648d23", 200 | "metadata": {}, 201 | "source": [ 202 | "## 3. Display the merged product\n", 203 | "\n", 204 | "We've successfully run the command; now let's look at the results!" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": null, 210 | "id": "80f7eedce43cd54c", 211 | "metadata": { 212 | "collapsed": false, 213 | "jupyter": { 214 | "outputs_hidden": false 215 | } 216 | }, 217 | "outputs": [], 218 | "source": [ 219 | "%matplotlib inline\n", 220 | "import numpy as np\n", 221 | "from osgeo import gdal\n", 222 | "import matplotlib\n", 223 | "import matplotlib.pyplot as plt\n", 224 | "\n", 225 | "tifs = [f for f in work_dir.glob(\"*/*.tif\")]\n", 226 | "unw_file = [f for f in tifs if 'unw_phase' in f.name][0]\n", 227 | "wrapped_file = [f for f in tifs if 'wrapped_phase' in f.name][0]\n", 228 | "corr_file = [f for f in tifs if 'corr' in f.name][0]\n", 229 | "desired_tifs = [wrapped_file, unw_file, corr_file]\n", 230 | "\n", 231 | "f, axs = plt.subplots(len(desired_tifs), figsize=(6,10))\n", 232 | "for i, tif in enumerate(desired_tifs): \n", 233 | "    ds = gdal.Open(str(tif))\n", 234 | "    merged_bursts = np.ma.masked_equal(ds.GetRasterBand(1).ReadAsArray(), 0)\n", 235 | "    ds = None\n", 236 | "    \n", 237 | "    axs[i].imshow(merged_bursts)\n", 238 | "    axs[i].set_title(tif.name)\n", 239 | "\n", 240 | "    plt.setp(plt.gcf().get_axes(), xticks=[], yticks=[])\n", 241 | "    plt.tight_layout()" 242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": null, 247 | "id": "f8bc3bd2-f126-4546-8f80-c52ecab20a68", 248 | "metadata": {}, 249 | "outputs": [], 250 | "source": [] 251 | } 252 | ], 253 | "metadata": { 254 | "kernelspec": { 255 | "display_name": "Python 3 (ipykernel)", 256 | "language": "python", 257 | "name": "python3" 258 | }, 259 | "language_info": { 260 | "codemirror_mode": { 261 | "name": "ipython", 262 | "version": 3 263 | }, 264 | "file_extension": ".py", 265 | "mimetype": "text/x-python", 266 | "name": "python", 267 | "nbconvert_exporter": "python", 268 | "pygments_lexer": "ipython3", 269 | "version": "3.11.6" 270 | } 271 | }, 272 | "nbformat": 4, 273 | "nbformat_minor": 5 274 | } 275 | -------------------------------------------------------------------------------- /docs/tutorials/new-insar-jobs.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "d74b0e53-3f0f-4e76-8099-e307f03c4d6e", 6 | "metadata": {}, 7 | "source": [ 8 | "# Using the HyP3 SDK to generate
InSAR products for given search parameters\n", 9 | "\n", 10 | "Before running this notebook for the first time, please read [Using the HyP3 SDK to process new granules for given search parameters](https://hyp3-docs.asf.alaska.edu/tutorials/process-new-granules-for-search-parameters/) for a complete introduction to this tutorial.\n", 11 | "\n", 12 | "You can run this notebook to submit On Demand InSAR jobs for all granules that match a particular set of search parameters (date range, area of interest, etc.). After you run the notebook, more granules may become available for your search parameters over the following days (because there is a delay between data being acquired and becoming available in the archive), or you may decide to modify your search parameters. In either case, you can simply run the notebook again to submit InSAR jobs for all granules that have not yet been processed.\n", 13 | "\n", 14 | "This workflow is particularly useful for ongoing monitoring of a geographic area of interest, but it can be used whenever you want to augment your project with additional products without generating duplicates.\n", 15 | "\n", 16 | "First, install dependencies:" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "9f5b47c3-86db-4574-b44e-09952106360d", 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "!pip install 'asf-search>=6.6.2' hyp3-sdk\n", 27 | "\n", 28 | "import asf_search\n", 29 | "from hyp3_sdk import HyP3" 30 | ] 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "id": "334cf891-8ad6-4f9a-b7c7-4f5c7fd7ddc7", 35 | "metadata": {}, 36 | "source": [ 37 | "Next, define your search parameters and job specification as shown below. The search parameters become keyword arguments to the `asf_search.search` function. See [here](https://docs.asf.alaska.edu/asf_search/searching/#keywords) for a full list of available keywords." 
38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "id": "b6c5257b-31f2-42f8-b42a-147e88873f94", 44 | "metadata": {}, 45 | "outputs": [], 46 | "source": [ 47 | "search_parameters = {\n", 48 | "    \"start\": \"2023-04-05T00:00:00Z\",\n", 49 | "    \"end\": \"2023-04-10T00:00:00Z\",\n", 50 | "    \"intersectsWith\":\n", 51 | "        \"POLYGON((-110.7759 44.8543,-101.3998 44.8543,-101.3998 50.8183,-110.7759 50.8183,-110.7759 44.8543))\",\n", 52 | "    \"platform\": \"S1\",\n", 53 | "    \"processingLevel\": \"SLC\",\n", 54 | "}\n", 55 | "job_specification = {\n", 56 | "    \"job_parameters\": {},\n", 57 | "    \"job_type\": \"INSAR_GAMMA\",\n", 58 | "    \"name\": \"Project Name\"\n", 59 | "}" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "id": "425a5bb4-497a-4de6-b841-f739d9cb37f1", 65 | "metadata": {}, 66 | "source": [ 67 | "Next, construct a list of unprocessed granules:" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "id": "bff7fe5d-2d0a-4360-8c6d-55de10d45c2e", 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "hyp3 = HyP3()\n", 78 | "\n", 79 | "previous_jobs = hyp3.find_jobs(\n", 80 | "    name=job_specification['name'],\n", 81 | "    job_type=job_specification['job_type'],\n", 82 | ")\n", 83 | "processed_granules = [job.job_parameters['granules'][0] for job in previous_jobs]\n", 84 | "print(f'Found {len(processed_granules)} previously processed granules')\n", 85 | "\n", 86 | "search_results = asf_search.search(**search_parameters)\n", 87 | "search_results.raise_if_incomplete()\n", 88 | "\n", 89 | "unprocessed_granules = [\n", 90 | "    result for result in search_results if result.properties['sceneName'] not in processed_granules\n", 91 | "]\n", 92 | "print(f'Found {len(unprocessed_granules)} unprocessed granules')" 93 | ] 94 | }, 95 | { 96 | "cell_type": "markdown", 97 | "id": "23a0a176-5829-4a97-b6cc-ff7f0e36243c", 98 | "metadata": {}, 99 | "source": [ 100 | "Finally, find the nearest temporal neighbors for each unprocessed granule in its baseline stack, and submit a new InSAR job for each pair. You can adjust the number of pairs included for each unprocessed granule by changing the value of the `depth` parameter for the `get_neighbors` function.\n", 101 | "\n", 102 | "Note that unprocessed granules are handled in batches. You can adjust the batch size by changing the value of the `batch_size` variable."
103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "id": "1d57114a-e652-4b04-ae1e-db7f727941dc", 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "from copy import deepcopy\n", 113 | "\n", 114 | "def get_neighbors(granule: asf_search.ASFProduct, platform: str, depth=2) -> list[str]:\n", 115 | " stack = asf_search.baseline_search.stack_from_product(granule)\n", 116 | " stack.raise_if_incomplete()\n", 117 | " stack = [item for item in stack if\n", 118 | " item.properties['temporalBaseline'] < 0 and item.properties['sceneName'].startswith(platform)]\n", 119 | " neighbors = [item.properties['sceneName'] for item in stack[-depth:]]\n", 120 | " return neighbors\n", 121 | "\n", 122 | "\n", 123 | "def get_jobs_for_granule(granule: asf_search.ASFProduct) -> list[dict]:\n", 124 | " jobs = []\n", 125 | " neighbors = get_neighbors(granule, search_parameters['platform'])\n", 126 | "\n", 127 | " for neighbor in neighbors:\n", 128 | " job = deepcopy(job_specification)\n", 129 | " job['job_parameters']['granules'] = [granule.properties['sceneName'], neighbor]\n", 130 | " jobs.append(job)\n", 131 | "\n", 132 | " return jobs\n", 133 | "\n", 134 | "\n", 135 | "batch_size = 10\n", 136 | "for i in range(0, len(unprocessed_granules), batch_size):\n", 137 | " new_jobs = [\n", 138 | " job for granule in unprocessed_granules[i:i+batch_size]\n", 139 | " for job in get_jobs_for_granule(granule)\n", 140 | " ]\n", 141 | " print(f'Submitting {len(new_jobs)} jobs')\n", 142 | " hyp3.submit_prepared_jobs(new_jobs)\n", 143 | "\n", 144 | "print('Done.')" 145 | ] 146 | } 147 | ], 148 | "metadata": { 149 | "kernelspec": { 150 | "display_name": "Python 3 (ipykernel)", 151 | "language": "python", 152 | "name": "python3" 153 | }, 154 | "language_info": { 155 | "codemirror_mode": { 156 | "name": "ipython", 157 | "version": 3 158 | }, 159 | "file_extension": ".py", 160 | "mimetype": "text/x-python", 161 | "name": "python", 162 | "nbconvert_exporter": "python", 163 | "pygments_lexer": "ipython3", 164 | "version": "3.9.17" 165 | } 166 | }, 167 | "nbformat": 4, 168 | "nbformat_minor": 5 169 | } 170 | -------------------------------------------------------------------------------- /docs/tutorials/new-rtc-jobs.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "d74b0e53-3f0f-4e76-8099-e307f03c4d6e", 6 | "metadata": {}, 7 | "source": [ 8 | "# Using the HyP3 SDK to generate RTC products for given search parameters\n", 9 | "\n", 10 | "Before running this notebook for the first time, please read [Using the HyP3 SDK to process new granules for given search parameters](https://hyp3-docs.asf.alaska.edu/tutorials/process-new-granules-for-search-parameters/) for a complete introduction to this tutorial.\n", 11 | "\n", 12 | "You can run this notebook to submit On Demand RTC jobs for all granules that match a particular set of search parameters (date range, area of interest, etc.). After you run the notebook, more granules may become available for your search parameters over the following days (because there is a delay between data being acquired and becoming available in the archive), or you may decide to modify your search parameters. 
In either case, you can simply run the notebook again to submit RTC jobs for all granules that have not yet been processed.\n", 13 | "\n", 14 | "This workflow is particularly useful for ongoing monitoring of a geographic area of interest, but it can be used whenever you want to augment your project with additional products without generating duplicates.\n", 15 | "\n", 16 | "First, install dependencies:" 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "9f5b47c3-86db-4574-b44e-09952106360d", 23 | "metadata": {}, 24 | "outputs": [], 25 | "source": [ 26 | "!pip install 'asf-search>=6.6.2' hyp3-sdk\n", 27 | "\n", 28 | "import asf_search\n", 29 | "from hyp3_sdk import HyP3" 30 | ] 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "id": "334cf891-8ad6-4f9a-b7c7-4f5c7fd7ddc7", 35 | "metadata": {}, 36 | "source": [ 37 | "Next, define your search parameters and job specification as shown below. The search parameters become keyword arguments to the `asf_search.search` function. See [here](https://docs.asf.alaska.edu/asf_search/searching/#keywords) for a full list of available keywords." 38 | ] 39 | }, 40 | { 41 | "cell_type": "code", 42 | "execution_count": null, 43 | "id": "b6c5257b-31f2-42f8-b42a-147e88873f94", 44 | "metadata": {}, 45 | "outputs": [], 46 | "source": [ 47 | "search_parameters = {\n", 48 | " \"start\": \"2023-06-01T00:00:00Z\",\n", 49 | " \"end\": \"2023-06-30T00:00:00Z\",\n", 50 | " \"intersectsWith\":\n", 51 | " \"POLYGON((-110.7759 44.8543,-101.3998 44.8543,-101.3998 50.8183,-110.7759 50.8183,-110.7759 44.8543))\",\n", 52 | " \"platform\": \"S1\",\n", 53 | " \"processingLevel\": \"SLC\",\n", 54 | "}\n", 55 | "job_specification = {\n", 56 | " \"job_parameters\": {\n", 57 | " \"resolution\": 30,\n", 58 | " },\n", 59 | " \"job_type\": \"RTC_GAMMA\",\n", 60 | " \"name\": \"Project Name\"\n", 61 | "}" 62 | ] 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "id": "425a5bb4-497a-4de6-b841-f739d9cb37f1", 67 | "metadata": {}, 68 | "source": [ 69 | "Next, construct a list of unprocessed granules:" 70 | ] 71 | }, 72 | { 73 | "cell_type": "code", 74 | "execution_count": null, 75 | "id": "bff7fe5d-2d0a-4360-8c6d-55de10d45c2e", 76 | "metadata": {}, 77 | "outputs": [], 78 | "source": [ 79 | "hyp3 = HyP3()\n", 80 | "\n", 81 | "previous_jobs = hyp3.find_jobs(\n", 82 | " name=job_specification['name'],\n", 83 | " job_type=job_specification['job_type'],\n", 84 | ")\n", 85 | "processed_granules = [job.job_parameters['granules'][0] for job in previous_jobs]\n", 86 | "print(f'Found {len(processed_granules)} previously processed granules')\n", 87 | "\n", 88 | "search_results = asf_search.search(**search_parameters)\n", 89 | "search_results.raise_if_incomplete()\n", 90 | "\n", 91 | "unprocessed_granules = [\n", 92 | " result for result in search_results if result.properties['sceneName'] not in processed_granules\n", 93 | "]\n", 94 | "print(f'Found {len(unprocessed_granules)} unprocessed granules')" 95 | ] 96 | }, 97 | { 98 | "cell_type": "markdown", 99 | "id": "23a0a176-5829-4a97-b6cc-ff7f0e36243c", 100 | "metadata": {}, 101 | "source": [ 102 | "Finally, submit a new RTC job for each unprocessed granule. Note that unprocessed granules are handled in batches. You can adjust the batch size by changing the value of the `batch_size` variable." 
103 | ] 104 | }, 105 | { 106 | "cell_type": "code", 107 | "execution_count": null, 108 | "id": "1d57114a-e652-4b04-ae1e-db7f727941dc", 109 | "metadata": {}, 110 | "outputs": [], 111 | "source": [ 112 | "from copy import deepcopy\n", 113 | "\n", 114 | "def get_job_for_granule(granule: asf_search.ASFProduct) -> dict:\n", 115 | " job = deepcopy(job_specification)\n", 116 | " job['job_parameters']['granules'] = [granule.properties['sceneName']]\n", 117 | " return job\n", 118 | "\n", 119 | "\n", 120 | "batch_size = 20\n", 121 | "for i in range(0, len(unprocessed_granules), batch_size):\n", 122 | " new_jobs = [\n", 123 | " get_job_for_granule(granule) for granule in unprocessed_granules[i:i+batch_size]\n", 124 | " ]\n", 125 | " print(f'Submitting {len(new_jobs)} jobs')\n", 126 | " hyp3.submit_prepared_jobs(new_jobs)\n", 127 | "\n", 128 | "print('Done.')" 129 | ] 130 | } 131 | ], 132 | "metadata": { 133 | "kernelspec": { 134 | "display_name": "Python 3 (ipykernel)", 135 | "language": "python", 136 | "name": "python3" 137 | }, 138 | "language_info": { 139 | "codemirror_mode": { 140 | "name": "ipython", 141 | "version": 3 142 | }, 143 | "file_extension": ".py", 144 | "mimetype": "text/x-python", 145 | "name": "python", 146 | "nbconvert_exporter": "python", 147 | "pygments_lexer": "ipython3", 148 | "version": "3.9.17" 149 | } 150 | }, 151 | "nbformat": 4, 152 | "nbformat_minor": 5 153 | } 154 | -------------------------------------------------------------------------------- /docs/tutorials/process-new-granules-for-search-parameters.md: -------------------------------------------------------------------------------- 1 | # Using the HyP3 SDK to process new granules for given search parameters 2 | 3 | In the past, ASF offered subscription functionality for HyP3 products. 4 | A user could create a subscription with a particular set of search parameters 5 | (date range, area of interest, etc.), 6 | and new Sentinel-1 acquisitions that met these criteria would be automatically submitted for processing. 7 | However, this feature was only accessed by a very small minority of HyP3 users, 8 | and most users did not regularly check their subscriptions and download the generated products before they expired. 9 | As such, we have removed this feature in favor of a more flexible approach. 10 | 11 | The following Jupyter notebooks demonstrate how to achieve subscription-like functionality. 12 | They can be run as needed so that you do not have to worry about your products expiring 13 | before you are ready to download them. 14 | This workflow is particularly useful for ongoing monitoring of a geographic area of interest. 15 | 16 | The first notebook demonstrates how to submit RTC jobs using this method, 17 | while the second notebook demonstrates how to submit InSAR jobs. 18 | These tutorials can easily be adapted to support other job types. 19 | Please [contact us](../contact.md) if you need help adapting these tutorials for your particular use case. 
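Both notebooks (linked below) implement the same core pattern. As a condensed, illustrative sketch of the RTC variant (the project name and search parameter values are placeholders), the workflow boils down to:

```python
import asf_search
from hyp3_sdk import HyP3

hyp3 = HyP3()

# 1. Collect the granules already processed under this project name and job type.
previous_jobs = hyp3.find_jobs(name='Project Name', job_type='RTC_GAMMA')
processed = {job.job_parameters['granules'][0] for job in previous_jobs}

# 2. Search the archive for all granules matching your criteria.
search_results = asf_search.search(
    platform='S1',
    processingLevel='SLC',
    start='2023-06-01T00:00:00Z',
    end='2023-06-30T00:00:00Z',
)

# 3. Submit a job for each granule that has not yet been processed.
for result in search_results:
    scene = result.properties['sceneName']
    if scene not in processed:
        hyp3.submit_rtc_job(scene, name='Project Name')
```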
20 | 21 | * [Using the HyP3 SDK to generate RTC products for given search parameters](https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/new-rtc-jobs.ipynb "Using the HyP3 SDK to generate RTC products for given search parameters" ){target=_blank} 22 | * [Using the HyP3 SDK to generate InSAR products for given search parameters](https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/new-insar-jobs.ipynb "Using the HyP3 SDK to generate InSAR products for given search parameters" ){target=_blank} 23 | -------------------------------------------------------------------------------- /docs/usage_guidelines.md: -------------------------------------------------------------------------------- 1 | # Product Usage Guidelines 2 | 3 | When using this data in a publication or presentation, we ask that you include the 4 | acknowledgement provided with each product. DOIs are also provided for citation 5 | when discussing the HyP3 software or plugins. 6 | 7 | - For multi-file products, the acknowledgement and relevant DOIs are included in 8 | the `*.README.md.txt` file. 9 | - For netCDF products, the acknowledgement is included in the `source` global attribute 10 | and the DOIs are included in the `references` global attribute. 11 | 12 | {% include 'citing-snippet.md' %} 13 | -------------------------------------------------------------------------------- /docs/using-snippet.md: -------------------------------------------------------------------------------- 1 | On Demand products processed by HyP3 can be requested quickly and easily, either by 2 | using a web interface or programmatically. These services are currently only available for [Sentinel-1 datasets](sentinel1.md "Sentinel-1 Mission" ){target=_blank}. 3 | 4 | 16 | 17 | ### Web Access 18 | 19 | ASF's Data Search Vertex portal provides a rich interface to explore Sentinel-1 20 | acquisitions and find images to submit for On Demand processing. It also provides 21 | tools for selecting pairs and stacks for InSAR analysis. 22 | 23 | * [Vertex](using/vertex.md "Using Vertex") 24 | 25 | ### Programmatic Access 26 | Requesting and downloading On Demand products can also be done programmatically: 27 | 28 | * [HyP3 SDK for Python](using/sdk.md "Using SDK") 29 | * [HyP3 REST API](using/api.md "Using API") 30 | 31 | ### Public Visibility of Jobs 32 | 33 | !!! warning 34 | 35 | All jobs submitted to HyP3, whether via web access or programmatic access, are publicly visible. 36 | Anyone with access to HyP3 can potentially: 37 | 38 | * View your jobs and associated metadata, including job name and user ID. 39 | * Download any products generated by your jobs. 40 | 41 | In particular, do not include any sensitive information in your job names. 42 | -------------------------------------------------------------------------------- /docs/using.md: -------------------------------------------------------------------------------- 1 | # Using ASF HyP3 2 | 3 | {% include 'using-snippet.md' %} 4 | 5 | ## Citing HyP3 6 | 7 | {% include 'citing-snippet.md' %} 8 | 9 | See the [Usage Guidelines](usage_guidelines.md) section for more information on citing and/or acknowledging On Demand products. 
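As a quick illustration of reading the acknowledgement metadata described in the Usage Guidelines, a minimal sketch for a netCDF product might look like the following (the file name is hypothetical; substitute a product you have downloaded):

```python
import netCDF4

# 'S1-GUNW-example.nc' is a hypothetical file name: substitute a
# netCDF product you have downloaded.
with netCDF4.Dataset('S1-GUNW-example.nc') as product:
    print(product.getncattr('source'))      # acknowledgement text
    print(product.getncattr('references'))  # DOIs for citation
```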
10 | -------------------------------------------------------------------------------- /docs/using/api.md: -------------------------------------------------------------------------------- 1 | # Using the HyP3 API 2 | 3 | The HyP3 API is built on [OpenAPI](https://www.openapis.org/ "https://www.openapis.org/" ){target=_blank} and [Swagger](https://swagger.io/ "https://swagger.io/" ){target=_blank}. 4 | A friendly interface for exploring the API is available at: 5 | 6 | #### <https://hyp3-api.asf.alaska.edu/ui>{target=_blank} 7 | 8 | To use the API, you'll need an `asf-urs` session cookie, which you can get 9 | by [signing in to Vertex](https://search.asf.alaska.edu/#/){target=_blank}. 10 | 11 | ![vertex sign in](../images/vertex-sign-in.png) 12 | 13 | ### Confirm you are authenticated 14 | 15 | To confirm you are authenticated, you can run a `GET` request to our `/user` endpoint. 16 | Select the blue `GET` button next to `/user` and click the `Try it out` button: 17 | ![GET /user try](../images/get_user_try.png) 18 | 19 | Then, execute the request and look at the response: 20 | ![GET /user execute](../images/get_user_execute.png) 21 | 22 | If you get a `Code 200`, you should see a JSON dictionary of your user information. 23 | 24 | !!! warning "Authentication Required" 25 | 26 | If you get a 401 response, you need to [sign in to Vertex](https://search.asf.alaska.edu/#/){target=_blank} to get the `asf-urs` session cookie. 27 | 28 | ```json 29 | { 30 | "detail": "No authorization token provided", 31 | "status": 401, 32 | "title": "Unauthorized", 33 | "type": "about:blank" 34 | } 35 | ``` 36 | 37 | ## Submitting Sentinel-1 RTC jobs 38 | 39 | Jobs are submitted through the API by providing a JSON payload with a list of job 40 | definitions. 41 | 42 | Sentinel-1 jobs are submitted using [ESA granule IDs](https://sentiwiki.copernicus.eu/web/s1-products#S1-Products-SAR-Naming-Convention){target=_blank}. 43 | A minimal job list for a single Sentinel-1 RTC job would look like: 44 | 45 | ```json 46 | { 47 | "jobs": [ 48 | { 49 | "name": "minimal-rtc-example", 50 | "job_type": "RTC_GAMMA", 51 | "job_parameters": { 52 | "granules": [ 53 | "S1A_IW_GRDH_1SDV_20210214T154837_20210214T154901_036588_044C54_032E" 54 | ] 55 | } 56 | } 57 | ] 58 | } 59 | ``` 60 | 61 | The job list may contain up to 200 job definitions. 
You can also provide custom RTC options: 62 | ```json 63 | { 64 | "jobs": [ 65 | { 66 | "name": "custom-rtc-example", 67 | "job_type": "RTC_GAMMA", 68 | "job_parameters": { 69 | "granules": [ 70 | "S1B_IW_GRDH_1SDV_20210210T153157_20210210T153222_025546_030B48_2901" 71 | ], 72 | "radiometry": "gamma0", 73 | "scale": "power", 74 | "dem_matching": false, 75 | "include_dem": true, 76 | "include_inc_map": true, 77 | "include_scattering_area": false, 78 | "speckle_filter": false 79 | } 80 | }, 81 | { 82 | "name": "custom-rtc-example", 83 | "job_type": "RTC_GAMMA", 84 | "job_parameters": { 85 | "granules": [ 86 | "S1B_IW_GRDH_1SDV_20210210T153132_20210210T153157_025546_030B48_4E31" 87 | ], 88 | "radiometry": "sigma0", 89 | "scale": "amplitude", 90 | "dem_matching": false, 91 | "include_dem": false, 92 | "include_inc_map": false, 93 | "include_scattering_area": true, 94 | "speckle_filter": true 95 | } 96 | } 97 | ] 98 | } 99 | ``` 100 | 101 | ## Submitting OPERA-RTC-S1 jobs 102 | 103 | The OPERA-RTC-S1 job takes a single **co-pol (VV or HH)** 104 | [ESA granule burst ID](https://sentiwiki.copernicus.eu/web/s1-products#S1Products-BurstIDMapsS1-Products-Burst-ID-Maps "ESA Burst Id Maps" ){target=_blank} 105 | from a Sentinel-1 IW SLC acquisition that meets the 106 | [date range](../guides/opera_rtc_product_guide.md#date-range-for-on-demand-opera-rtc-s1-products "OPERA RTC-S1 Product Guide" ){target=_blank} 107 | and 108 | [spatial extent](../guides/opera_rtc_product_guide.md#spatial-coverage-for-on-demand-opera-rtc-s1-products "OPERA RTC-S1 Product Guide" ){target=_blank} 109 | constraints for processing. 110 | 111 | Sentinel-1 Burst SLCs submitted for processing must have been acquired between April 14, 2016, and December 31, 2021, 112 | and must have 113 | [OPERA RTC-S1 Static Layer files](../guides/opera_rtc_product_guide.md#l2-radiometric-terrain-corrected-static-layer-rtc-static-files "OPERA RTC-S1 Product Guide" ){target=_blank} 114 | associated with that burst ID. 115 | 116 | ```json 117 | { 118 | "job_type": "OPERA_RTC_S1", 119 | "name": "opera-rtc-s1-example", 120 | "job_parameters": { 121 | "granules": ["S1_073251_IW2_20200128T020712_VV_2944-BURST"] 122 | } 123 | } 124 | ``` 125 | 126 | ## Submitting Sentinel-1 InSAR jobs 127 | 128 | You can also submit InSAR jobs for scene pairs using [ESA granule IDs](https://sentiwiki.copernicus.eu/web/s1-products#S1-Products-SAR-Naming-Convention){target=_blank}. 
129 | ```json 130 | { 131 | "jobs": [ 132 | { 133 | "name": "minimal-insar-example", 134 | "job_type": "INSAR_GAMMA", 135 | "job_parameters": { 136 | "granules": [ 137 | "S1A_IW_SLC__1SDV_20200203T172103_20200203T172122_031091_03929B_3048", 138 | "S1A_IW_SLC__1SDV_20200110T172104_20200110T172123_030741_03864E_A996" 139 | ] 140 | } 141 | }, 142 | { 143 | "name": "custom-insar-example", 144 | "job_type": "INSAR_GAMMA", 145 | "job_parameters": { 146 | "granules": [ 147 | "S1A_IW_SLC__1SDV_20200527T195012_20200527T195028_032755_03CB56_3D96", 148 | "S1A_IW_SLC__1SDV_20200515T195012_20200515T195027_032580_03C609_4EBA" 149 | ], 150 | "looks": "10x2", 151 | "include_look_vectors": true, 152 | "include_los_displacement": true 153 | } 154 | } 155 | ] 156 | } 157 | ``` 158 | 159 | ## Submitting Sentinel-1 Burst InSAR jobs 160 | You can submit InSAR jobs using the `INSAR_ISCE_BURST` job type, which takes a single pair of [ESA granule burst IDs](https://sentiwiki.copernicus.eu/web/s1-products#S1Products-BurstIDMapsS1-Products-Burst-ID-Maps){target=_blank}, or use the `INSAR_ISCE_MULTI_BURST` job type, which accepts lists of up to 15 contiguous along-track burst IDs that will be merged together to produce reference and secondary input SLCs. 161 | ```json 162 | { 163 | "jobs": [ 164 | { 165 | "job_type": "INSAR_ISCE_BURST", 166 | "name": "single-burst-example", 167 | "job_parameters": { 168 | "granules": [ 169 | "S1_136231_IW2_20200604T022312_VV_7C85-BURST", 170 | "S1_136231_IW2_20200616T022313_VV_5D11-BURST" 171 | ] 172 | } 173 | }, 174 | { 175 | "job_type": "INSAR_ISCE_MULTI_BURST", 176 | "name": "multi-burst-example", 177 | "job_parameters": { 178 | "reference": [ 179 | "S1_136231_IW2_20200604T022312_VV_7C85-BURST", 180 | "S1_136232_IW2_20200604T022315_VV_7C85-BURST" 181 | ], 182 | "secondary": [ 183 | "S1_136231_IW2_20200616T022313_VV_5D11-BURST", 184 | "S1_136232_IW2_20200616T022316_VV_5D11-BURST" 185 | ], 186 | "apply_water_mask": true, 187 | "looks": "5x1" 188 | } 189 | } 190 | ] 191 | } 192 | ``` 193 | 194 | ## Submitting ARIA-S1-GUNW jobs 195 | The ARIA-S1-GUNW job type takes a reference [ESA granule ID](https://sentiwiki.copernicus.eu/web/s1-products#S1-Products-SAR-Naming-Convention){target=_blank} set, a secondary ESA granule ID set, and an ARIA-S1-GUNW Frame ID as input. See the [ARIA-S1-GUNW Product Guide Frame ID section](../guides/gunw_product_guide.md#aria-frame-ids){target=_blank} for more details on these inputs. 196 | ```json 197 | { 198 | "job_type": "ARIA_S1_GUNW", 199 | "name": "gunw-example", 200 | "job_parameters": { 201 | "reference": [ 202 | "S1A_IW_SLC__1SDV_20250127T010136_20250127T010203_057623_07199D_4B63", 203 | "S1A_IW_SLC__1SDV_20250127T010111_20250127T010138_057623_07199D_4E88", 204 | "S1A_IW_SLC__1SDV_20250127T010045_20250127T010113_057623_07199D_4D3B" 205 | ], 206 | "secondary": [ 207 | "S1A_IW_SLC__1SDV_20250103T010137_20250103T010204_057273_070BB6_CD45", 208 | "S1A_IW_SLC__1SDV_20250103T010113_20250103T010140_057273_070BB6_1133", 209 | "S1A_IW_SLC__1SDV_20250103T010047_20250103T010115_057273_070BB6_99C5" 210 | ], 211 | "frame_id": 23474 212 | } 213 | } 214 | ``` 215 | 216 | ## Submitting autoRIFT jobs 217 | 218 | AutoRIFT supports processing Sentinel-1, Sentinel-2, or Landsat-8 Collection 2 pairs. 
219 | 220 | * Sentinel-1 jobs are submitted using [ESA granule IDs](https://sentiwiki.copernicus.eu/web/s1-products#S1-Products-SAR-Naming-Convention){target=_blank} 221 | * Sentinel-2 jobs are submitted using [ESA granule IDs](https://sentinel.esa.int/web/sentinel/user-guides/sentinel-2-msi/naming-convention){target=_blank} 222 | * Landsat-8 Collection 2 jobs are submitted using [USGS scene IDs](https://www.usgs.gov/faqs/what-naming-convention-landsat-collection-2-level-1-and-level-2-scenes?qt-news_science_products=0#qt-news_science_products){target=_blank} 223 | 224 | To submit an example set of jobs including all supported missions, you could write a job list like: 225 | 226 | ```json 227 | { 228 | "jobs": [ 229 | { 230 | "name": "autorift-example", 231 | "job_type": "AUTORIFT", 232 | "job_parameters": { 233 | "granules": [ 234 | "S1A_IW_SLC__1SSH_20170221T204710_20170221T204737_015387_0193F6_AB07", 235 | "S1B_IW_SLC__1SSH_20170227T204628_20170227T204655_004491_007D11_6654" 236 | ] 237 | } 238 | }, 239 | { 240 | "name": "autorift-example", 241 | "job_type": "AUTORIFT", 242 | "job_parameters": { 243 | "granules": [ 244 | "S2B_MSIL1C_20200612T150759_N0209_R025_T22WEB_20200612T184700", 245 | "S2A_MSIL1C_20200627T150921_N0209_R025_T22WEB_20200627T170912" 246 | ] 247 | } 248 | }, 249 | { 250 | "name": "autorift-example", 251 | "job_type": "AUTORIFT", 252 | "job_parameters": { 253 | "granules": [ 254 | "LC08_L1TP_009011_20200703_20200913_02_T1", 255 | "LC08_L1TP_009011_20200820_20200905_02_T1" 256 | ] 257 | } 258 | } 259 | ] 260 | } 261 | ``` 262 | 263 | With your JSON jobs definition, you can `POST` to the `/jobs` endpoint to 264 | submit the jobs. 265 | 266 | 1. Click the green `POST` button next to `/jobs` 267 | 2. Click `Try it out` on the right 268 | 3. Paste your jobs definition into the `Request body` 269 | 4. Click `Execute` 270 | 271 | ![POST /jobs execute](../images/post_jobs_execute.png) 272 | 273 | If your jobs were submitted successfully, you should see a `Code 200` and a 274 | JSON response of your job list, with some additional job attributes filled in. 275 | 276 | ## Querying jobs 277 | 278 | You can `GET` job information from the `/jobs` endpoint. You may provide query 279 | parameters to filter which jobs are returned: 280 | ![GET /jobs query](../images/get_jobs_query.png) 281 | 282 | For our above examples, you can get the RTC job that was submitted with the default options by 283 | searching for `name=minimal-rtc-example`. If you provide *no* query parameters, you'll get a 284 | JSON response with a jobs list for every job you've submitted. 
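If you prefer to query the API outside of the Swagger UI, a minimal sketch using the `requests` library might look like the following. The cookie value is a placeholder you would copy from your browser after signing in to Vertex; note that the HyP3 SDK handles this authentication for you automatically.

```python
import requests

session = requests.Session()
# Placeholder value: copy your `asf-urs` session cookie from your
# browser after signing in to Vertex.
session.cookies.set('asf-urs', 'YOUR-SESSION-COOKIE-VALUE')

response = session.get(
    'https://hyp3-api.asf.alaska.edu/jobs',
    params={'name': 'minimal-rtc-example'},
)
response.raise_for_status()
print(response.json()['jobs'])
```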
285 | 286 | Within the jobs list, a complete job dictionary will look like: 287 | ```JSON 288 | { 289 | "jobs": [ 290 | { 291 | "name": "minimal-rtc-example", 292 | "job_type": "RTC_GAMMA", 293 | "job_parameters": { 294 | "granules": [ 295 | "S1A_IW_SLC__1SSV_20150621T120220_20150621T120232_006471_008934_72D8" 296 | ] 297 | }, 298 | "job_id": "20c377be-2511-46a8-b908-e015abd3c24e", 299 | "user_id": "MY_EDL_USERNAME", 300 | "status_code": "SUCCEEDED", 301 | "request_time": "2021-02-24T21:30:45+00:00", 302 | "expiration_time": "2021-03-11T00:00:00+00:00", 303 | "files": [ 304 | { 305 | "filename": "S1A_IW_20150621T120220_SVP_RTC30_G_gpuned_0AEA.zip", 306 | "s3": { 307 | "bucket": "hyp3-contentbucket-fo259f6r6dn6", 308 | "key": "20c377be-2511-46a8-b908-e015abd3c24e/S1A_IW_20150621T120220_SVP_RTC30_G_gpuned_0AEA.zip" 309 | }, 310 | "size": 28676279, 311 | "url": "https://hyp3-contentbucket-fo259f6r6dn6.s3.us-west-2.amazonaws.com/20c377be-2511-46a8-b908-e015abd3c24e/S1A_IW_20150621T120220_SVP_RTC30_G_gpuned_0AEA.zip" 312 | } 313 | ], 314 | "browse_images": [ 315 | "https://hyp3-contentbucket-fo259f6r6dn6.s3.us-west-2.amazonaws.com/20c377be-2511-46a8-b908-e015abd3c24e/S1A_IW_20150621T120220_SVP_RTC30_G_gpuned_0AEA.png" 316 | ], 317 | "thumbnail_images": [ 318 | "https://hyp3-contentbucket-fo259f6r6dn6.s3.us-west-2.amazonaws.com/20c377be-2511-46a8-b908-e015abd3c24e/S1A_IW_20150621T120220_SVP_RTC30_G_gpuned_0AEA_thumb.png" 319 | ], 320 | "logs": [ 321 | "https://hyp3-contentbucket-fo259f6r6dn6.s3.us-west-2.amazonaws.com/20c377be-2511-46a8-b908-e015abd3c24e/20c377be-2511-46a8-b908-e015abd3c24e.log" 322 | ] 323 | } 324 | ] 325 | } 326 | ``` 327 | 328 | Importantly, the `files` block provides download links for the product files. 329 | 330 | For large queries, results may be truncated. In this case, the response will include a `next` key containing a URL to continue the query (the continued response may itself be truncated and include another `next` key). 331 | ```JSON 332 | { 333 | "jobs": [ 334 | ... 335 | ], 336 | "next": "https://hyp3-api.asf.alaska.edu/jobs?start_token=eyJqb2JfaWQiOiAiYzk1MDUzY2ItYWQzNy00ZGFhLTgxZDItYzA0YmQ4NWZiNDhiIiwgInVzZXJfaWQiOiAiamxyaW5lMiIsICJyZXF1ZXN0X3RpbWUiOiAiMjAyMC0xMC0yOVQxOTo0Mzo0NCswMDowMCJ9" 337 | } 338 | ``` 339 | -------------------------------------------------------------------------------- /docs/using/credits.md: -------------------------------------------------------------------------------- 1 | # Credits 2 | 3 | On Demand users are given an allotment of **{{ CREDITS_PER_MONTH }} credits per month** to use for processing jobs, 4 | and each type of job costs a different number of credits, as shown in the [Credit Cost Table](#credit-cost-table). 5 | 6 | The "Maximum Jobs Per Month" column displays the maximum number of jobs that you 7 | would be able to run in a single month if you spent your entire monthly credit 8 | allotment on jobs of that particular type. 
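For example, a job type that costs 60 credits allows at most {{ max_jobs_per_month(60) }} jobs per month ({{ CREDITS_PER_MONTH }} credits divided by 60 credits per job, rounded down).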
9 | 10 | ## Credit Cost Table 11 | | Job Type | Cost (credits) | Maximum Jobs Per Month | 12 | |---------------------------------------------------------------|---------------:|------------------------------:| 13 | | [**RTC**](../guides/rtc_product_guide.md) | | | 14 | | {{ table_indent() }} 30-m pixel spacing | 5 | {{ max_jobs_per_month(5) }} | 15 | | {{ table_indent() }} 20-m pixel spacing | 15 | {{ max_jobs_per_month(15) }} | 16 | | {{ table_indent() }} 10-m pixel spacing | 60 | {{ max_jobs_per_month(60) }} | 17 | | [**OPERA RTC-S1**](../guides/opera_rtc_product_guide.md) | | | 18 | | {{ table_indent() }} Standard product (30-m pixel spacing) | 1 | {{ max_jobs_per_month(1) }} | 19 | | [**InSAR**](../guides/insar_product_guide.md) | | | 20 | | {{ table_indent() }} 80-m pixel spacing (20x4 looks) | 10 | {{ max_jobs_per_month(10) }} | 21 | | {{ table_indent() }} 40-m pixel spacing (10x2 looks) | 15 | {{ max_jobs_per_month(15) }} | 22 | | [**ARIA S1 GUNW**](../guides/gunw_product_guide.md) | | | 23 | | {{ table_indent() }} Standard product (90-m pixel spacing) | 60 | {{ max_jobs_per_month(60) }} | 24 | | [**Burst InSAR**](../guides/burst_insar_product_guide.md) | | | 25 | | {{ table_indent() }} 80-m pixel spacing (20x4 looks) | | | 26 | | {{ table_indent(count=2) }} 1–4 pairs | 1 | {{ max_jobs_per_month(1) }} | 27 | | {{ table_indent(count=2) }} 5–15 pairs | 5 | {{ max_jobs_per_month(5) }} | 28 | | {{ table_indent() }} 40-m pixel spacing (10x2 looks) | | | 29 | | {{ table_indent(count=2) }} 1–3 pairs | 1 | {{ max_jobs_per_month(1) }} | 30 | | {{ table_indent(count=2) }} 4–9 pairs | 5 | {{ max_jobs_per_month(5) }} | 31 | | {{ table_indent(count=2) }} 10–15 pairs | 10 | {{ max_jobs_per_month(10) }} | 32 | | {{ table_indent() }} 20-m pixel spacing (5x1 looks) | | | 33 | | {{ table_indent(count=2) }} 1 pair | 1 | {{ max_jobs_per_month(1) }} | 34 | | {{ table_indent(count=2) }} 2 pairs | 5 | {{ max_jobs_per_month(5) }} | 35 | | {{ table_indent(count=2) }} 3 pairs | 10 | {{ max_jobs_per_month(10) }} | 36 | | {{ table_indent(count=2) }} 4 pairs | 15 | {{ max_jobs_per_month(15) }} | 37 | | {{ table_indent(count=2) }} 5 pairs | 20 | {{ max_jobs_per_month(20) }} | 38 | | {{ table_indent(count=2) }} 6 pairs | 25 | {{ max_jobs_per_month(25) }} | 39 | | {{ table_indent(count=2) }} 7 pairs | 30 | {{ max_jobs_per_month(30) }} | 40 | | {{ table_indent(count=2) }} 8 pairs | 35 | {{ max_jobs_per_month(35) }} | 41 | | {{ table_indent(count=2) }} 9 pairs | 40 | {{ max_jobs_per_month(40) }} | 42 | | {{ table_indent(count=2) }} 10 pairs | 45 | {{ max_jobs_per_month(45) }} | 43 | | {{ table_indent(count=2) }} 11 pairs | 90 | {{ max_jobs_per_month(90) }} | 44 | | {{ table_indent(count=2) }} 12 pairs | 95 | {{ max_jobs_per_month(95) }} | 45 | | {{ table_indent(count=2) }} 13 pairs | 100 | {{ max_jobs_per_month(100) }} | 46 | | {{ table_indent(count=2) }} 14 pairs | 105 | {{ max_jobs_per_month(105) }} | 47 | | {{ table_indent(count=2) }} 15 pairs | 110 | {{ max_jobs_per_month(110) }} | 48 | | [**AutoRIFT**](https://its-live.jpl.nasa.gov/){target=_blank} | | | 49 | | {{ table_indent() }} Standard product (120-m pixel spacing) | 50 | {{ max_jobs_per_month(50) }} | 50 | 51 | The credit cost of a given job is roughly proportional to the computational resources required to process the job, 52 | allowing us to distribute our resources more equitably. 
53 | This supports our mission of 54 | [making remote-sensing data accessible](https://asf.alaska.edu/about-asf/ 'asf.alaska.edu/about-asf' ){target=_blank}, 55 | with the goal of providing valuable products to the widest breadth of users possible. 56 | 57 | If your monthly credit allotment doesn't meet your needs, 58 | please contact us and let us know how you would like to use our service. 59 | We may be able to support increased processing, depending on your requirements. 60 | All requests will be balanced against our mission: to make remote-sensing data accessible to the community. 61 | 62 | ## Contact Us 63 | 64 | {% include 'contact-snippet.md' %} 65 | -------------------------------------------------------------------------------- /docs/using/requesting_access.md: -------------------------------------------------------------------------------- 1 | # Requesting Access 2 | 3 | 4 | Starting on **TODO: date**, new users will be required to request access 5 | before submitting jobs for On Demand processing using the HyP3 platform. 6 | HyP3 accounts are limited to one per person. 7 | If you have used HyP3 for On Demand processing in the past, you will be able to continue using our service without requesting access. 8 | 9 | 10 | 11 | 12 | You will need an [Earthdata Login](https://urs.earthdata.nasa.gov/ 'https://urs.earthdata.nasa.gov/' ){target=_blank} 13 | (EDL) account before requesting access. 14 | You can [register here](https://urs.earthdata.nasa.gov/users/new 'https://urs.earthdata.nasa.gov/users/new' ){target=_blank} 15 | if you do not already have an account. 16 | 17 | After submitting your request, you will receive an email with your approval status within **two business days.** 18 | You can expect to be approved if you have not already registered for HyP3 access 19 | using a different Earthdata Login account. 20 | 21 | Once your access request has been approved, you can submit jobs for On Demand processing using your EDL credentials. 22 | *You only need to complete this approval process once.* 23 | 24 | 25 | 31 | 32 | ## Workshops and Tutorials 33 | 34 | If you are leading a workshop or tutorial and your participants will require access to HyP3, 35 | we can provide you with a unique access code to streamline the access request process. Your participants can enter 36 | this code in the access request form to receive immediate, automatic approval. 37 | 38 | If you would like to request a unique access code, please email 39 | [uso@asf.alaska.edu](mailto:uso@asf.alaska.edu "uso@asf.alaska.edu") with a description of your workshop or tutorial. 40 | 41 | ## Contact Us 42 | 43 | Email ASF User Services at [uso@asf.alaska.edu](mailto:uso@asf.alaska.edu "uso@asf.alaska.edu") with any questions 44 | regarding access to HyP3. 
45 | -------------------------------------------------------------------------------- /docs/using/sdk.md: -------------------------------------------------------------------------------- 1 | {{ get_content('https://raw.githubusercontent.com/ASFHyP3/hyp3-sdk/v7.5.0/README.md') }} 2 | -------------------------------------------------------------------------------- /docs/using/sdk_api.md: -------------------------------------------------------------------------------- 1 | # `hyp3_sdk` *v7.5.0* API Reference 2 | 3 | ::: hyp3_sdk 4 | options: 5 | show_root_heading: true 6 | show_submodules: true 7 | -------------------------------------------------------------------------------- /docs/using/subscriptions.md: -------------------------------------------------------------------------------- 1 | # Subscriptions in HyP3 2 | 3 | The Subscriptions feature of HyP3 4 | has been removed in favor of a more flexible approach. 5 | You can follow [these tutorials](../tutorials/process-new-granules-for-search-parameters.md) 6 | to achieve subscription-like functionality using Jupyter notebooks. 7 | -------------------------------------------------------------------------------- /docs/using/vertex.md: -------------------------------------------------------------------------------- 1 | # On Demand Sentinel-1 Processing in Vertex 2 | 3 | The Alaska Satellite Facility offers [On Demand processing of Sentinel-1 datasets to Radiometric Terrain Correction (RTC) or Interferometric SAR (InSAR) products through Vertex](https://search.asf.alaska.edu/#/?topic=onDemand "Vertex On Demand Documentation" ){target=_blank}, ASF's Data Search web portal. You can submit scenes to be processed into higher-level products, avoiding the cost and complexity of performing such processing yourself. 4 | 5 | [![Vertex Image](../images/vertex.png "Click to open Vertex in a new tab")](https://search.asf.alaska.edu/ "https://search.asf.alaska.edu" ){target=_blank} 6 | 7 | On Demand Sentinel-1 products are generated using ASF's HyP3 processing platform, leveraging GAMMA Software. Products are distributed as UTM-projected GeoTIFFs. To learn more about the finished products, refer to the Product Guides: 8 | 9 | * [ASF Sentinel-1 RTC Product Guide](../guides/rtc_product_guide.md) 10 | * [ASF Sentinel-1 InSAR Product Guide](../guides/insar_product_guide.md) 11 | 12 | ## Getting Started 13 | 14 | To request On Demand products, visit [ASF Data Search - Vertex](https://search.asf.alaska.edu "https://search.asf.alaska.edu" ){target=_blank}. 15 | 16 | 1. **Select your scenes** - RTC processing is available for Sentinel-1 GRD-H and SLC scenes with a beam mode of IW. InSAR processing requires pairs of IW SLC scenes. Use the Geographic Search in Vertex to find individual scenes to submit for RTC processing, or reference scenes to use for generating InSAR pairs. For InSAR, once you find a reference scene, use either the [Baseline](https://docs.asf.alaska.edu/vertex/baseline/ "Vertex Baseline Documentation" ){target=_blank} or [SBAS](https://docs.asf.alaska.edu/vertex/sbas/ "Vertex SBAS Documentation" ){target=_blank} Search to find scene pairs to submit for processing. 17 | 18 | 2. **Submit your request** - After selecting your scenes, access the *On Demand* queue to submit your processing request. You may process jobs worth up to a total of {{ CREDITS_PER_MONTH }} credits per month. See our [Credits](./credits.md) page for more details. 19 | 20 | 3. 
**Monitor your request** - The *On Demand Products* search type displays your running and completed requests. New requests are typically available for download within an hour, but wait time will depend on processing load. 21 | 22 | 4. **Download your data** - Finished On Demand products can be downloaded after an *On Demand Products* search either directly or via your download queue. On Demand products are retained and available to download for two weeks after processing. 23 | 24 | ## Tutorials 25 | 26 | Refer to our step-by-step tutorials for ordering and accessing [RTC](https://storymaps.arcgis.com/stories/2ead3222d2294d1fae1d11d3f98d7c35 "RTC On Demand StoryMap" ){target=_blank} and [InSAR](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3 "InSAR On Demand StoryMap" ){target=_blank} products in Vertex. 27 | 28 | [![RTC On Demand Image](../images/rtc-tutorial.png "Click to open RTC On Demand! tutorial")](https://storymaps.arcgis.com/stories/2ead3222d2294d1fae1d11d3f98d7c35 "RTC On Demand!" ){target=_blank} 29 | [![InSAR On Demand Image](../images/insar-tutorial.png "Click to open InSAR On Demand! tutorial")](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3 "InSAR On Demand!" ){target=_blank} 30 | -------------------------------------------------------------------------------- /docs/v2-transition.md: -------------------------------------------------------------------------------- 1 | # Welcome to HyP3 v2 2 | 3 | As of September 30, 2021, our beta HyP3 service 4 | has been retired in favor of our new On Demand service powered by HyP3 version 2 (hereafter, just "HyP3"). 5 | 6 | On Demand processing through HyP3 is now available directly in [Vertex](https://search.asf.alaska.edu/ "https://search.asf.alaska.edu" ){target=_blank}, 7 | ASF's data search portal. Vertex provides a friendly interface to request 8 | processing jobs and review previous jobs. To learn how to request jobs through Vertex, please consult the following resources: 9 | 10 | - [Vertex On Demand](https://search.asf.alaska.edu/#/?topic=onDemand "Vertex On Demand Tutorial" ){target=_blank} video tutorial 11 | - [InSAR On Demand!](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3 "InSAR On Demand! StoryMap" ){target=_blank} StoryMap tutorial 12 | - [RTC On Demand!](https://storymaps.arcgis.com/stories/2ead3222d2294d1fae1d11d3f98d7c35 "RTC On Demand! StoryMap" ){target=_blank} StoryMap tutorial 13 | 14 | For more information, check out our full documentation at <https://hyp3-docs.asf.alaska.edu/>. 15 | If you have any comments, questions, or concerns, please reach out to us! We love feedback. 16 | 17 | ## Contact Us 18 | 19 | {% include 'contact-snippet.md' %} 20 | -------------------------------------------------------------------------------- /docs/water_masking.md: -------------------------------------------------------------------------------- 1 | # Water Masking 2 | 3 | ASF maintains a global water mask dataset for use during InSAR processing. 4 | 5 | Unwrapping phase differences over waterbodies can introduce unwrapping errors, resulting in misleading deformation signals. Applying a water mask to the interferogram *before* phase unwrapping can significantly improve the quality of the unwrapped interferogram, as illustrated in ASF's [InSAR Water Masking Tutorial](https://storymaps.arcgis.com/stories/485916be1b1d46889aa436794b5633cb "InSAR Water Masking StoryMap" ){target=_blank}. 
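Conceptually, masking amounts to excluding water pixels from the phase unwrapping input. The following is a minimal NumPy sketch, not ASF's production code (which uses GAMMA software); the file names are hypothetical, and the mask follows the convention used in InSAR product packages (water = 0, all other pixels = 1):

```python
import numpy as np

# Hypothetical inputs: a wrapped interferogram and a water mask
# (water = 0, everything else = 1) on the same pixel grid.
wrapped_phase = np.load('wrapped_phase.npy')
water_mask = np.load('water_mask.npy')

# Exclude water pixels so they cannot contribute unwrapping errors.
masked_phase = np.where(water_mask == 1, wrapped_phase, np.nan)
```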
6 | 7 | When ordering [On-Demand InSAR products from ASF](https://hyp3-docs.asf.alaska.edu/guides/insar_product_guide "ASF Sentinel-1 InSAR Product Guide" ){target=_blank}, users can choose the [option to apply the water mask](https://hyp3-docs.asf.alaska.edu/guides/insar_product_guide/#apply-water-mask "InSAR Product Guide - Processing Options - Apply Water Mask" ){target=_blank} prior to phase unwrapping. Even if users choose *not* to apply the water mask to the interferogram, a copy of the water mask is always included in the InSAR product package for reference. 8 | 9 | ## Water Mask Dataset 10 | 11 | ASF implemented the use of a new water mask for InSAR processing on February 15, 2024. The surface water extent datasets available from [OpenStreetMap](https://www.openstreetmap.org/about "openstreetmap.org/about" ){target=_blank} and [ESA WorldCover](https://esa-worldcover.org/en/about/about "esa-worldcover.org/en/about" ){target=_blank} were a significant improvement over the outdated version of the [Global Self-consistent, Hierarchical, High-resolution Geography](https://storymaps.arcgis.com/stories/485916be1b1d46889aa436794b5633cb#ref-n-pezhKQ "InSAR Water Masking Tutorial - GSSICB" ){target=_blank} dataset that we were using prior to this change. The data from these new sources is more recent, more detailed, and has fewer geolocation artifacts. 12 | 13 | The code used to generate this global water mask is available as part of the [asf_tools Python package](https://github.com/ASFHyP3/asf-tools "github.com/ASFHyP3/asf-tools" ){target=_blank}. More information on generating your own water mask using the same approach is available in the [readme file for the watermasking subpackage in the asf-tools GitHub repository](https://github.com/ASFHyP3/asf-tools/tree/develop/src/asf_tools/watermasking "asf_tools GitHub repo water masking readme" ){target=_blank}. 14 | 15 | ### Source Data 16 | 17 | ASF's water mask uses data from both [OpenStreetMap](https://www.openstreetmap.org/about "openstreetmap.org/about" ){target=_blank} and [ESA WorldCover](https://esa-worldcover.org/en/about/about "esa-worldcover.org/en/about" ){target=_blank}. Areas within Canada, Alaska, and Russia are primarily covered by ESA WorldCover data, while the rest of the world is covered by OpenStreetMap data. 18 | 19 | The water mask identifies coastal waters and most inland waterbodies. All remaining pixels (land, islands in large lakes, very small inland waterbodies, and landfast Antarctic ice) are considered to be not water. 20 | 21 | Source data for the water mask is only available from 85°S to 85°N. Areas north of 85°N are all treated as water, and areas south of 85°S are all treated as not water. 22 | 23 | #### OpenStreetMap (OSM) 24 | 25 | [OpenStreetMap](https://www.openstreetmap.org/about "openstreetmap.org/about" ){target=_blank} is a crowd-sourced open-data mapping effort. The [OSM database](https://planet.openstreetmap.org/ "planet.openstreetmap.org" ){target=_blank} of geographic features can be accessed by anyone, and it includes a number of categories that can be used to map surface water extent. 26 | 27 | OSM data was used to generate the water mask for all areas except Canada, Alaska, and Russia. 
To extract the relevant water extent data from the OSM database, the following filters were applied: 28 | 29 | - wr/natural = water 30 | - landuse = reservoir 31 | - waterway = * 32 | 33 | In many cases, waterway features stretch from one bank to the other, so islands within those waterways would not be identified as land. To remove islands from the water mask extent, the following filters were applied to the extracted surface water dataset: 34 | 35 | - place = island 36 | - place = islet 37 | 38 | The resulting list of features was exported as a shapefile, then converted to raster format for inclusion in the reference water mask. 39 | 40 | #### ESA WorldCover 41 | 42 | In October 2021, the European Space Agency (ESA) released the first version of its global land cover dataset, [WorldCover](https://esa-worldcover.org/en/about/about "esa-worldcover.org/en/about" ){target=_blank}. It uses remote sensing data from the Sentinel-1 and Sentinel-2 missions to generate land cover classes, including water. More information is available from the [ESA WorldCover 2020 website](https://worldcover2020.esa.int/ "worldcover2020.esa.int" ){target=_blank}. 43 | 44 | This dataset was used to generate the water masks for Canada, Alaska, and Russia. It includes one class for permanent water bodies. The version 1.0 source rasters were downloaded from the [ESA WorldCover 2020 Downloader site](https://worldcover2020.esa.int/downloader "worldcover2020.esa.int/downloader" ){target=_blank}. They were reclassified so that all areas with a value of 80 (Permanent water bodies) were defined as water, and all other values were considered not water. 45 | 46 | ## Reference Water Mask 47 | 48 | The water mask rasters generated from the OSM and WorldCover datasets were mosaicked together, then tiled to 5° latitude by 5° longitude for storage. Because source data is only available from 85°S to 85°N, tiles were added to fill the polar areas. All pixels north of 85°N are treated as water, and all pixels south of 85°S are treated as land. 49 | 50 | This reference dataset is stored in a public AWS S3 bucket: 51 | 52 | `s3://asf-dem-west/WATER_MASK/TILES/` 53 | 54 | In the reference raster dataset hosted in AWS, pixels with surface water are assigned a value of 1, and all other pixels are assigned a value of 0. 55 | 56 | ***Note that the pixel values used in the reference water mask are opposite to the pixel values used for the water masks included in the InSAR product packages. Refer to the [Applying the Water Mask section](#applying-the-water-mask-during-insar-processing "Jump to Applying the Water Mask during InSAR Processing section") for more information on how the reference water mask is transformed during InSAR processing.*** 57 | 58 | ### Acknowledgments 59 | 60 | #### OpenStreetMap 61 | OpenStreetMap® is open data, licensed under the [Open Data Commons Open Database License](https://opendatacommons.org/licenses/odbl/ "opendatacommons.org/licenses/odbl" ){target=_blank} (ODbL) by the [OpenStreetMap Foundation](https://osmfoundation.org/ "osmfoundation.org" ){target=_blank} (OSMF). 62 | 63 | #### ESA WorldCover 64 | © ESA WorldCover project. Contains modified Copernicus Sentinel data (2020) processed by ESA WorldCover consortium. 
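Because the reference dataset described above is stored in a public S3 bucket, the available tiles can be browsed anonymously. A minimal sketch with `boto3` follows; the `us-west-2` region is an assumption, while the bucket and prefix come from the Reference Water Mask section above:

```python
import boto3
from botocore import UNSIGNED
from botocore.config import Config

# Anonymous (unsigned) requests, since the bucket is public.
# The us-west-2 region is an assumption.
s3 = boto3.client('s3', region_name='us-west-2', config=Config(signature_version=UNSIGNED))

response = s3.list_objects_v2(Bucket='asf-dem-west', Prefix='WATER_MASK/TILES/', MaxKeys=10)
for tile in response.get('Contents', []):
    print(tile['Key'])
```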
65 | 66 | ## Applying the Water Mask during InSAR Processing 67 | 68 | When an InSAR job is submitted for [ASF's On Demand processing](https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3 "InSAR On Demand Tutorial" ){target=_blank}, the coordinates of the four corners of the input Sentinel-1 scene are used to find the water mask tile(s) that cover the scene. If the scene crosses multiple tiles, the necessary tiles are mosaicked together. The water mask is then clipped to match the spatial extent of the input Sentinel-1 scene pair. 69 | 70 | The pixel values of the mosaicked and clipped water mask are changed to meet the requirements of the InSAR processing software. **Water pixels are assigned a value of 0, and all remaining pixels are assigned a value of 1.** *Note that these pixel values differ from the reference water mask, where water pixels have a value of 1 and all other pixels have a value of 0.* 71 | 72 | If the option to [Apply Water Mask](https://hyp3-docs.asf.alaska.edu/guides/insar_product_guide/#apply-water-mask "InSAR Product Guide - Processing Options - Apply Water Mask" ){target=_blank} was selected by the user submitting the InSAR job, this mask is then used as an input, along with coherence values, to generate the [validity mask](https://hyp3-docs.asf.alaska.edu/guides/insar_product_guide/#masking "InSAR Product Guide - Masking" ){target=_blank} used for phase unwrapping. The 0-value water pixels are excluded from use in phase unwrapping. 73 | 74 | A copy of the water mask is always included in the InSAR product package for reference, even if the user chose not to select the option to apply the water mask. In this copy of the water mask, the pixel values are the same as what is used in InSAR processing: pixels indicating water have a value of 0, and all other pixels are assigned a value of 1. 75 | 76 | ## Older Water Mask Versions 77 | 78 | The first water mask that ASF used for InSAR On-Demand processing was generated using the [Global Self-consistent, Hierarchical, High-resolution Geography Database (GSHHG)](http://www.soest.hawaii.edu/wessel/gshhg/ "soest.hawaii.edu/wessel/gshhg/" ){target=_blank} dataset. 79 | 80 | This mask combined the GSHHG full-resolution L1 (boundary between land and ocean) and L5 (boundary between Antarctic landfast ice and ocean) datasets, and removed the L2 (boundary between land and large inland waterbodies) dataset minus the L3 (islands) dataset. 81 | 82 | Originally, the dataset was buffered out 3 km along coastlines and 5 km along the shorelines of inland waterbodies. This buffer was included to decrease the chance that valid land pixels would be excluded from phase unwrapping due to outdated shorelines or geolocation offsets. The [discovery that the inclusion of this extra water still led to phase unwrapping errors](https://storymaps.arcgis.com/stories/485916be1b1d46889aa436794b5633cb#ref-n-sKugOV "InSAR Water Mask Tutorial - Mt. Edgecumbe" ){target=_blank} resulted in the removal of the buffer from the dataset, effective September 27, 2022. 83 | 84 | ## Learn More 85 | 86 | Refer to the [InSAR Water Masking Tutorial](https://storymaps.arcgis.com/stories/485916be1b1d46889aa436794b5633cb "InSAR Water Masking StoryMap" ){target=_blank} for detailed descriptions of the changes to the water mask used for InSAR processing, and interactive content illustrating the impacts these changes have had on output products. 
87 | 88 | [![InSAR Water Masking Tutorial](images/watermask-tutorial.png "InSAR Water Masking StoryMap Tutorial")](https://storymaps.arcgis.com/stories/485916be1b1d46889aa436794b5633cb "InSAR Water Masking StoryMap Tutorial" ){target=_blank} -------------------------------------------------------------------------------- /docs/whats_new.md: -------------------------------------------------------------------------------- 1 | # What's New 2 | 3 | Follow [@ASFHyP3](https://twitter.com/ASFHyP3 "https://twitter.com/ASFHyP3" ){target=_blank} on Twitter 4 | to keep up to date on all things HyP3! 5 | 6 | 7 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | name: hyp3-docs 2 | channels: 3 | - conda-forge 4 | - nodefaults 5 | dependencies: 6 | - 'python=3.10' 7 | - pip 8 | - hyp3_sdk=7.5.0 # also pinned in docs/using/sdk.md 9 | - asf_tools=0.8.3 # also pinned in docs/tools/asf_tools.md 10 | - pip: 11 | - -r requirements.txt 12 | -------------------------------------------------------------------------------- /macros.py: -------------------------------------------------------------------------------- 1 | import requests 2 | 3 | _CREDITS_PER_MONTH_VALUE = 10_000 4 | CREDITS_PER_MONTH = f'{_CREDITS_PER_MONTH_VALUE:,}' 5 | 6 | 7 | def define_env(env): 8 | env.macro(CREDITS_PER_MONTH, 'CREDITS_PER_MONTH') 9 | 10 | def get_content(url): 11 | response = requests.get(url) 12 | response.raise_for_status() 13 | return response.content.decode() 14 | 15 | env.macro(get_content, 'get_content') 16 | 17 | def table_indent(count=1): 18 | return ' ' * count * 8 19 | 20 | env.macro(table_indent, 'table_indent') 21 | 22 | def max_jobs_per_month(credit_cost): 23 | return f'{_CREDITS_PER_MONTH_VALUE // credit_cost:,}' 24 | 25 | env.macro(max_jobs_per_month, 'max_jobs_per_month') 26 | -------------------------------------------------------------------------------- /mkdocs.yml: -------------------------------------------------------------------------------- 1 | site_name: HyP3 2 | site_url: https://hyp3-docs.asf.alaska.edu/ 3 | site_author: ASF APD/Tools Team 4 | site_description: The Alaska Satellite Facility's Hybrid Pluggable Processing Pipeline 5 | 6 | # ASF's Google Analytics 7 | google_analytics: 8 | - UA-991100-5 9 | - search.asf.alaska.edu 10 | 11 | theme: 12 | name: asf-theme 13 | custom_dir: overrides 14 | logo: images/HyP3-graphic-only.png 15 | icon: 16 | repo: fontawesome/brands/github-alt 17 | features: 18 | # - navigation.instant # This doesn't work with the NASA Earthdata tophat; see ASFHyP3/hyp3-docs#371 19 | - navigation.footer 20 | - announce.dismiss 21 | 22 | repo_url: https://github.com/ASFHyP3 23 | repo_name: ASF HyP3 24 | edit_uri: '' 25 | 26 | extra: 27 | social: 28 | - icon: fontawesome/brands/gitter 29 | link: https://gitter.im/ASFHyP3/community 30 | 31 | markdown_extensions: 32 | - toc: 33 | permalink: true 34 | toc_depth: 4 35 | - attr_list 36 | - admonition 37 | - pymdownx.highlight 38 | - pymdownx.superfences 39 | - pymdownx.arithmatex: 40 | generic: true 41 | 42 | extra_javascript: 43 | - javascripts/mathjax.js 44 | - https://polyfill.io/v3/polyfill.min.js?features=es6 45 | - https://cdn.jsdelivr.net/npm/mathjax@3/es5/tex-mml-chtml.js 46 | 47 | nav: 48 | - Home: index.md 49 | - Using HyP3: 50 | - using.md 51 | # TODO TOOL-2787: uncomment this line: 52 | #- Requesting Access: using/requesting_access.md 53 | - Vertex: using/vertex.md 54 | - SDK: 55 | - 
using/sdk.md 56 | - Example Notebook: https://github.com/ASFHyP3/hyp3-sdk/blob/main/docs/sdk_example.ipynb" target="_blank 57 | - API Reference: using/sdk_api.md 58 | - API: using/api.md 59 | - Credits: using/credits.md 60 | - Subscriptions: using/subscriptions.md 61 | - Products: 62 | - products.md 63 | - RTC: 64 | - guides/rtc_product_guide.md 65 | - Product Guide: guides/rtc_product_guide.md 66 | - StoryMap: https://storymaps.arcgis.com/stories/2ead3222d2294d1fae1d11d3f98d7c35" target="_blank 67 | - Theoretical Basis: guides/rtc_atbd.md 68 | - InSAR: 69 | - guides/insar_product_guide.md 70 | - Product Guide: guides/insar_product_guide.md 71 | - StoryMap: https://storymaps.arcgis.com/stories/68a8a3253900411185ae9eb6bb5283d3" target="_blank 72 | - Burst InSAR: 73 | - guides/burst_insar_product_guide.md 74 | - Product Guide: guides/burst_insar_product_guide.md 75 | - StoryMap: https://storymaps.arcgis.com/stories/191bf1b6962c402086807390b3ce63b0" target="_blank 76 | - ARIA S1 GUNW: 77 | - guides/gunw_product_guide.md 78 | - Product Guide: guides/gunw_product_guide.md 79 | - AutoRIFT: https://its-live.jpl.nasa.gov/" target="_blank 80 | - Usage Guidelines: usage_guidelines.md 81 | - Sentinel-1 Mission: sentinel1.md 82 | - SAR Basics: 83 | - guides/introduction_to_sar.md 84 | - Introduction to SAR: guides/introduction_to_sar.md 85 | - Digital Elevation Models: dems.md 86 | - Water Masking: water_masking.md 87 | - SAR FAQ: https://asf.alaska.edu/information/sar-information/what-is-sar/#sar_faq" target="_blank 88 | - Tutorials: 89 | - tutorials.md 90 | - Using HyP3 Python SDK: https://github.com/ASFHyP3/hyp3-sdk/blob/main/docs/sdk_example.ipynb" target="_blank 91 | - Search another user's jobs: https://github.com/ASFHyP3/hyp3-sdk/blob/main/docs/search_other_user_jobs.ipynb" target="_blank 92 | - Update existing job name: https://github.com/ASFHyP3/hyp3-sdk/blob/main/docs/hyp3_job_name_change.ipynb" target="_blank 93 | - Process new granules for search parameters: tutorials/process-new-granules-for-search-parameters.md 94 | - InSAR time series with MintPy: https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/hyp3_insar_stack_for_ts_analysis.ipynb" target="_blank 95 | - InSAR burst time series with MintPy: https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/hyp3_isce2_burst_stack_for_ts_analysis.ipynb" target="_blank 96 | - Merging burst InSAR products: https://github.com/ASFHyP3/hyp3-docs/blob/develop/docs/tutorials/hyp3_isce2_burst_merge.ipynb" target="_blank 97 | - StoryMap Tutorials: https://asf-daac.maps.arcgis.com/home/index.html" target="_blank 98 | - Other Tools: 99 | - ArcGIS Toolbox: tools/arcgis_toolbox.md 100 | - ASF Tools for Python: 101 | - tools/asf_tools.md 102 | - API Reference: tools/asf_tools_api.md 103 | - Developers: 104 | - Architecture: how_it_works.md 105 | - Plugins: plugins.md 106 | - Contributing: contributing.md 107 | - Code of Conduct: https://github.com/ASFHyP3/.github/blob/main/CODE_OF_CONDUCT.md" target="_blank 108 | - What's New: whats_new.md 109 | - Contact Us: contact.md 110 | 111 | plugins: 112 | - search 113 | - section-index 114 | - macros: 115 | module_name: macros 116 | - mkdocstrings: 117 | handlers: 118 | python: 119 | inherited_members: true 120 | rendering: 121 | show_root_toc_entry: false 122 | - redirects: 123 | redirect_maps: 124 | # full link to our hosted docs needed b/c mkdocs-redirects doesn't support hash fragments 125 | # See: https://github.com/datarobot/mkdocs-redirects/issues/16 126 | getting_started.md: 
'https://hyp3-docs.asf.alaska.edu/#getting-started' 127 | tutorials/mintpy.md: 'https://github.com/ASFHyP3/hyp3-docs/blob/main/docs/tutorials/hyp3_insar_stack_for_ts_analysis.ipynb' 128 | using/quota.md: 'using/credits.md' 129 | -------------------------------------------------------------------------------- /overrides/main.html: -------------------------------------------------------------------------------- 1 | {% extends "partials/main.html" %} 2 | 3 | {# Uncomment this block to enable the announcement banner: 4 | {% block announce %} 5 |
6 | ⚠️ TODO: Your announcement here.
7 | Read the full announcement. 8 |
9 | {% endblock %} 10 | #} 11 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | mkdocs==1.5.3 2 | mkdocs-material==9.4.2 3 | mkdocs-asf-theme==0.4.0 4 | mkdocs-redirects 5 | mkdocs-section-index 6 | mkdocstrings==0.27.0 # FIXME: https://github.com/ASFHyP3/hyp3-docs/issues/498 7 | mkdocstrings-python 8 | mkdocs-macros-plugin 9 | requests 10 | --------------------------------------------------------------------------------