├── .coveragerc
├── asf_search
├── health
│ ├── __init__.py
│ └── health.py
├── constants
│ ├── FLIGHT_DIRECTION.py
│ ├── INSTRUMENT.py
│ ├── PRODUCTION_CONFIGURATION.py
│ ├── RANGE_BANDWIDTH.py
│ ├── PLATFORM.py
│ ├── POLARIZATION.py
│ ├── DATASET.py
│ ├── __init__.py
│ ├── BEAMMODE.py
│ ├── INTERNAL.py
│ └── PRODUCT_TYPE.py
├── baseline
│ ├── __init__.py
│ └── stack.py
├── WKT
│ ├── __init__.py
│ └── RepairEntry.py
├── ASFSearchOptions
│ ├── __init__.py
│ ├── config.py
│ ├── validator_map.py
│ └── ASFSearchOptions.py
├── download
│ ├── __init__.py
│ ├── file_download_type.py
│ └── download.py
├── export
│ ├── __init__.py
│ ├── geojson.py
│ ├── export_translators.py
│ ├── jsonlite2.py
│ └── metalink.py
├── CMR
│ ├── __init__.py
│ ├── MissionList.py
│ └── field_map.py
├── search
│ ├── __init__.py
│ ├── granule_search.py
│ ├── product_search.py
│ ├── campaigns.py
│ ├── error_reporting.py
│ ├── collection_attributes.py
│ ├── search_count.py
│ └── baseline_search.py
├── Products
│ ├── UAVSARProduct.py
│ ├── SMAPProduct.py
│ ├── SIRCProduct.py
│ ├── __init__.py
│ ├── AIRSARProduct.py
│ ├── JERSProduct.py
│ ├── RADARSATProduct.py
│ ├── ERSProduct.py
│ ├── SEASATProduct.py
│ ├── ALOSProduct.py
│ ├── NISARProduct.py
│ ├── ALOS2Product.py
│ └── S1BurstProduct.py
├── exceptions.py
├── __init__.py
├── ASFStackableProduct.py
├── Pair.py
└── ASFSearchResults.py
├── tests
├── yml_tests
│ ├── Resources
│ │ ├── ShorelineMask26.shp
│ │ ├── Fairbanks_SLC.metalink
│ │ ├── Fairbanks_SLC.csv
│ │ ├── Fairbanks_SLC_jsonlite2.json
│ │ ├── Fairbanks_SLC_jsonlite.json
│ │ ├── Fairbanks_SLC.kml
│ │ ├── Fairbanks_SLC_Incomplete_Meta.yml
│ │ └── RADARSAT.yml
│ ├── test_known_bugs.yml
│ ├── test_authenticated
│ │ ├── test_collection_attributes_Auth.yml
│ │ └── test_ASFSubproduct_Auth.yml
│ ├── test_campaigns.yml
│ ├── test_notebooks.yml
│ ├── test_search_generator.yml
│ ├── test_download.yml
│ ├── test_ASFSubproduct.yml
│ ├── test_serialization.yml
│ ├── test_stack.yml
│ ├── test_baseline_search.yml
│ ├── test_ASFSession.yml
│ ├── test_collection_attributes.yml
│ ├── test_ASFProduct.yml
│ └── test_ASFSearchOptions.yml
├── Search
│ ├── test_collection_attributes.py
│ └── test_search_generator.py
├── CMR
│ └── test_MissionList.py
├── Serialization
│ └── test_serialization.py
├── Pair
│ └── test_Pair.py
├── download
│ └── test_download.py
├── BaselineSearch
│ ├── Stack
│ │ └── test_stack.py
│ └── test_baseline_search.py
├── ASFSearchOptions
│ └── test_ASFSearchOptions.py
├── WKT
│ └── test_validate_wkt.py
└── ASFProduct
│ ├── test_ASFSubproduct.py
│ └── test_ASFProduct.py
├── .github
├── workflows
│ ├── lint.yml
│ ├── changelog.yml
│ ├── label-prod-pr.yml
│ ├── pypi-publish.yml
│ ├── run-pytest.yml
│ ├── run-pytest-authenticated.yml
│ └── prod-request-merged.yml
├── ISSUE_TEMPLATE
│ ├── config.yml
│ ├── feature_request.md
│ └── bug_report.md
└── PULL_REQUEST_TEMPLATE.md
├── pyproject.toml
├── examples
├── 6-Outro.md
├── 0-Intro.md
└── hello_world.py
├── LICENSE
├── .gitignore
├── setup.py
└── conftest.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [run]
2 | omit =
        *tests*
        *examples*
3 |
--------------------------------------------------------------------------------
/asf_search/health/__init__.py:
--------------------------------------------------------------------------------
1 | from .health import health # noqa: F401
2 |
--------------------------------------------------------------------------------
/asf_search/constants/FLIGHT_DIRECTION.py:
--------------------------------------------------------------------------------
# Orbit flight-direction constants, provided as convenience values for searches
# (see asf_search.constants package docstring).
ASCENDING = 'ASCENDING'
DESCENDING = 'DESCENDING'
3 |
--------------------------------------------------------------------------------
/asf_search/baseline/__init__.py:
--------------------------------------------------------------------------------
1 | from .calc import * # noqa: F403 F401
2 | from .stack import * # noqa: F403 F401
3 |
--------------------------------------------------------------------------------
/asf_search/WKT/__init__.py:
--------------------------------------------------------------------------------
1 | from .validate_wkt import validate_wkt # noqa: F401
2 | from .RepairEntry import RepairEntry # noqa: F401
3 |
--------------------------------------------------------------------------------
/asf_search/constants/INSTRUMENT.py:
--------------------------------------------------------------------------------
# Instrument name constants, provided as convenience values for searches.
C_SAR = 'C-SAR'
PALSAR = 'PALSAR'
AVNIR_2 = 'AVNIR-2'
# L_SAR / S_SAR presumably correspond to NISAR's two radars (cf. the NISAR
# bandwidth groupings in RANGE_BANDWIDTH.py) — confirm.
L_SAR = 'L-SAR'
S_SAR = 'S-SAR'
6 |
--------------------------------------------------------------------------------
/asf_search/ASFSearchOptions/__init__.py:
--------------------------------------------------------------------------------
1 | from .ASFSearchOptions import ASFSearchOptions # noqa F401
2 | from .validators import * # noqa F401 F403
3 |
--------------------------------------------------------------------------------
/asf_search/constants/PRODUCTION_CONFIGURATION.py:
--------------------------------------------------------------------------------
# NISAR Production Configurations
PRODUCTION = 'PR'
URGENT_RESPONSE = 'UR'
# 'OD' — meaning not evident from this file (possibly "on demand");
# verify against NISAR production documentation.
CUSTOM = 'OD'
5 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/ShorelineMask26.shp:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/asfadmin/Discovery-asf_search/HEAD/tests/yml_tests/Resources/ShorelineMask26.shp
--------------------------------------------------------------------------------
/tests/yml_tests/test_known_bugs.yml:
--------------------------------------------------------------------------------
1 | # To exclude from the pipeline, but still keep track of
2 |
3 | tests:
4 | [] # placeholder until this file has tests
5 |
--------------------------------------------------------------------------------
/asf_search/download/__init__.py:
--------------------------------------------------------------------------------
1 | from .download import download_urls, download_url, remotezip # noqa: F401
2 | from .file_download_type import FileDownloadType # noqa: F401
3 |
--------------------------------------------------------------------------------
/asf_search/download/file_download_type.py:
--------------------------------------------------------------------------------
from enum import Enum, auto


class FileDownloadType(Enum):
    """Selects which set of a product's files a download call should fetch.

    Consumed by ``asf_search.download`` (re-exported from that package's
    ``__init__``).
    """

    # auto() assigns 1, 2, 3 in declaration order, preserving the original
    # hand-numbered values while keeping the numbering maintenance-free.
    DEFAULT_FILE = auto()
    ADDITIONAL_FILES = auto()
    ALL_FILES = auto()
8 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_authenticated/test_collection_attributes_Auth.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - Test SEASAT ASFSubproduct:
3 | scenes: ["SS_01502_STD_F2536"]
4 | opts:
5 | dataset: SEASAT
6 | expected_subclass: SEASATProduct
7 |
--------------------------------------------------------------------------------
/asf_search/WKT/RepairEntry.py:
--------------------------------------------------------------------------------
class RepairEntry:
    """Simple container pairing a repair report's type label with its text.

    Used by the asf_search.WKT package (re-exported from its ``__init__``).
    """

    def __init__(self, report_type: str, report: str) -> None:
        # Category label for the report
        self.report_type = report_type
        # Human-readable report text
        self.report = report

    def __str__(self) -> str:
        # Render as "<type>: <report>"
        return '{}: {}'.format(self.report_type, self.report)
8 |
--------------------------------------------------------------------------------
/.github/workflows/lint.yml:
--------------------------------------------------------------------------------
1 | on: push
2 |
3 | jobs:
4 | lint:
5 | runs-on: ubuntu-latest
6 |
7 | steps:
8 | - uses: actions/checkout@v5
9 | - uses: astral-sh/ruff-action@v3
10 | with:
11 | src: './asf_search'
12 | version-file: 'pyproject.toml'
--------------------------------------------------------------------------------
/asf_search/constants/RANGE_BANDWIDTH.py:
--------------------------------------------------------------------------------
# NISAR sensor range-bandwidth constants, provided as convenience values
# for searches. Single quotes match the project's ruff format config
# (quote-style = "single") and the sibling constants files.

## L-SAR
BW_20 = '20'
BW_40 = '40'
BW_20_5 = '20+5'
BW_40_5 = '40+5'
BW_77 = '77'
BW_5 = '5'
BW_5_5 = '5+5'

## S-SAR
BW_10 = '10'
BW_25 = '25'
BW_37 = '37'
BW_75 = '75'
16 |
--------------------------------------------------------------------------------
/asf_search/ASFSearchOptions/config.py:
--------------------------------------------------------------------------------
1 | from asf_search.constants import INTERNAL
2 | from asf_search.ASFSession import ASFSession
3 |
# Default configuration values used by ASFSearchOptions (host, provider,
# session, and collection aliasing).
config = {
    'host': INTERNAL.CMR_HOST,
    'provider': INTERNAL.DEFAULT_PROVIDER,
    # NOTE(review): this ASFSession is constructed once at import time, so the
    # default session is shared by everything that falls back to this config —
    # confirm that sharing is intended.
    'session': ASFSession(),
    'collectionAlias': True,
}
10 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/config.yml:
--------------------------------------------------------------------------------
1 | contact_links:
2 | - name: Ask Questions
3 | url: https://github.com/asfadmin/Discovery-asf_search/discussions
4 | about: Feel free to ask and answer questions in GitHub's Discussions
5 |
6 | - name: Gitter Chat
7 | url: https://gitter.im/ASFDiscovery/asf_search
8 | about: Come chat with the asf_search community
--------------------------------------------------------------------------------
/asf_search/constants/PLATFORM.py:
--------------------------------------------------------------------------------
# Platform name constants, provided as convenience values for searches.
# NOTE(review): values mix upper-case and mixed-case forms (e.g. 'SENTINEL-1'
# vs 'Sentinel-1A') — presumably matching the upstream CMR platform names;
# confirm before normalizing.
SENTINEL1 = 'SENTINEL-1'
SENTINEL1A = 'Sentinel-1A'
SENTINEL1B = 'Sentinel-1B'
SENTINEL1C = 'Sentinel-1C'
SIRC = 'SIR-C'
ALOS = 'ALOS'
ERS = 'ERS'
ERS1 = 'ERS-1'
ERS2 = 'ERS-2'
JERS = 'JERS-1'
RADARSAT = 'RADARSAT-1'
AIRSAR = 'AIRSAR'
# NOTE(review): 'SEASAT 1' (with a space) differs from DATASET.SEASAT
# ('SEASAT') — presumably intentional per-vocabulary naming; confirm.
SEASAT = 'SEASAT 1'
SMAP = 'SMAP'
UAVSAR = 'UAVSAR'
NISAR = 'NISAR'
17 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_campaigns.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - Test test_get_project_names S1-Interferogram-(Beta):
3 | cmr_ummjson: S1_Interferogram_(beta)_cmr_ummjson.yml
4 | campaigns: [
5 | "S1 I-grams (BETA) - Central CA",
6 | "S1 I-grams (BETA) - Kilauea Volcano, HI",
7 | "S1 I-grams (BETA) - Northern CA",
8 | "S1 I-grams (BETA) - Other",
9 | "S1 I-grams (BETA) - Southern CA"
10 | ]
11 |
--------------------------------------------------------------------------------
/asf_search/constants/POLARIZATION.py:
--------------------------------------------------------------------------------
# Polarization constants, provided as convenience values for searches.
HH = 'HH'
VV = 'VV'
VV_VH = 'VV+VH'
HH_HV = 'HH+HV'
DUAL_HH = 'DUAL HH'
DUAL_VV = 'DUAL VV'
DUAL_HV = 'DUAL HV'
DUAL_VH = 'DUAL VH'
HH_3SCAN = 'HH 3SCAN'
HH_4SCAN = 'HH 4SCAN'
HH_5SCAN = 'HH 5SCAN'
QUAD = 'quadrature'
HH_VV = 'HH+VV'
HH_HV_VH_VV = 'HH+HV+VH+VV'
FULL = 'full'
UNKNOWN = 'UNKNOWN'
# NISAR
# (spacing/quoting normalized to match the rest of the file and the
# project's ruff single-quote style)
LH_LV = 'LH+LV'
RH_RV = 'RH+RV'
20 |
--------------------------------------------------------------------------------
/asf_search/export/__init__.py:
--------------------------------------------------------------------------------
1 | from .export_translators import ASFSearchResults_to_properties_list # noqa: F401
2 | from .csv import results_to_csv # noqa: F401
3 | from .metalink import results_to_metalink # noqa: F401
4 | from .kml import results_to_kml # noqa: F401
5 | from .jsonlite import results_to_jsonlite # noqa: F401
6 | from .jsonlite2 import results_to_jsonlite2 # noqa: F401
7 | from .geojson import results_to_geojson # noqa: F401
8 | from .json import results_to_json # noqa: F401
9 |
--------------------------------------------------------------------------------
/asf_search/CMR/__init__.py:
--------------------------------------------------------------------------------
1 | from .MissionList import get_campaigns # noqa: F401
2 | from .subquery import build_subqueries # noqa: F401
3 | from .translate import translate_opts # noqa: F401
4 | from .field_map import field_map # noqa: F401
5 | from .datasets import ( # noqa: F401
6 | dataset_collections, # noqa: F401
7 | collections_per_platform, # noqa: F401
8 | collections_by_processing_level, # noqa: F401
9 | get_concept_id_alias, # noqa: F401
10 | get_dataset_concept_ids, # noqa: F401
11 | )
12 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel",
5 | "setuptools_scm[toml]>=3.4"
6 | ]
7 | build-backend = "setuptools.build_meta"
8 |
9 | # Same as declaring use_scm_version in setup.py, but avoids
10 | # "UserWarning: Unknown distribution option: 'use_scm_version'"
11 | # if setuptools_scm isn't installed when setup.py is called:
12 | [tool.setuptools_scm]
13 |
14 | [tool.ruff]
15 | line-length = 100
16 | fix = true
17 |
18 | [tool.ruff.format]
19 | # Prefer single quotes over double quotes.
20 | quote-style = "single"
21 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_notebooks.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - Test Basic-Overview notebook:
3 | notebook: 1-Basic_Overview.ipynb
4 |
5 | - Test Geographic-Search notebook:
6 | notebook: 2-Geographic_Search.ipynb
7 |
8 | - Test Granule-Search notebook:
9 | notebook: 3-Granule_Search.ipynb
10 |
11 | - Test Baseline-Search notebook:
12 | notebook: 4-Baseline_Search.ipynb
13 |
14 | - Test Baseline-Search notebook:
15 | notebook: 4-Baseline_Search.ipynb
16 |
17 |   - Test Advanced-Custom-ASFProduct-Subclassing notebook:
18 | notebook: Advanced-Custom-ASFProduct-Subclassing.ipynb
19 |
--------------------------------------------------------------------------------
/asf_search/search/__init__.py:
--------------------------------------------------------------------------------
1 | from .search import search # noqa: F401
2 | from .granule_search import granule_search # noqa: F401
3 | from .product_search import product_search # noqa: F401
4 | from .geo_search import geo_search # noqa: F401
5 | from .baseline_search import stack_from_id # noqa: F401
6 | from .campaigns import campaigns # noqa: F401
7 | from .search_count import search_count # noqa: F401
8 | from .search_generator import search_generator, preprocess_opts # noqa: F401
9 | from .collection_attributes import get_searchable_attributes # noqa: F401
10 |
--------------------------------------------------------------------------------
/.github/workflows/changelog.yml:
--------------------------------------------------------------------------------
1 | name: Update changelog on Releases
2 | on:
3 | pull_request:
4 | types:
5 | - opened
6 | - labeled
7 | - unlabeled
8 | - synchronize
9 | branches:
10 | - stable
11 |
12 | jobs:
13 | changelog-updated:
14 | runs-on: ubuntu-latest
15 | steps:
16 |       - uses: actions/checkout@v5
17 |
18 | - name: Changelog check
19 | uses: Zomzog/changelog-checker@v1.3.0
20 | with:
21 | fileName: CHANGELOG.md
22 | noChangelogLabel: bumpless
23 | env:
24 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
25 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_search_generator.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - test-ASFSearch-search-valid S1:
3 | parameters:
4 | {
5 | maxResults: 1250,
6 | platform: 'Sentinel-1'
7 | }
8 |
9 | - test-ASFSearch-search-valid S1A S1B:
10 | parameters:
11 | [
12 | {
13 | maxResults: 1001,
14 | platform: 'Sentinel-1A'
15 | },
16 | {
17 | maxResults: 575,
18 | platform: 'Sentinel-1B'
19 | }
20 | ]
21 |
22 | - test-ASFSearch-search-valid S1:
23 | parameters:
24 | {
25 | maxResults: 250,
26 | platform: 'UAVSAR'
27 | }
28 |
--------------------------------------------------------------------------------
/.github/workflows/label-prod-pr.yml:
--------------------------------------------------------------------------------
1 | name: Check for required labels
2 |
3 | on:
4 | pull_request:
5 | types:
6 | - opened
7 | - reopened
8 | - labeled
9 | - unlabeled
10 | - synchronize
11 | branches:
12 | - stable
13 |
14 | jobs:
15 | check-version-label:
16 | runs-on: ubuntu-latest
17 | if: github.event.pull_request.state == 'open'
18 | steps:
19 | - name: Require Version Label
20 | uses: mheap/github-action-required-labels@v5.5.0
21 | with:
22 | mode: exactly
23 | count: 1
24 | labels: "major, minor, patch, bumpless"
25 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/Fairbanks_SLC.metalink:
--------------------------------------------------------------------------------
1 |
2 |
3 | Alaska Satellite Facilityhttp://www.asf.alaska.edu/
4 |
5 | https://datapool.asf.alaska.edu/SLC/SB/S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081.zip6dd7f6a56ed98ba7037dfeb833217d5b4193723581
6 |
7 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_authenticated/test_ASFSubproduct_Auth.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - test-collection-attributes RSLC:
3 | params:
4 | processingLevel: RSLC
5 | expected_attributes: ['PRODUCT_TYPE', 'RANGE_BANDWIDTH_CONCAT', 'FREQUENCY_A_POLARIZATION_CONCAT', 'STACK_ID', 'FULL_FRAME', 'ASCENDING_DESCENDING', 'FREQUENCY_B_POLARIZATION_CONCAT', 'PRODUCT_VERSION', 'EPHEMERIS_ACCURACY', 'FRAME_NUMBER', 'FREQUENCIES', 'PRODUCT_TYPE_DESC', 'JOINT_OBSERVATION', 'FREQUENCY_A_POLARIZATION', 'FREQUENCY_B_POLARIZATION', 'FREQUENCY_A_RANGE_BANDWIDTH', 'FREQUENCY_B_RANGE_BANDWIDTH', 'PRODUCTION_PIPELINE', 'PATH_NUMBER', 'PROCESSING_CENTER', 'PROCESSING_LEVEL']
6 |
--------------------------------------------------------------------------------
/asf_search/constants/DATASET.py:
--------------------------------------------------------------------------------
# Dataset name constants, provided as convenience values for searches
# (e.g. the `dataset` search parameter used in the test YAML fixtures).
SENTINEL1 = 'SENTINEL-1'
OPERA_S1 = 'OPERA-S1'
OPERA_S1_CALVAL = 'OPERA-S1-CALVAL'
SLC_BURST = 'SLC-BURST'
ALOS_PALSAR = 'ALOS PALSAR'
ALOS_AVNIR_2 = 'ALOS AVNIR-2'
ALOS_2 = 'ALOS-2'
SIRC = 'SIR-C'
ARIA_S1_GUNW = 'ARIA S1 GUNW'
SMAP = 'SMAP'
UAVSAR = 'UAVSAR'
RADARSAT_1 = 'RADARSAT-1'
ERS = 'ERS'
JERS_1 = 'JERS-1'
AIRSAR = 'AIRSAR'
SEASAT = 'SEASAT'
NISAR = 'NISAR'
"""NISAR provides L and S-band SAR data to measure Earth's changing ecosystems,
dynamic surfaces, and ice masses with 12-day regularity
on ascending and descending passes."""
21 |
--------------------------------------------------------------------------------
/tests/Search/test_collection_attributes.py:
--------------------------------------------------------------------------------
1 | from asf_search.ASFSession import ASFSession
2 | from asf_search.search.collection_attributes import get_searchable_attributes
3 | import pytest
4 |
def run_test_collection_attributes(params: dict, expected_attributes: list[str], session: ASFSession, expect_failure: bool) -> None:
    """Drive get_searchable_attributes with `params` and the given session.

    When `expect_failure` is set, the call must raise ValueError; otherwise the
    returned mapping's keys must match `expected_attributes` (order-insensitive).
    """
    if expect_failure:
        with pytest.raises(ValueError):
            get_searchable_attributes(**params, session=session)
        return

    attributes = get_searchable_attributes(**params, session=session)
    assert sorted(expected_attributes) == sorted(attributes.keys())
12 |
--------------------------------------------------------------------------------
/asf_search/constants/__init__.py:
--------------------------------------------------------------------------------
1 | """Various constants to be used in search and related functions,
2 | provided as a convenience to help ensure sensible values."""
3 |
4 | from .BEAMMODE import * # noqa: F403 F401
5 | from .FLIGHT_DIRECTION import * # noqa: F403 F401
6 | from .INSTRUMENT import * # noqa: F403 F401
7 | from .PLATFORM import * # noqa: F403 F401
8 | from .POLARIZATION import * # noqa: F403 F401
9 | from .PRODUCT_TYPE import * # noqa: F403 F401
10 | from .INTERNAL import * # noqa: F403 F401
11 | from .DATASET import * # noqa: F403 F401
12 | from .RANGE_BANDWIDTH import * # noqa: F403 F401
13 | from .PRODUCTION_CONFIGURATION import * # noqa: F403 F401
14 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/feature_request.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Feature request
3 | about: Suggest an idea for this project
4 | title: "[Feature]"
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Is your feature request related to a problem? Please describe.**
11 | A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12 |
13 | **Describe the solution you'd like**
14 | A clear and concise description of what you want to happen.
15 |
16 | **Describe alternatives you've considered**
17 | A clear and concise description of any alternative solutions or features you've considered.
18 |
19 | **Additional context**
20 | Add any other context or screenshots about the feature request here.
21 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_download.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - Test test-download-url url error:
3 | url: urlError
4 | path: " "
5 | filename: "error"
6 |
7 | - Test test-download-url path error:
8 | url: pathError
9 | path: " "
10 | filename: "error"
11 |
12 | - Test test-download-url filename warning:
13 | url: filenameError
14 | path: " "
15 | filename: "error"
16 |
17 | - Test test-download-url url regular:
18 | url: "mock_url"
19 | path: "./"
20 | filename: "Regular product"
21 |
22 | - Test test-download-url url burst:
23 | url: "https://sentinel1-burst.asf.alaska.edu/S1A_EW_SLC__1SDH_20221221T143159_20221221T143302_046431_059001_547B/EW5/HH/19.tiff"
24 | path: "./"
25 | filename: "BURST"
--------------------------------------------------------------------------------
/asf_search/health/health.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | import requests
3 | import json
4 |
5 | import asf_search.constants
6 |
7 |
def health(host: str = None) -> Dict:
    """
    Checks basic connectivity to and health of the ASF SearchAPI.

    Parameters
    ----------
    param host:
        SearchAPI host, defaults to Production SearchAPI.
        This option is intended for dev/test purposes.

    Returns
    -------
    Current configuration and status of subsystems as a dict
    """

    if host is None:
        host = asf_search.INTERNAL.CMR_HOST
    # Bound the request with the shared CMR timeout so a hung endpoint
    # can't block the caller forever (requests.get has no default timeout).
    response = requests.get(
        f'https://{host}{asf_search.INTERNAL.CMR_HEALTH_PATH}',
        timeout=asf_search.INTERNAL.CMR_TIMEOUT,
    )
    # Response.json() decodes the body directly, replacing the former
    # json.loads(response.text) round-trip.
    return response.json()
26 |
--------------------------------------------------------------------------------
/asf_search/Products/UAVSARProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | from asf_search import ASFProduct, ASFSession
3 |
4 |
class UAVSARProduct(ASFProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/uavsar/
    """

    # Extends ASFProduct's property map with UAVSAR-specific entries; each
    # 'path' appears to locate a value inside the CMR UMM AdditionalAttributes
    # list — confirm the traversal semantics against ASFProduct.
    _base_properties = {
        **ASFProduct._base_properties,
        'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        # NOTE(review): mutable default {} and the import-time ASFSession()
        # are shared across calls; this matches the sibling Product classes,
        # so any change should be made consistently across all of them.
        super().__init__(args, session)
19 |
--------------------------------------------------------------------------------
/asf_search/constants/BEAMMODE.py:
--------------------------------------------------------------------------------
# Beam mode constants, provided as convenience values for searches.
# The values span multiple platforms/sensors; groupings are not annotated
# here because the mapping isn't evident from this file alone.
IW = 'IW'
EW = 'EW'
S1 = 'S1'
S2 = 'S2'
S3 = 'S3'
S4 = 'S4'
S5 = 'S5'
S6 = 'S6'
WV = 'WV'
DSN = 'DSN'
FBS = 'FBS'
FBD = 'FBD'
PLR = 'PLR'
WB1 = 'WB1'
WB2 = 'WB2'
OBS = 'OBS'
SIRC11 = '11'
SIRC13 = '13'
SIRC16 = '16'
SIRC20 = '20'
SLC = 'SLC'
STD = 'STD'
POL = 'POL'
RPI = 'RPI'
EH3 = 'EH3'
EH4 = 'EH4'
EH6 = 'EH6'
EL1 = 'EL1'
FN1 = 'FN1'
FN2 = 'FN2'
FN3 = 'FN3'
FN4 = 'FN4'
FN5 = 'FN5'
SNA = 'SNA'
SNB = 'SNB'
ST1 = 'ST1'
ST2 = 'ST2'
ST3 = 'ST3'
ST4 = 'ST4'
ST5 = 'ST5'
ST6 = 'ST6'
ST7 = 'ST7'
SWA = 'SWA'
SWB = 'SWB'
WD1 = 'WD1'
WD2 = 'WD2'
WD3 = 'WD3'
48 |
--------------------------------------------------------------------------------
/asf_search/Products/SMAPProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | from asf_search import ASFProduct, ASFSession
3 |
4 |
class SMAPProduct(ASFProduct):
    """
    ASF Dataset Documentation Page:
    https://asf.alaska.edu/data-sets/sar-data-sets/soil-moisture-active-passive-smap-mission/
    """

    # Extends ASFProduct's property map with SMAP-specific entries; each
    # 'path' appears to locate a value inside the CMR UMM AdditionalAttributes
    # list — confirm the traversal semantics against ASFProduct.
    _base_properties = {
        **ASFProduct._base_properties,
        'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        # NOTE(review): mutable default {} and the import-time ASFSession()
        # are shared across calls; matches the sibling Product classes.
        super().__init__(args, session)
20 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_ASFSubproduct.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - Test OPERA-S1 ASFSubproduct:
3 | scenes:
4 | [
5 | "OPERA_L2_RTC-S1_T160-342208-IW3_20221221T161230Z_20250302T093113Z_S1A_30_v1.0",
6 | "OPERA_L2_CSLC-S1_T160-342208-IW3_20221127T161232Z_20240801T232256Z_S1A_VV_v1.1",
7 | "OPERA_L2_RTC-S1-STATIC_T160-342208-IW3_20140403_S1B_30_v1.0",
8 | "OPERA_L2_CSLC-S1-STATIC_T160-342208-IW3_20140403_S1B_v1.0",
9 | "OPERA_L3_DISP-S1_IW_F42776_VV_20180504T161139Z_20180516T161139Z_v1.0_20250829T201146Z",
10 | "OPERA_L4_TROPO-ZENITH_20250930T180000Z_20251003T000713Z_HRES_v1.0",
11 | ]
12 | expected_subclass: OPERAS1Product
13 |
14 | - Test S1Burst ASFSubproduct:
15 | scenes: ["S1_055219_EW1_20250418T163543_HH_1D57-BURST"]
16 | expected_subclass: S1BurstProduct
17 |
--------------------------------------------------------------------------------
/asf_search/Products/SIRCProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | from asf_search import ASFProduct, ASFSession
3 |
4 |
class SIRCProduct(ASFProduct):
    """
    Dataset Documentation Page: https://eospso.nasa.gov/missions/spaceborne-imaging-radar-c
    """

    # Extends ASFProduct's property map with SIR-C-specific entries. Unlike
    # the UAVSAR/SMAP siblings, this subclass also maps 'pgeVersion' (from
    # PGEVersionClass) and 'beamModeType'.
    _base_properties = {
        **ASFProduct._base_properties,
        'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
        'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']},
        'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        # NOTE(review): mutable default {} and the import-time ASFSession()
        # are shared across calls; matches the sibling Product classes.
        super().__init__(args, session)
20 |
--------------------------------------------------------------------------------
/asf_search/constants/INTERNAL.py:
--------------------------------------------------------------------------------
# Internal constants: service hosts, CMR paths, and auth settings used by
# asf_search when talking to ASF and Earthdata services.

ASF_AUTH_HOST = 'cumulus.asf.alaska.edu'

CMR_HOST = 'cmr.earthdata.nasa.gov'
CMR_HOST_UAT = 'cmr.uat.earthdata.nasa.gov'  # UAT (test) CMR environment
CMR_TIMEOUT = 30  # request timeout — presumably seconds (requests convention); confirm
CMR_FORMAT_EXT = 'umm_json'
CMR_GRANULE_PATH = f'/search/granules.{CMR_FORMAT_EXT}'
CMR_COLLECTIONS = '/search/collections'
CMR_COLLECTIONS_PATH = f'{CMR_COLLECTIONS}.{CMR_FORMAT_EXT}'
CMR_HEALTH_PATH = '/search/health'
CMR_PAGE_SIZE = 250  # results per CMR page — inferred from name; confirm
EDL_HOST = 'urs.earthdata.nasa.gov'
EDL_HOST_UAT = f'uat.{EDL_HOST}'

EDL_CLIENT_ID = 'BO_n7nTIlMljdvU6kRRB3g'

DEFAULT_PROVIDER = 'ASF'

# NOTE(review): 'earthdatacloud.nasa.gov' is commented out below — confirm
# whether it should be re-added to the trusted auth domains.
AUTH_DOMAINS = ['asf.alaska.edu', 'earthdata.nasa.gov'] #, 'earthdatacloud.nasa.gov']
AUTH_COOKIES = ['urs_user_already_logged', 'uat_urs_user_already_logged', 'asf-urs']

ERROR_REPORTING_ENDPOINT = 'search-error-report.asf.alaska.edu'
23 |
--------------------------------------------------------------------------------
/asf_search/Products/__init__.py:
--------------------------------------------------------------------------------
1 | from .S1Product import S1Product # noqa: F401
2 | from .ALOSProduct import ALOSProduct # noqa: F401
3 | from .RADARSATProduct import RADARSATProduct # noqa: F401
4 | from .AIRSARProduct import AIRSARProduct # noqa: F401
5 | from .ERSProduct import ERSProduct # noqa: F401
6 | from .JERSProduct import JERSProduct # noqa: F401
7 | from .UAVSARProduct import UAVSARProduct # noqa: F401
8 | from .SIRCProduct import SIRCProduct # noqa: F401
9 | from .SEASATProduct import SEASATProduct # noqa: F401
10 | from .SMAPProduct import SMAPProduct # noqa: F401
11 | from .S1BurstProduct import S1BurstProduct # noqa: F401
12 | from .OPERAS1Product import OPERAS1Product # noqa: F401
13 | from .ARIAS1GUNWProduct import ARIAS1GUNWProduct # noqa: F401
14 | from .NISARProduct import NISARProduct # noqa: F401
15 | from .ALOS2Product import ALOS2Product # noqa: F401
16 |
--------------------------------------------------------------------------------
/examples/6-Outro.md:
--------------------------------------------------------------------------------
1 | # Thank You!
2 | ***
3 |
4 | ## Where to Go Next
5 | `asf_search` is available through:
6 | - [PyPi](https://pypi.org/project/asf-search/)
7 | - [Conda](https://anaconda.org/conda-forge/asf_search)
8 | - [Github](https://github.com/asfadmin/Discovery-asf_search)
9 | - [The notebooks used for this presentation](https://github.com/asfadmin/Discovery-asf_search/tree/master/examples)
10 | - [Documentation](https://docs.asf.alaska.edu/)
11 |
12 | Contact ASF at:
13 | - [ASF Website](https://asf.alaska.edu)
14 | - [Contact ASF](https://asf.alaska.edu/contact/)
15 |
16 | Contact the ASF Discovery team directly:
17 | - [Gitter](https://gitter.im/ASFDiscovery/)
18 |
19 | ***
20 | ## The ASF Discovery Team
21 |
22 | Andrew Anderson, Tyler Chase, Olena Ellis, Kim Fairbanks, Christy Fleming, Gregory Short, Cameron Showalter, William Horn
23 |
24 | ***
25 | [Back to Start](./0-Intro.md)
--------------------------------------------------------------------------------
/.github/workflows/pypi-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://help.github.com/en/actions/language-and-framework-guides/using-python-with-github-actions#publishing-to-package-registries
3 |
4 | name: Upload Python Package
5 |
6 | on:
7 | release:
8 | types: [created]
9 | branches:
10 | - stable
11 |
12 | jobs:
13 |
14 | DeployToPypi:
15 | runs-on: ubuntu-latest
16 | steps:
17 | - uses: actions/checkout@v5
18 |
19 | - name: Install dependencies
20 | run: python3 -m pip install --upgrade pip build
21 |
22 | - name: Build package
23 | run: python3 -m build .
24 |
25 | - name: Publish package
26 | uses: pypa/gh-action-pypi-publish@bea5cda687c2b79989126d589ef4411bedce0195
27 | with:
28 | user: __token__
29 | password: ${{ secrets.PYPI_TOKEN }}
30 |
--------------------------------------------------------------------------------
/asf_search/CMR/MissionList.py:
--------------------------------------------------------------------------------
from typing import Dict

import requests

from asf_search.constants.INTERNAL import CMR_COLLECTIONS_PATH, CMR_HOST, CMR_TIMEOUT
from asf_search.exceptions import CMRError
6 |
7 |
def get_campaigns(data) -> Dict:
    """Query the CMR Collections endpoint for collections
    associated with the given platform.

    :param data: a dictionary with required keys:
    'include_facets', 'provider', 'platform[]' and optional key: 'instrument[]'

    :return: Dictionary containing CMR umm_json response

    :raises CMRError: on a non-200 response or an unparseable response body
    """
    url = f'https://{CMR_HOST}{CMR_COLLECTIONS_PATH}'
    response = requests.post(url, data=data)

    if response.status_code != 200:
        raise CMRError(f'CMR_ERROR {response.status_code}: {response.text}')

    try:
        return response.json()
    except Exception as e:
        raise CMRError(f'CMR_ERROR: Error parsing JSON from CMR: {e}')
27 |
--------------------------------------------------------------------------------
/.github/workflows/run-pytest.yml:
--------------------------------------------------------------------------------
1 | name: tests
2 |
3 | on: push
4 | jobs:
5 | run-tests:
6 | runs-on: ubuntu-latest
7 | steps:
8 | - uses: actions/checkout@v5
9 | - uses: actions/setup-python@v6
10 | with:
11 | python-version: '3.10'
12 | - name: Install Dependencies
13 | run: |
14 | python3 -m pip install --upgrade pip
15 | python3 -m pip install .[extras,test,asf-enumeration,coherence]
16 |
17 | - name: Run Tests
18 | run: python3 -m pytest -n auto --cov=asf_search --cov-report=xml --dont-run-file test_known_bugs --ignore=tests/yml_tests/test_authenticated .
19 |
20 | - name: Upload coverage to Codecov
21 | uses: codecov/codecov-action@v5
22 | with:
23 | token: ${{ secrets.CODECOV_TOKEN }}
24 | fail_ci_if_error: false
25 | files: ./coverage.xml
26 | flags: unittests
      name: asf_search pytest
28 | verbose: true
29 |
--------------------------------------------------------------------------------
/asf_search/Products/AIRSARProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | from asf_search import ASFSession, ASFProduct
3 | from asf_search.CMR.translate import try_parse_int
4 |
5 |
class AIRSARProduct(ASFProduct):
    """
    ASF Dataset Overview Page: https://asf.alaska.edu/data-sets/sar-data-sets/airsar/
    """

    # Property-name -> lookup descriptor for values pulled from the CMR UMM record,
    # extending the shared ASFProduct mappings. An optional 'cast' callable is
    # applied to the raw value (resolution happens in ASFProduct machinery).
    _base_properties = {
        **ASFProduct._base_properties,
        # AIRSAR records carry the frame under CENTER_ESA_FRAME, not FRAME_NUMBER
        'frameNumber': {
            'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0],
            'cast': try_parse_int,
        },
        'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)
24 |
--------------------------------------------------------------------------------
/tests/CMR/test_MissionList.py:
--------------------------------------------------------------------------------
1 | from asf_search.CMR.MissionList import get_campaigns
2 | from asf_search.search.campaigns import _get_project_names
3 | import pytest
4 | import requests_mock
5 |
6 | from asf_search.constants.INTERNAL import CMR_COLLECTIONS_PATH, CMR_HOST
7 | from asf_search.exceptions import CMRError
8 |
9 |
def test_getMissions_error():
    # A non-200 status from the CMR collections endpoint must surface as a CMRError
    with requests_mock.Mocker() as mocker:
        mocker.post(
            f'https://{CMR_HOST}{CMR_COLLECTIONS_PATH}',
            status_code=300,
            json={'error': {'report': ''}},
        )

        with pytest.raises(CMRError):
            get_campaigns({})
21 |
22 |
def test_getMissions_error_parsing():
    # A 200 response with an empty (non-JSON) body must surface as a CMRError
    with requests_mock.Mocker() as mocker:
        mocker.register_uri('POST', f'https://{CMR_HOST}{CMR_COLLECTIONS_PATH}')

        with pytest.raises(CMRError):
            get_campaigns({})
29 |
30 |
def run_test_get_project_names(cmr_ummjson, campaigns):
    # Helper invoked by the yml-driven test runner: project-name extraction
    # from a CMR umm_json payload must match the expected campaign list.
    extracted = _get_project_names(cmr_ummjson)
    assert extracted == campaigns
33 |
--------------------------------------------------------------------------------
/asf_search/search/granule_search.py:
--------------------------------------------------------------------------------
1 | from typing import Sequence
2 | from copy import copy
3 |
4 | from asf_search.search import search
5 | from asf_search.ASFSearchOptions import ASFSearchOptions
6 | from asf_search.ASFSearchResults import ASFSearchResults
7 |
8 |
def granule_search(granule_list: Sequence[str], opts: ASFSearchOptions = None) -> ASFSearchResults:
    """
    Performs a granule name search using the ASF SearchAPI

    Parameters
    ----------
    granule_list:
        List of specific granules.
        Search results may include several products per granule name.
    opts:
        An ASFSearchOptions object describing the search parameters to be used.
        Search parameters specified outside this object will override in event of a conflict.

    Returns
    -------
    `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct)
    """
    # Operate on a copy so the caller's options object is never mutated.
    search_opts = copy(opts) if opts is not None else ASFSearchOptions()
    search_opts.merge_args(granule_list=granule_list)

    return search(opts=search_opts)
32 |
--------------------------------------------------------------------------------
/asf_search/search/product_search.py:
--------------------------------------------------------------------------------
1 | from typing import Sequence
2 | from copy import copy
3 |
4 | from asf_search.search import search
5 | from asf_search.ASFSearchOptions import ASFSearchOptions
6 | from asf_search.ASFSearchResults import ASFSearchResults
7 |
8 |
def product_search(product_list: Sequence[str], opts: ASFSearchOptions = None) -> ASFSearchResults:
    """
    Performs a product ID search using the ASF SearchAPI

    Parameters
    ----------
    product_list:
        List of specific products.
        Guaranteed to be at most one product per product name.
    opts:
        An ASFSearchOptions object describing the search parameters to be used.
        Search parameters specified outside this object will override in event of a conflict.

    Returns
    -------
    `asf_search.ASFSearchResults` (list of search results of subclass ASFProduct)
    """

    opts = ASFSearchOptions() if opts is None else copy(opts)

    opts.merge_args(product_list=product_list)

    return search(opts=opts)
32 |
--------------------------------------------------------------------------------
/.github/workflows/run-pytest-authenticated.yml:
--------------------------------------------------------------------------------
1 | name: authenticated tests
2 | permissions:
3 | contents: read
4 | # For tests that require authenticated searches
5 |
6 | on:
7 | push:
8 | branches:
9 | - master
10 |
11 |
12 | jobs:
13 | run-tests:
14 | runs-on: ubuntu-latest
15 | environment: pre-release
16 | steps:
17 | - uses: actions/checkout@v5
18 | - uses: actions/setup-python@v6
19 |
20 | with:
21 | python-version: '3.10'
22 | - name: Install Dependencies
23 | run: |
24 | python3 -m pip install --upgrade pip
25 | python3 -m pip install .[extras,test,asf-enumeration,coherence]
26 |
27 | - name: Run Tests
28 | env:
29 | EDL_TOKEN: ${{ secrets.EDL_TOKEN }}
30 | run: python3 -m pytest --should_auth_session TRUE
31 |
32 | # - name: Upload coverage to Codecov
33 | # uses: codecov/codecov-action@v5
34 | # with:
35 | # token: ${{ secrets.CODECOV_TOKEN }}
36 | # fail_ci_if_error: false
37 | # files: ./coverage.xml
38 | # flags: unittests
39 | # name: asf_admin pytest
40 | # verbose: true
41 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/Fairbanks_SLC.csv:
--------------------------------------------------------------------------------
1 | "Granule Name","Platform","Sensor","Beam Mode","Beam Mode Description","Orbit","Path Number","Frame Number","Acquisition Date","Processing Date","Processing Level","Start Time","End Time","Center Lat","Center Lon","Near Start Lat","Near Start Lon","Far Start Lat","Far Start Lon","Near End Lat","Near End Lon","Far End Lat","Far End Lon","Faraday Rotation","Ascending or Descending?","URL","Size (MB)","Off Nadir Angle","Stack Size","Doppler","GroupID","Pointing Angle"
2 | "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081","Sentinel-1B","C-SAR","IW","Interferometric Wide. 250 km swath, 5 m x 20 m spatial resolution and burst synchronization for interferometry. IW is considered to be the standard mode over land masses.","24970","94","210","2021-01-02T03:20:58.000000","2021-01-02T03:20:31.000000","SLC","2021-01-02T03:20:31.000000","2021-01-02T03:20:58.000000","64.9861","-147.0909","63.942123","-149.246063","64.386414","-144.136368","65.53125","-150.172562","65.99025","-144.751495","None","ASCENDING","https://datapool.asf.alaska.edu/SLC/SB/S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081.zip","3999.446469306946","None","None","0","S1B_IWDV_0209_0216_024970_094",""
3 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/Fairbanks_SLC_jsonlite2.json:
--------------------------------------------------------------------------------
1 | {
2 | "results": [
3 | {
4 | "b": [],
5 | "bm": "IW",
6 | "d": "Sentinel-1B",
7 | "du": "https://datapool.asf.alaska.edu/SLC/SB/{gn}.zip",
8 | "f": 210,
9 | "fd": "ASCENDING",
10 | "fl": null,
11 | "fn": "{gn}.zip",
12 | "fr": null,
13 | "gid": "S1B_IWDV_0209_0216_024970_094",
14 | "gn": "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081",
15 | "i": "C-SAR",
16 | "in": true,
17 | "mn": null,
18 | "o": ["24970"],
19 | "on": null,
20 | "p": 94,
21 | "pa": null,
22 | "pid": "{gn}-SLC",
23 | "po": "VV+VH",
24 | "pt": "SLC",
25 | "ptd": "L1 Single Look Complex (SLC)",
26 | "s": 3999.446469306946,
27 | "ss": null,
28 | "st": "2021-01-02T03:20:31.000000",
29 | "stp": "2021-01-02T03:20:58.000000",
30 | "t": null,
31 | "w": "POLYGON((-144.751495 65.990250,-144.136368 64.386414,-149.246063 63.942123,-150.172562 65.531250,-144.751495 65.990250))",
32 | "wu": "POLYGON ((-144.751495 65.990250, -144.136368 64.386414, -149.246063 63.942123, -150.172562 65.531250, -144.751495 65.990250))"
33 | }
34 | ]
35 | }
36 |
--------------------------------------------------------------------------------
/asf_search/exceptions.py:
--------------------------------------------------------------------------------
class ASFError(Exception):
    """Base ASF Exception, not intended for direct use"""


class ASFSearchError(ASFError):
    """Base search-related Exception"""


class ASFSearch4xxError(ASFSearchError):
    """Raise when CMR returns a 4xx error"""


class ASFSearch5xxError(ASFSearchError):
    """Raise when CMR returns a 5xx error"""


class ASFBaselineError(ASFSearchError):
    """Raise when baseline related errors occur"""


class ASFDownloadError(ASFError):
    """Base download-related Exception"""


class ASFAuthenticationError(ASFError):
    """Base authentication-related Exception"""


class ASFWKTError(ASFError):
    """Raise when wkt related errors occur"""


class CoherenceEstimationError(ASFError):
    """Raise if coherence estimation is requested for a Pair with a temporal baseline > 48 days"""


class CMRError(Exception):
    """Base CMR Exception"""


class CMRConceptIDError(CMRError):
    """Raise when CMR encounters a concept-id error"""


class CMRIncompleteError(CMRError):
    """Raise when CMR returns an incomplete page of results"""
47 |
--------------------------------------------------------------------------------
/asf_search/Products/JERSProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Union
2 | from asf_search import ASFSession, ASFStackableProduct
3 | from asf_search.constants import PRODUCT_TYPE
4 |
5 |
class JERSProduct(ASFStackableProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/jers-1/
    """

    # Property-name -> lookup descriptor for values pulled from the CMR UMM record,
    # extending the shared ASFStackableProduct mappings.
    _base_properties = {
        **ASFStackableProduct._base_properties,
        'browse': {'path': ['RelatedUrls', ('Type', [('GET RELATED VISUALIZATION', 'URL')])]},
        'groupID': {'path': ['AdditionalAttributes', ('Name', 'GROUP_ID'), 'Values', 0]},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
        'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)

    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        """
        Returns the product type to search for when building a baseline stack.
        """
        return PRODUCT_TYPE.L0
29 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_serialization.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - test serialization single SLC ASFProduct Empty:
3 | product: {meta: {}, umm: {}, properties: {}, geometry: {}}
4 |
5 | - test serialization single SLC ASFProduct:
6 | product: Fairbanks_SLC.yml
7 |
8 | - test serialization single SLC ASFProduct missing-state-vectors:
9 | product: Fairbanks_SLC_no_stateVectors.yml
10 |
11 | - test serialization single L1 ASFProduct:
12 | product: Fairbanks_L1.yml
13 |
14 | - test serialization single Alos ASFProduct:
15 | product: Alos_response.yml
16 |
17 | - test serialization ASFSearchResults Empty:
18 | results: []
19 | searchOpts: null
20 |
21 | - test serialization ASFSearchResults ers stack:
22 | results: Fairbanks_ers_stack.yml
23 | searchOpts:
24 | maxResults: 3
25 | platform: ["ERS-1", "ERS-2"]
26 |
27 | - test serialization ASFSearchResults incomplete s1 stack:
28 | results: Fairbanks_S1_stack incomplete.yml
29 | searchOpts:
30 | maxResults: 3
31 | platform: ["SENTINEL-1A", "SENTINEL-1B"]
32 |
33 | - test serialization ASFSearchResults non-default config-opts:
34 | results: Fairbanks_ers_stack preprocessed.yml
35 | searchOpts:
36 | session: null
37 | host: cmr.uat.earthdata.nasa.gov
38 | provider: Not-ASF
39 |
--------------------------------------------------------------------------------
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: "[Bug]"
5 | labels: ''
6 | assignees: ''
7 |
8 | ---
9 |
10 | **Describe the bug**
11 | A clear and concise description of what the bug is.
12 |
13 | **To Reproduce**
14 | Provide a minimal python snippet to reproduce the behavior.
15 |
16 | \*Reminder: If authentication is required **do not** leave any sensitive credentials in the snippet. Use the `getpass` module https://docs.python.org/3/library/getpass.html
17 |
18 | Example snippet:
19 | ``` python
20 | import asf_search as asf
21 | from getpass import getpass
22 |
23 | granule_list= ['S1A_IW_GRDH_1SDV_20250922T162824_20250922T162849_061103_079DCA_9515']
24 | response = asf.search(granule_list=granule_list)
25 |
26 | session = asf.ASFSession()
27 | session.auth_with_token(getpass('Earth Data Login Token'))
28 |
29 | # The line below raises an error for some reason
30 | response[0].download('./', session=session)
31 | ```
32 |
33 | **Expected behavior**
34 | A clear and concise description of what you expected to happen.
35 |
36 | **Screenshots**
37 | If applicable, add screenshots to help explain your problem.
38 |
39 | **Desktop (please complete the following information):**
40 | - OS: [e.g. Ubuntu 20.04]
41 | - Python Version [e.g. python3.11]
- Pip Environment [e.g. output of `python3 -m pip freeze`]
43 |
44 | **Additional context**
45 | Add any other context about the problem here.
46 |
--------------------------------------------------------------------------------
/.github/PULL_REQUEST_TEMPLATE.md:
--------------------------------------------------------------------------------
1 | # Merge Requirements:
2 | The following requirements must be met for your pull request to be considered for review & merging. Until these requirements are met please mark the pull request as a draft.
3 |
4 | ## Purpose
5 | Why is this pull request necessary? Provide a reference to a related issue in this repository that your pull request addresses (if applicable).
6 |
7 | ## Description
8 | A brief description of the changes proposed in the pull request. If there are any changes to packaging requirements please list them.
9 |
10 | ## Snippet
11 | If the pull request provides a new feature, provide an example demonstrating the use-case(s) for this pull request (If applicable).
12 |
13 | Example:
14 | ``` python
15 | import asf_search as asf
16 |
17 | response = asf.search(dataset=asf.DATASET.SENTINEL1, maxResults=250)
18 |
19 | useful_data = response.new_feature()
20 | ```
21 |
22 | ## Error/Warning/Regression Free
23 | Your code runs without any unhandled errors, warnings, or regressions
24 |
25 | ## Unit Tests
26 | You have added unit tests to the test suite see the [README Testing section](https://github.com/asfadmin/Discovery-asf_search?tab=readme-ov-file#testing) for an overview on adding tests to the test suite.
27 |
28 | ## Target Merge Branch
29 | Your pull request targets the `master` branch
30 |
31 |
32 | ***
33 |
34 | ### Checklist
35 | - [ ] Purpose
36 | - [ ] Description
37 | - [ ] Snippet
38 | - [ ] Error/Warning/Regression Free
39 | - [ ] Unit Tests
40 | - [ ] Target Merge Branch
--------------------------------------------------------------------------------
/asf_search/Products/RADARSATProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Union
2 | from asf_search import ASFSession, ASFStackableProduct
3 | from asf_search.CMR.translate import try_parse_float, try_parse_int
4 | from asf_search.constants import PRODUCT_TYPE
5 |
6 |
class RADARSATProduct(ASFStackableProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/radarsat-1/
    """

    # Property-name -> lookup descriptor for values pulled from the CMR UMM record,
    # extending the shared ASFStackableProduct mappings. An optional 'cast'
    # callable converts the raw string value.
    _base_properties = {
        **ASFStackableProduct._base_properties,
        'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
        'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
        # Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},
        'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0], 'cast': try_parse_int},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)

    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        """
        Returns the product type to search for when building a baseline stack.
        """
        return PRODUCT_TYPE.L0
31 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2021, Alaska Satellite Facility
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/Fairbanks_SLC_jsonlite.json:
--------------------------------------------------------------------------------
1 | {
2 | "results": [
3 | {
4 | "beamMode": "IW",
5 | "browse": [],
6 | "canInSAR": true,
7 | "dataset": "Sentinel-1B",
8 | "downloadUrl": "https://datapool.asf.alaska.edu/SLC/SB/S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081.zip",
9 | "faradayRotation": null,
10 | "fileName": "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081.zip",
11 | "flightDirection": "ASCENDING",
12 | "flightLine": null,
13 | "frame": 210,
14 | "granuleName": "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081",
15 | "groupID": "S1B_IWDV_0209_0216_024970_094",
16 | "instrument": "C-SAR",
17 | "missionName": null,
18 | "offNadirAngle": null,
19 | "orbit": [
20 | "24970"
21 | ],
22 | "path": 94,
23 | "pointingAngle": null,
24 | "polarization": "VV+VH",
25 | "productID": "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081-SLC",
26 | "productType": "SLC",
27 | "productTypeDisplay": "L1 Single Look Complex (SLC)",
28 | "sizeMB": 3999.446469306946,
29 | "stackSize": null,
30 | "startTime": "2021-01-02T03:20:31.000000",
31 | "stopTime": "2021-01-02T03:20:58.000000",
32 | "thumb": null,
33 | "wkt": "POLYGON((-144.751495 65.990250,-144.136368 64.386414,-149.246063 63.942123,-150.172562 65.531250,-144.751495 65.990250))",
34 | "wkt_unwrapped": "POLYGON ((-144.751495 65.990250, -144.136368 64.386414, -149.246063 63.942123, -150.172562 65.531250, -144.751495 65.990250))"
35 | }
36 | ]
37 | }
--------------------------------------------------------------------------------
/asf_search/Products/ERSProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Union
2 | from asf_search import ASFSession, ASFStackableProduct
3 | from asf_search.CMR.translate import try_round_float
4 | from asf_search.constants import PRODUCT_TYPE
5 |
6 |
class ERSProduct(ASFStackableProduct):
    """
    Used for ERS-1 and ERS-2 products

    ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/
    ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/
    """

    # Property-name -> lookup descriptor for values pulled from the CMR UMM record,
    # extending the shared ASFStackableProduct mappings.
    # NOTE(review): unlike RADARSATProduct/ALOSProduct, 'frameNumber' and 'esaFrame'
    # carry no int cast here, so they surface as raw strings — confirm intentional.
    _base_properties = {
        **ASFStackableProduct._base_properties,
        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0]},
        'bytes': {
            'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0],
            'cast': try_round_float,
        },
        'esaFrame': {'path': ['AdditionalAttributes', ('Name', 'CENTER_ESA_FRAME'), 'Values', 0]},
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
        'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)

    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        """
        Returns the product type to search for when building a baseline stack.
        """
        return PRODUCT_TYPE.L0
37 |
--------------------------------------------------------------------------------
/asf_search/export/geojson.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | import json
3 | from types import GeneratorType
4 |
5 | from asf_search import ASF_LOGGER
6 |
7 |
def results_to_geojson(results):
    """Incrementally encode search results as a geojson FeatureCollection,
    yielding the output string chunk by chunk."""
    ASF_LOGGER.info('started translating results to geojson format')

    # Anything that isn't already a generator gets wrapped so the streamer
    # can treat its input uniformly as an iterable of result pages.
    is_generator = inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType)
    if not is_generator:
        results = [results]

    encoder = json.JSONEncoder(indent=2, sort_keys=True)
    yield from encoder.iterencode(
        {'type': 'FeatureCollection', 'features': GeoJSONStreamArray(results)}
    )
20 |
21 |
class GeoJSONStreamArray(list):
    """A list shim that lets json.JSONEncoder.iterencode stream product
    geojson dicts page by page instead of materializing all results first."""

    def __init__(self, results):
        # `results` is an iterable of result pages (each page iterable of products)
        self.results = results

        # need to make sure we actually have results so we can intelligently set __len__,
        # otherwise iterencode behaves strangely and will output invalid json
        self.len = 1

    def __iter__(self):
        return self.streamDicts()

    def __len__(self):
        return self.len

    def streamDicts(self):
        """Yield the geojson dict for every non-None product across all pages."""
        completed = False
        for page_idx, page in enumerate(self.results):
            ASF_LOGGER.info(f'Streaming {len(page)} products from page {page_idx}')
            completed = page.searchComplete

            yield from [self.getItem(p) for p in page if p is not None]

        if not completed:
            # Logger.warn is a deprecated alias of Logger.warning
            ASF_LOGGER.warning('Failed to download all results from CMR')

        ASF_LOGGER.info('Finished streaming geojson results')

    def getItem(self, p):
        return p.geojson()
51 |
--------------------------------------------------------------------------------
/tests/Serialization/test_serialization.py:
--------------------------------------------------------------------------------
1 | from asf_search import ASFSearchResults, ASFSession
2 | from asf_search.ASFSearchOptions.ASFSearchOptions import ASFSearchOptions
3 |
4 | import os
5 | import json
6 |
7 | from asf_search.search.search_generator import as_ASFProduct
8 |
9 |
def run_test_serialization(product=None, results=None, opts=ASFSearchOptions()):
    """Round-trip product(s) and search options through on-disk JSON
    serialization and assert nothing is lost or altered.

    Exactly one of `product` (a single product dict) or `results`
    (a list of product dicts) is expected to be provided.
    """
    if product is None:
        to_serialize = ASFSearchResults([json_to_product(prod) for prod in results])
    else:
        to_serialize = ASFSearchResults([json_to_product(product)])

    # The `with` blocks close the files on exit; the explicit f.close()
    # calls in the original were redundant and have been removed.
    with open('serialized_product.json', 'w') as f:
        f.write(json.dumps({'results': to_serialize.geojson(), 'opts': dict(opts)}))

    with open('serialized_product.json', 'r') as f:
        deserialized = json.loads(f.read())

    os.remove('serialized_product.json')

    deserialized_results = deserialized.get('results')
    deserialized_opts = deserialized.get('opts')

    # Every serialized search option must survive the round trip unchanged
    for key, value in deserialized_opts.items():
        assert value == getattr(opts, key)

    # Properties and geometry of each product must match the originals
    for idx, original in enumerate(to_serialize):
        assert deserialized_results['features'][idx]['properties'] == original.properties
        assert deserialized_results['features'][idx]['geometry'] == original.geometry

    assert deserialized_results['type'] == 'FeatureCollection'
37 |
38 |
def json_to_product(product):
    """Rehydrate a serialized product dict into the matching ASFProduct subclass."""
    output = as_ASFProduct(product, session=ASFSession())
    # Copy the serialized fields straight onto the fresh instance
    for attr in ('meta', 'properties', 'geometry', 'umm'):
        setattr(output, attr, product[attr])
    return output
46 |
--------------------------------------------------------------------------------
/asf_search/export/export_translators.py:
--------------------------------------------------------------------------------
1 | from types import FunctionType
2 | from datetime import datetime
3 |
4 | from asf_search import ASFSearchResults
5 |
6 |
7 | # ASFProduct.properties don't have every property required of certain output formats,
8 | # This grabs the missing properties from ASFProduct.umm required by the given format
def ASFSearchResults_to_properties_list(
    results: 'ASFSearchResults', get_additional_fields: FunctionType
):
    """Build one merged property dict per product for export formats.

    ASFProduct.properties don't have every property required of certain output
    formats; `get_additional_fields` grabs the missing properties from
    ASFProduct.umm required by the given format.

    :param results: iterable of products being exported
    :param get_additional_fields: callable mapping a product to a dict of extra properties
    :return: list of merged property dicts with normalized date strings
    """
    property_list = [
        {**product.properties, **get_additional_fields(product)} for product in results
    ]

    # Format dates to match format used by SearchAPI output formats
    for product in property_list:
        # S1 date properties are formatted differently from other platforms
        platform = (product.get('platform') or '').upper()
        is_S1 = platform in ['SENTINEL-1', 'SENTINEL-1B', 'SENTINEL-1A', 'SENTINEL-1C']
        for key, data in product.items():
            if ('date' in key.lower() or 'time' in key.lower()) and data is not None:
                if not is_S1:
                    # Remove trailing zeroes from milliseconds, add Z
                    if len(data.split('.')) == 2:
                        d = len(data.split('.')[0])
                        data = data[:d] + 'Z'
                    time = datetime.strptime(data, '%Y-%m-%dT%H:%M:%SZ')
                    product[key] = time.strftime('%Y-%m-%dT%H:%M:%SZ')

    return property_list
42 |
--------------------------------------------------------------------------------
/asf_search/Products/SEASATProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict
2 | from asf_search import ASFSession, ASFProduct
3 | from asf_search.CMR.translate import try_parse_int, try_round_float
4 |
5 |
class SEASATProduct(ASFProduct):
    """
    ASF Dataset Documentation Page: https://asf.alaska.edu/data-sets/sar-data-sets/seasat/
    """

    # Property-name -> lookup descriptor for values pulled from the CMR UMM record.
    # 'bytes' initially holds the raw ArchiveAndDistributionInformation list and is
    # reshaped into per-file mappings in __init__.
    _base_properties = {
        **ASFProduct._base_properties,
        'md5sum': {'path': ['AdditionalAttributes', ('Name', 'MD5SUM'), 'Values', 0]},
        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},  # for consolidated collection
        'bytes': {'path': ['DataGranule', 'ArchiveAndDistributionInformation']},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)

        # Build both per-file mappings in a single pass over the archive info
        # (the original iterated the same list twice).
        bytes_mapping = {}
        md5sum_mapping = {}
        for entry in self.properties['bytes']:
            bytes_mapping[entry['Name']] = {'bytes': entry['SizeInBytes'], 'format': entry['Format']}
            md5sum_mapping[entry['Name']] = entry['Checksum']['Value']

        self.properties['bytes'] = bytes_mapping
        self.properties['md5sum'] = md5sum_mapping

        self.properties['additionalUrls'] = self._get_additional_urls()
        # Browse images are any product URL with a common image extension
        self.properties['browse'] = [
            url for url in self._get_urls()
            if url.endswith(('.png', '.jpg', '.jpeg'))
        ]
        self.properties['s3Urls'] = self._get_s3_uris()

        center = self.centroid()
        self.properties['centerLat'] = center.y
        self.properties['centerLon'] = center.x
40 |
--------------------------------------------------------------------------------
/asf_search/Products/ALOSProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Union
2 | from asf_search import ASFSession, ASFStackableProduct
3 | from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float
4 | from asf_search.constants import PRODUCT_TYPE
5 |
6 |
class ALOSProduct(ASFStackableProduct):
    """
    Used for ALOS Palsar and Avnir dataset products

    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/
    """

    _base_properties = {
        **ASFStackableProduct._base_properties,
        'frameNumber': {'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0], 'cast': try_parse_int},
        'faradayRotation': {'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0], 'cast': try_parse_float},
        'offNadirAngle': {'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0], 'cast': try_parse_float},
        'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTES'), 'Values', 0], 'cast': try_round_float},
        'insarStackId': {'path': ['AdditionalAttributes', ('Name', 'INSAR_STACK_ID'), 'Values', 0]},
        'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)

        # Some responses omit groupID (or carry an explicit None); fall back
        # to the scene name so every product has a usable group identifier.
        if self.properties.get('groupID') is None:
            self.properties['groupID'] = self.properties['sceneName']

    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        """
        Returns the product type to search for when building a baseline stack.
        """
        return PRODUCT_TYPE.L1_1
--------------------------------------------------------------------------------
/asf_search/search/campaigns.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, List, Union
2 | from asf_search.CMR.MissionList import get_campaigns
3 |
4 |
def campaigns(platform: str) -> List[str]:
    """
    Returns a list of campaign names for the given platform,
    each name being usable as a campaign for asf_search.search() and asf_search.geo_search()

    :param platform: The name of the platform to gather campaign names for.
    Platforms currently supported include UAVSAR, AIRSAR, and SENTINEL-1 INTERFEROGRAM (BETA)

    :return: A list of campaign names for the given platform
    """
    params = {'include_facets': 'true', 'provider': 'ASF'}

    # A few platforms are catalogued in CMR under their host aircraft or
    # satellite name, so they need bespoke platform/instrument parameters.
    special_cases = {
        'UAVSAR': {'platform[]': 'G-III', 'instrument[]': 'UAVSAR'},
        'AIRSAR': {'platform[]': 'DC-8', 'instrument[]': 'AIRSAR'},
        'SENTINEL-1 INTERFEROGRAM (BETA)': {'platform[]': 'SENTINEL-1A'},
    }

    if platform is not None:
        params.update(special_cases.get(platform, {'platform[]': platform}))

    return _get_project_names(get_campaigns(params))
33 |
34 |
35 | def _get_project_names(data: Union[Dict, List]) -> List[str]:
36 | """
37 | Recursively searches for campaign names
38 | under "Projects" key in CMR umm_json response
39 |
40 | :param data: CMR umm_json response
41 |
42 | :return: A list of found campaign names for the given platform
43 | """
44 | output = []
45 | if isinstance(data, Dict):
46 | for key, value in data.items():
47 | if key == 'Projects':
48 | return [list(item.values())[0] for item in value]
49 | output.extend(_get_project_names(value))
50 | elif isinstance(data, List):
51 | for item in data:
52 | output.extend(_get_project_names(item))
53 |
54 | return output
55 |
--------------------------------------------------------------------------------
/tests/Pair/test_Pair.py:
--------------------------------------------------------------------------------
1 | from datetime import datetime, timedelta, timezone
2 | from asf_search.ASFSearchOptions import ASFSearchOptions
3 | from asf_search.search import product_search
4 | from asf_search import Pair
5 | import numpy as np
6 |
7 |
def test_make_s1_pairs():
    """Builds an SLC Pair and a burst Pair from live stacks and checks their baseline metadata."""
    opts = ASFSearchOptions(start='2022-02-10', end='2022-07-01')

    slc_ref = product_search('S1A_IW_SLC__1SDV_20220215T225119_20220215T225146_041930_04FE2E_9252-SLC')[0]
    slc_pair = Pair(slc_ref, slc_ref.stack(opts)[1])
    assert slc_pair.ref.properties['sceneName'] == "S1A_IW_SLC__1SDV_20220215T225119_20220215T225146_041930_04FE2E_9252"
    assert slc_pair.sec.properties['sceneName'] == "S1A_IW_SLC__1SDV_20220227T225119_20220227T225146_042105_050431_987E"
    assert slc_pair.ref_time == datetime(2022, 2, 15, 22, 51, 19, tzinfo=timezone.utc)
    assert slc_pair.sec_time == datetime(2022, 2, 27, 22, 51, 19, tzinfo=timezone.utc)
    assert slc_pair.perpendicular_baseline == -15
    assert slc_pair.temporal_baseline == timedelta(days=12)
    assert np.floor(slc_pair.estimate_s1_mean_coherence()) == 18.0

    burst_ref = product_search('S1_181296_IW1_20220219T125501_VV_10AF-BURST')[0]
    burst_pair = Pair(burst_ref, burst_ref.stack(opts)[1])
    assert burst_pair.ref.properties['sceneName'] == "S1_181296_IW1_20220219T125501_VV_10AF-BURST"
    assert burst_pair.sec.properties['sceneName'] == "S1_181296_IW1_20220303T125501_VV_F03A-BURST"
    assert burst_pair.ref_time == datetime(2022, 2, 19, 12, 55, 3, tzinfo=timezone.utc)
    assert burst_pair.sec_time == datetime(2022, 3, 3, 12, 55, 2, tzinfo=timezone.utc)
    assert burst_pair.perpendicular_baseline == -75
    assert burst_pair.temporal_baseline == timedelta(days=12)
    assert np.floor(burst_pair.estimate_s1_mean_coherence()) == 52.0
35 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_stack.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - test-find-new-reference empty stack:
3 | stack: []
4 | output_index: None
5 |
6 | - test-find-new-reference s1 stack:
7 | stack: Fairbanks_S1_stack_preprocessed.yml
8 | output_index: 0
9 |
10 | - test-find-new-reference s1 stack 1st no positions:
11 | stack: Fairbanks_S1_stack_preprocessed_incomplete.yml
12 | output_index: 1
13 |
14 | - test-get_default_product_type S1:
15 | product: Fairbanks_SLC.yml
16 | product_type: SLC
17 |
18 | - test-get_default_product_type ALOS:
19 | product: Alos_response.yml
20 | product_type: L1.1
21 |
22 | - test-get_default_product_type ERS:
23 | product: Fairbanks_L1.yml
24 | product_type: L0
25 |
26 | - test-get-baseline-from-stack error:
27 | reference: Fairbanks_SLC.yml
28 | stack: []
29 | output_stack: []
30 | error: TypeError
31 |
32 | - test-get-baseline-from-stack L1 missing Baseline:
33 | reference: Alos_response_missing_baseline.yml
34 | stack: ['Alos_response_missing_baseline.yml', 'Alos_response_missing_baseline.yml']
35 | output_stack: []
36 | error: TypeError
37 |
  - test-get-baseline-from-stack L1 missing Baseline SLC stack:
39 | reference: Alos_response_missing_baseline.yml
40 | stack: ['Fairbanks_SLC_no_stateVectors.yml', 'Fairbanks_SLC_no_stateVectors.yml']
41 | output_stack: []
42 | error: TypeError
43 |
44 | - test-get-baseline-from-stack fairbanks SLC stack:
45 | reference: Fairbanks_SLC.yml
46 | stack: Fairbanks_S1_stack_preprocessed.yml
47 | output_stack: Fairbanks_S1_stack.yml
48 | error: null
49 |
50 | - test-get-baseline-from-stack fairbanks L1 stack:
51 | reference: Fairbanks_L1.yml
52 | stack: Fairbanks_ers_stack preprocessed.yml
53 | output_stack: Fairbanks_ers_stack.yml
54 | error: null
55 |
56 | - test-valid-state-vectors fairbanks slc:
57 | reference: Fairbanks_SLC.yml
58 | output: True
59 |
  - test-valid-state-vectors fairbanks slc no stateVectors:
61 | reference: Fairbanks_SLC_no_stateVectors.yml
62 | output: False
63 |
--------------------------------------------------------------------------------
/asf_search/search/error_reporting.py:
--------------------------------------------------------------------------------
1 | from asf_search import ASF_LOGGER, ASFSearchOptions
2 | from asf_search import INTERNAL
3 | import requests
4 |
5 |
6 |
def report_search_error(search_options: ASFSearchOptions, message: str):
    """Reports CMR Errors automatically to ASF.

    :param search_options: the ASFSearchOptions used for the failed search
    :param message: the error message to include in the report
    """

    # Imported lazily so runtime changes to asf_search.REPORT_ERRORS take effect
    from asf_search import REPORT_ERRORS

    if not REPORT_ERRORS:
        ASF_LOGGER.warning(
            'Automatic search error reporting is turned off, '
            'search errors will NOT be reported to ASF.'
            '\nTo enable automatic error reporting, set asf_search.REPORT_ERRORS to True'
            '\nIf you have any questions email uso@asf.alaska.edu'
        )
        return

    user_agent = search_options.session.headers.get('User-Agent')
    search_options_list = '\n'.join(
        [f'\t{key}: {value}' for key, value in dict(search_options).items()]
    )
    message = f'Error Message: {str(message)}\nUser Agent: {user_agent} \
\nSearch Options: {{\n{search_options_list}\n}}'

    response = requests.post(
        f'https://{INTERNAL.ERROR_REPORTING_ENDPOINT}',
        data={'Message': f'This error message and info was automatically generated:\n\n{message}'},
    )

    try:
        response.raise_for_status()
    except requests.exceptions.HTTPError:
        ASF_LOGGER.error(
            'asf-search failed to automatically report an error, '
            'if you have any questions email uso@asf.alaska.edu'
            f"\nError Text: HTTP {response.status_code}: {response.json()['errors']}"
        )
        return
    if response.status_code == 200:
        ASF_LOGGER.error(
            (
                'The asf-search module encountered an error with CMR, '
                'and the following message was automatically reported to ASF:'
                # BUG FIX: interpolate the actual message; the original logged
                # the literal word "message"
                f'\n\n"\n{message}\n"'
                '\nIf you have any questions email uso@asf.alaska.edu'
            )
        )
51 |
--------------------------------------------------------------------------------
/.github/workflows/prod-request-merged.yml:
--------------------------------------------------------------------------------
1 | name: Merged to Stable
2 |
3 | on:
4 | pull_request:
5 | types: [closed]
6 | branches:
7 | - stable
8 |
9 | jobs:
10 | OpenRequest:
11 | runs-on: ubuntu-latest
12 | # If a merge request triggered the push, and that request DOESN'T contain the 'bumpless' label.
    # (Need to check all three, instead of 'not bumpless', because if an admin overrides the tests,
14 | # it might not have ANY labels at that point.).
15 | if: >
16 | github.event.pull_request.merged &&
17 | (
18 | contains(github.event.pull_request.labels.*.name, 'patch') ||
19 | contains(github.event.pull_request.labels.*.name, 'minor') ||
20 | contains(github.event.pull_request.labels.*.name, 'major')
21 | )
22 | steps:
23 | - uses: actions/checkout@v5
24 |
25 | - name: Save version type
      # Whichever one returns true will let its 'echo' statement run:
27 | # Must wrap in "(*) || true" to prevent it from exiting on failure, until
28 | # 'allow-failure' is finished getting added: https://github.com/actions/toolkit/issues/399
29 | run: |
30 | (${{ contains(github.event.pull_request.labels.*.name, 'patch') }} && echo "version_type=patch" >> $GITHUB_ENV) || true
31 | (${{ contains(github.event.pull_request.labels.*.name, 'minor') }} && echo "version_type=minor" >> $GITHUB_ENV) || true
32 | (${{ contains(github.event.pull_request.labels.*.name, 'major') }} && echo "version_type=major" >> $GITHUB_ENV) || true
33 | - name: Create a Release
34 | uses: zendesk/action-create-release@v3
35 | env:
36 | # NOT built in token, so this can trigger other actions:
37 | GITHUB_TOKEN: ${{ secrets.DISCO_GITHUB_MACHINE_USER }}
38 | with:
39 | # version_type populated with the last job just above ^^
40 | auto_increment_type: "${{ env.version_type }}"
41 | tag_schema: semantic
42 | draft: false
43 | prerelease: false
44 | body: "${{ github.event.pull_request.body }}"
45 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_baseline_search.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - Test Get test-preprocessed-product ALOS Stack Params:
3 | product: Alos_response.yml
4 |
5 | - Test Get test-unprocessed-product S1 Stack Params:
6 | product: S1_response.yml
7 |
8 | - Test Get test-unprocessed-product ARIA Stack Params:
9 | product: ARIAS1-GUNW_response.yml
10 |
11 | - Test Get test-unprocessed-product Burst Stack Params:
12 | product: SLC_BURST.yml
13 |
14 | - Test Get test-invalid-insarStackID Stack Params:
15 | product: Alos_response.yml
16 |
17 | - Test Get test-temporal-baseline test-product-stack SLC Params:
18 | product: S1_response.yml
19 | stack: S1_baseline_stack.yml
20 |
21 | - Test get test-temporal-baseline test-product-stack ERS Params:
22 | product: Fairbanks_ers_reference.yml
23 | stack: Fairbanks_ers_stack.yml
24 |
25 | - Test get test-temporal-baseline test-product-stack BURST Params:
26 | product: SLC_BURST.yml
27 | stack: SLC_BURST_stack.yml
28 |
29 | - Test get test-temporal-baseline test-product-stack ALOS-2 Params:
30 | product: ALOS_2.yml
31 | stack: ALOS_2_stack.yml
32 |
33 | - Test get test-temporal-baseline test-product-stack JERS Params:
34 | product: JERS.yml
35 | stack: JERS_stack.yml
36 |
37 | - Test get test-temporal-baseline test-product-stack RADARSAT Params:
38 | product: RADARSAT.yml
39 | stack: RADARSAT_stack.yml
40 |
41 | - Test get test-temporal-baseline test-product-stack ARIA-S1 GUNW Params:
42 | product: ARIAS1-GUNW_response.yml
43 | stack: ARIAS1GUNW_stack.yml
44 |
45 | - test-stack-id empty ID:
46 | stack_id: ''
47 | stack_reference: Alos_response.yml
48 | stack: []
49 |
50 | - test-stack-id ARIA S1 GUNW ID:
51 | stack_id: 5289
52 | stack_reference: null
53 | stack: ARIAS1GUNW_stack.yml
54 | dataset: 'ARIA S1 GUNW'
55 |
56 | - test-stack-id S1 ID:
57 | stack_reference: null
58 | stack: S1A_IW_SLC__1SSV_20160528T141908_20160528T141938_011460_011746_335C_stack.yml
59 | stack_id: S1A_IW_SLC__1SSV_20160528T141908_20160528T141938_011460_011746_335C-SLC
60 |
--------------------------------------------------------------------------------
/tests/download/test_download.py:
--------------------------------------------------------------------------------
1 | import unittest
2 | from asf_search.exceptions import ASFAuthenticationError, ASFDownloadError
3 | import pytest
4 | from unittest.mock import patch
5 |
6 | import requests
7 |
8 | from asf_search.download.download import download_url
9 |
10 |
def run_test_download_url_auth_error(url, path, filename):
    """Exercises download_url() failure paths against a mocked HTTP 401 response."""
    with patch('asf_search.ASFSession.get') as mock_get:
        unauthorized = requests.Response()
        unauthorized.status_code = 401
        mock_get.return_value = unauthorized

        # A nonexistent download directory should fail before any request is made
        if url == 'pathError':
            with pytest.raises(ASFDownloadError):
                download_url(url, path, filename)

        with patch('os.path.isdir') as mock_isdir:
            mock_isdir.return_value = True

            if url == 'urlError':
                # File not on disk yet: the 401 should surface as an auth error
                with patch('os.path.isfile') as mock_isfile:
                    mock_isfile.return_value = False

                    with pytest.raises(ASFAuthenticationError):
                        download_url(url, path, filename)

                # File already on disk: expect a warning instead of a download
                with patch('os.path.isfile') as mock_isfile:
                    mock_isfile.return_value = True

                    with pytest.warns(Warning):
                        download_url(url, path, filename)
36 |
37 |
def run_test_download_url(url, path, filename):
    """Exercises download_url() happy paths against mocked 2xx responses."""
    if filename == 'BURST':
        # Burst extraction: the first GET returns 202 (extraction pending),
        # the re-patched GET then serves the finished GeoTIFF.
        with patch('asf_search.ASFSession.get') as mock_extract:
            pending = requests.Response()
            pending.status_code = 202
            pending.headers.update({'content-type': 'application/json'})
            mock_extract.return_value = pending

            with patch('asf_search.ASFSession.get') as mock_tiff:
                ready = requests.Response()
                ready.status_code = 200
                ready.headers.update({'content-type': 'image/tiff'})
                ready.iter_content = lambda chunk_size: []
                mock_tiff.return_value = ready

                with patch('builtins.open', unittest.mock.mock_open()):
                    download_url(url, path, filename)
    else:
        with patch('asf_search.ASFSession.get') as mock_get:
            ok = requests.Response()
            ok.status_code = 200
            ok.iter_content = lambda chunk_size: []
            mock_get.return_value = ok

            with patch('builtins.open', unittest.mock.mock_open()):
                download_url(url, path, filename)
64 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # VS Code
132 | .vscode/
133 | search_results.csv
134 | search_results.metalink
135 |
--------------------------------------------------------------------------------
/asf_search/__init__.py:
--------------------------------------------------------------------------------
1 | from importlib.metadata import PackageNotFoundError, version
2 |
## Setup logging now, so it's available if __version__ fails:
import logging

# Package-wide logger; all submodules import this single instance.
ASF_LOGGER = logging.getLogger(__name__)
# Add null handle so we do nothing by default. It's up to whatever
# imports us, if they want logging.
ASF_LOGGER.addHandler(logging.NullHandler())
9 |
try:
    __version__ = version(__name__)
except PackageNotFoundError as e:
    # Version metadata only exists for installed packages; give an actionable
    # hint when running from a source checkout that was never pip-installed.
    # (Redundant str() wrapper around the literal removed.)
    msg = (
        "package is not installed!\n"
        "Install in editable/develop mode via (from the top of this repo):\n"
        "    python3 -m pip install -e .\n"
        "Or, to just get the version number use:\n"
        "    python setup.py --version"
    )
    print(msg)
    ASF_LOGGER.exception(msg)
    raise PackageNotFoundError(
        "Install with 'python3 -m pip install -e .' to use"
    ) from e
25 |
# NOTE: the logger was already created and given a NullHandler at the top of
# this module; re-fetch the same logger here without attaching a duplicate
# NullHandler (the original repeated addHandler, stacking a second handler).
ASF_LOGGER = logging.getLogger(__name__)
30 |
31 | from .ASFSession import ASFSession # noqa: F401, E402
32 | from .ASFProduct import ASFProduct # noqa: F401 E402
33 | from .ASFStackableProduct import ASFStackableProduct # noqa: F401 E402
34 | from .ASFSearchResults import ASFSearchResults # noqa: F401 E402
35 | from .ASFSearchOptions import ASFSearchOptions, validators # noqa: F401 E402
36 | from .Products import * # noqa: F403 F401 E402
37 | from .exceptions import * # noqa: F403 F401 E402
38 | from .constants import ( # noqa: F401 E402
39 | BEAMMODE, # noqa: F401 E402
40 | FLIGHT_DIRECTION, # noqa: F401 E402
41 | INSTRUMENT, # noqa: F401 E402
42 | PLATFORM, # noqa: F401 E402
43 | POLARIZATION, # noqa: F401 E402
44 | PRODUCT_TYPE, # noqa: F401 E402
45 | INTERNAL, # noqa: F401 E402
46 | DATASET, # noqa: F401 E402
47 | RANGE_BANDWIDTH, # noqa: F401 E402,
48 | PRODUCTION_CONFIGURATION, # noqa: F401 E402
49 | )
50 | from .health import * # noqa: F403 F401 E402
51 | from .search import * # noqa: F403 F401 E402
52 | from .download import * # noqa: F403 F401 E402
53 | from .CMR import * # noqa: F403 F401 E402
54 | from .baseline import * # noqa: F403 F401 E402
55 | from .WKT import validate_wkt # noqa: F401 E402
56 | from .export import * # noqa: F403 F401 E402
57 | from .Pair import Pair # noqa: F401, E402
58 |
# Read lazily (inside report_search_error) so users can toggle it at runtime
# via `asf_search.REPORT_ERRORS = False`.
REPORT_ERRORS = True
"""Enables automatic search error reporting to ASF, send any questions to uso@asf.alaska.edu"""
61 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/Fairbanks_SLC.kml:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 | ASF Datapool Search Results
5 | Search Performed:
6 |
15 |
16 | S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081
17 | <![CDATA[
18 | Sentinel-1B (Interferometric Wide. 250 km swath, 5 m x 20 m spatial resolution and burst synchronization for interferometry. IW is considered to be the standard mode over land masses.), acquired 2021-01-02T03:20:58.000000
19 | https://datapool.asf.alaska.edu/SLC/SB/S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081.zip
20 |
21 |
Metadata
22 |
23 | - Processing type: L1 Single Look Complex (SLC)
24 | - Frame: 210
25 | - Path: 94
26 | - Orbit: 24970
27 | - Start time: 2021-01-02T03:20:31.000000
28 | - End time: 2021-01-02T03:20:58.000000
29 |
30 | - Faraday Rotation: None
31 |
32 | - Ascending/Descending: ASCENDING
33 |
34 | - Off Nadir Angle: None
35 |
36 |
37 | - Pointing Angle: None
38 |
39 |
40 |
41 |
42 |
47 |
48 | #yellowLineGreenPoly
49 |
50 | 1
51 | relativeToGround
52 |
53 |
54 |
55 | -144.751495,65.990250,2000
56 | -144.136368,64.386414,2000
57 | -149.246063,63.942123,2000
58 | -150.172562,65.531250,2000
59 | -144.751495,65.990250,2000
60 |
61 |
62 |
63 |
64 |
65 |
66 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_ASFSession.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - Test ASFSession empty password-login raises error:
3 | username: ' '
4 | password: ' '
5 |
6 | - Test ASFSession empty token-auth raises error:
7 | token: ''
8 |
9 | - Test ASFSession bad token-auth raises error:
10 | token: 'bad_token'
11 |
12 | - Test ASFSession bad cookiejar-auth raises missing login-cookie error:
13 | cookies: [
14 | {
15 | version: 0,
16 | name: 'not_the_urs_user_already_logged_cookie',
17 | value: 'yes',
18 | port: None,
19 | # port_specified: False,
20 | domain: '.earthdata.nasa.gov',
21 | # domain_specified: True,
22 | # domain_initial_dot: False,
23 | # path: '/',
24 | # path_specified: True,
25 | secure: True,
26 | expires: 0000000001,
27 | discard: False,
28 | comment: null,
29 | comment_url: None,
30 | rest: {'HttpOnly': None},
31 | rfc2109: False
32 | }
33 | ]
34 |
35 | - Test ASFSession bad cookiejar-auth raises expired login-cookie error:
36 | cookies: [
37 | {
38 | version: 0,
39 | name: 'urs_user_already_logged',
40 | value: 'yes',
41 | port: None,
42 | # port_specified: False,
43 | domain: '.earthdata.nasa.gov',
44 | # domain_specified: True,
45 | # domain_initial_dot: False,
46 | # path: '/',
47 | # path_specified: True,
48 | secure: True,
49 | expires: 0000000001,
50 | discard: False,
51 | comment: null,
52 | comment_url: None,
53 | rest: {'HttpOnly': None},
54 | rfc2109: False
55 | }
56 | ]
57 |
58 | - Test ASFSession rebuild_auth non-authorized asf-redirect-to-google domains:
59 | original_domain: "asf.alaska.edu"
60 | response_domain: "google.com"
61 | response_code: 302
62 | final_token: None
63 |
64 | - Test ASFSession rebuild_auth non-authorized asf-redirect-to-asf domains:
65 | original_domain: "asf.alaska.edu"
66 | response_domain: "asf.alaska.edu"
67 | response_code: 302
68 | final_token: 'Bearer fakeToken'
69 |
70 | - Test ASFSession rebuild_auth non-authorized asf-redirect-to-nasa domains:
71 | original_domain: "asf.alaska.edu"
72 | response_domain: "earthdata.nasa.gov"
73 | response_code: 302
74 | final_token: 'Bearer fakeToken'
75 |
76 | - Test ASFSession rebuild_auth non-authorized nasa-redirect-to-asf domains:
77 | original_domain: "earthdata.nasa.gov"
78 | response_domain: "asf.alaska.edu"
79 | response_code: 302
80 | final_token: 'Bearer fakeToken'
81 |
--------------------------------------------------------------------------------
/tests/Search/test_search_generator.py:
--------------------------------------------------------------------------------
1 |
2 | from asf_search import ASFSearchOptions, ASFSearchResults
3 | from asf_search import INTERNAL
4 | from typing import List
5 |
6 | import math
7 |
8 | from asf_search.search import search_generator, preprocess_opts
9 |
10 |
def run_test_search_generator_multi(search_opts: List[ASFSearchOptions]):
    """Drains several search_generator queries round-robin, one page at a time,
    then checks page count, combined result count, and per-platform completion.

    :param search_opts: one ASFSearchOptions per concurrent query; each must set maxResults
    """
    queries = [search_generator(opts=opts) for opts in search_opts]

    expected_results_size = sum([opts.maxResults for opts in search_opts])
    expected_page_count = sum(
        [math.ceil(opts.maxResults / INTERNAL.CMR_PAGE_SIZE) for opts in search_opts]
    )
    combined_results = []

    page_count = 0
    # Maps platform name -> whether that platform's search reported completion
    searches = {}

    for opt in search_opts:
        if isinstance(opt.platform, list):
            for platform in opt.platform:
                searches[platform] = False
        else:
            searches[opt.platform] = False

    while len(queries):
        queries_iter = iter(queries)
        for idx, query in enumerate(queries_iter):  # Alternate pages between results
            page = next(query, None)
            if page is not None:
                combined_results.extend(page)
                page_count += 1
                if page.searchComplete:
                    if isinstance(page.searchOptions.platform, list):
                        for platform in page.searchOptions.platform:
                            searches[platform] = True
                    else:
                        searches[page.searchOptions.platform] = True
            else:
                queries[idx] = None

        queries = [query for query in queries if query is not None]

    assert page_count == expected_page_count
    assert len(combined_results) == expected_results_size
    # BUG FIX: iterate the completion flags (.values()), not the dict's keys —
    # the keys are platform-name strings, which are always truthy, so the
    # original assertion could never fail.
    assert len([completed for completed in searches.values() if completed]) >= len(search_opts)
51 |
52 |
def run_test_search_generator(search_opts: ASFSearchOptions):
    """Consumes a single search_generator query page-by-page and validates the accumulated results."""
    minimum_pages = int(search_opts.maxResults / INTERNAL.CMR_PAGE_SIZE)

    pages_consumed = 0
    results = ASFSearchResults([])

    for page in search_generator(opts=search_opts):
        results.extend(page)
        # Carry the generator's bookkeeping onto the accumulated results
        results.searchComplete = page.searchComplete
        results.searchOptions = page.searchOptions
        pages_consumed += 1

    assert minimum_pages <= pages_consumed
    assert len(results) <= search_opts.maxResults
    assert results.searchComplete

    preprocess_opts(search_opts)

    # Every option except maxResults should round-trip through the search
    for key, val in search_opts:
        if key != 'maxResults':
            assert getattr(results.searchOptions, key) == val
76 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | """asf_search setuptools configuration"""
2 |
3 | from setuptools import find_packages, setup
4 |
5 | requirements = [
6 | 'requests',
7 | 'shapely',
8 | 'pytz',
9 | 'numpy',
10 | 'dateparser',
11 | 'python-dateutil',
12 | 'tenacity>=8.2.2',
13 | ]
14 |
15 | test_requirements = [
16 | 'pytest==8.1.1',
17 | 'pytest-automation==3.0.0',
18 | 'pytest-cov',
19 | 'pytest-xdist',
20 | 'coverage',
21 | 'requests-mock==1.11.0',
22 | 'nbformat',
23 | 'nbconvert',
24 | 'ipykernel',
25 | ]
26 |
27 | extra_requirements = [
28 | 'remotezip>=0.10.0',
29 | 'ciso8601',
30 | ]
31 |
32 | # Required for ARIA-S1 GUNW Stacking
33 | asf_enumeration = [
34 | 'asf-enumeration>=0.4.0'
35 | ]
36 |
37 | # Required for optional Sentinel-1 Pair coherence estimation
38 | coherence = [
39 | 'pandas',
40 | 'zarr',
41 | 's3fs',
42 | 'rioxarray',
43 | ]
44 |
45 | with open('README.md', 'r') as readme_file:
46 | readme = readme_file.read()
47 |
48 | setup(
49 | name='asf_search',
50 | # version=Declared in pyproject.toml, through "[tool.setuptools_scm]"
51 | author='Alaska Satellite Facility Discovery Team',
52 | author_email='uaf-asf-discovery@alaska.edu',
53 | description="Python wrapper for ASF's SearchAPI",
54 | long_description=readme,
55 | long_description_content_type='text/markdown',
56 | url='https://github.com/asfadmin/Discovery-asf_search.git',
57 | project_urls={'Documentation': 'https://docs.asf.alaska.edu/asf_search/basics/'},
58 | packages=find_packages(exclude=['tests.*', 'tests', 'examples.*', 'examples']),
59 | package_dir={'asf_search': 'asf_search'},
60 | include_package_data=True,
61 | python_requires='>=3.10',
62 | install_requires=requirements,
63 | extras_require={'test': test_requirements, 'extras': extra_requirements, 'asf-enumeration': asf_enumeration, 'coherence': coherence},
64 | license='BSD',
65 | license_files=('LICENSE',),
66 | classifiers=[
67 | 'Development Status :: 5 - Production/Stable',
68 | 'License :: OSI Approved :: BSD License',
69 | 'Operating System :: OS Independent',
70 | 'Intended Audience :: Developers',
71 | 'Intended Audience :: Science/Research',
72 | 'Programming Language :: Python :: 3',
73 | 'Programming Language :: Python :: 3 :: Only',
74 | 'Programming Language :: Python :: 3.10',
75 | 'Programming Language :: Python :: 3.11',
76 | 'Programming Language :: Python :: 3.12',
77 | 'Topic :: Software Development',
78 | 'Topic :: Scientific/Engineering :: Atmospheric Science',
79 | 'Topic :: Scientific/Engineering :: GIS',
80 | 'Topic :: Scientific/Engineering :: Hydrology',
81 | 'Topic :: Utilities',
82 | ],
83 | )
84 |
--------------------------------------------------------------------------------
/examples/0-Intro.md:
--------------------------------------------------------------------------------
1 | # SAR Data in Python: Getting to Know asf_search
2 |
3 | ***
4 | ## About the Alaska Satellite Facility
5 |
6 | __ASF is part of the Geophysical Institute of the University of Alaska Fairbanks.__
7 |
8 | - ASF downlinks, processes, archives, and distributes remote-sensing data to scientific users around the world.
9 | - ASF promotes, facilitates, and participates in the advancement of remote sensing to support national and international Earth science research, field operations, and commercial applications.
10 | - ASF commits to provide the highest quality data and services in a timely manner.
11 |
12 | __Distributed Active Archive Center (DAAC):__ ASF operates the NASA archive of synthetic aperture radar (SAR) data from a variety of satellites and aircraft, providing these data and associated specialty support services to researchers in support of NASA’s Earth Science Data and Information System (ESDIS) project.
13 |
14 | [ASF Website](https://asf.alaska.edu)
15 |
16 | [Contact ASF](https://asf.alaska.edu/contact/)
17 |
18 | ***
19 | ## ASF Discovery Team
20 |
21 | __The ASF Discovery team's focus is to provide tools that help users find and acquire the data they want as quickly and smoothly as possible.__
22 |
23 | Some tools provided by ASF's Discovery team include:
24 | - [Vertex](https://search.asf.alaska.edu): web application for searching the ASF archive, as well as performing meta-analysis and custom On Demand processing
25 | - [ASF Search API](https://docs.asf.alaska.edu/api/basics/): Public REST API for searching the ASF archive
26 | - [ASF Python Search Module](https://docs.asf.alaska.edu/asf_search/basics/) (asf_search): Python module for programmatically finding and acquiring data from the ASF archive
27 |
28 | ***
29 | ## Working in Python: asf_search
30 | __asf_search is a Python module created to simplify the process of finding and acquiring data programmatically.__
31 | - Search API is very technical: we saw a need to reduce the technical overhead required when using Search API.
32 | - Vertex is very interactive: sometimes, an automated or semi-automated process is required, and we wanted to make the power of Vertex available in that context.
33 | - Downloading data can be difficult: there are many ways to acquire data, and knowing how to use them effectively can be difficult.
34 |
35 | ***
36 | ## Today's Topic
37 | __We will explore some basic usage of asf_search, with a focus on search functionality.__
38 |
39 | Specific features to be covered include:
40 | - Classes for working with ASF data
41 | - Search functions
42 | - Authentication methods
43 | - Download functionality
44 |
45 | This session is targeted largely at users who have a passing familiarity with Python, but `asf_search` is designed to be easily used by everyone from novice to expert.
46 |
47 | ***
48 | Next: [Basic Overview](./1-Basic_Overview.ipynb)
--------------------------------------------------------------------------------
/tests/yml_tests/test_collection_attributes.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - test-collection-attributes SLC:
3 | params:
4 | processingLevel: SLC
5 | expected_attributes: ['SV_POSITION_PRE', 'SV_VELOCITY_PRE', 'SV_POSITION_POST', 'SV_VELOCITY_POST', 'ASC_NODE_TIME', 'SV_START_POSITION', 'SV_START_VELOCITY', 'SV_CENTER_POSITION', 'SV_CENTER_VELOCITY', 'SV_END_POSITION', 'SV_END_VELOCITY', 'POLARIZATION', 'FLIGHT_LINE', 'BEAM_MODE', 'BEAM_MODE_DESC', 'BEAM_MODE_TYPE', 'FARADAY_ROTATION', 'INSAR_STACK_ID', 'INSAR_STACK_SIZE', 'INSAR_BASELINE', 'DOPPLER', 'ASCENDING_DESCENDING', 'THUMBNAIL_URL', 'CENTER_FRAME_ID', 'PATH_NUMBER', 'FRAME_NUMBER', 'CENTER_ESA_FRAME', 'MISSION_NAME', 'GRANULE_TYPE', 'PROCESSING_DATE', 'PROCESSING_DESCRIPTION', 'LOOK_DIRECTION', 'PROCESSING_TYPE', 'PROCESSING_LEVEL', 'PROCESSING_TYPE_DISPLAY', 'BYTES', 'MD5SUM', 'OFF_NADIR_ANGLE', 'ACQUISITION_DATE', 'CENTER_LON', 'CENTER_LAT', 'NEAR_START_LAT', 'NEAR_START_LON', 'FAR_START_LAT', 'FAR_START_LON', 'NEAR_END_LAT', 'NEAR_END_LON', 'FAR_END_LAT', 'FAR_END_LON', 'ASF_PLATFORM', 'GROUP_ID', 'SV_POSITION', 'SV_VELOCITY']
6 |
7 | - test-collection-attributes DISP-S1:
8 | params:
9 | processingLevel: DISP-S1
10 | expected_attributes: ['POLARIZATION', 'PRODUCT_VERSION', 'PROCESSING_TYPE', 'FRAME_ID', 'STACK_ID', 'ASCENDING_DESCENDING', 'PATH_NUMBER', 'REFERENCE_ZERO_DOPPLER_START_TIME', 'REFERENCE_ZERO_DOPPLER_END_TIME', 'SECONDARY_ZERO_DOPPLER_START_TIME', 'SECONDARY_ZERO_DOPPLER_END_TIME', 'FRAME_NUMBER']
11 |
12 | - test-collection-attributes ALOS2_L1_PSR2:
13 | params:
14 | shortName: ALOS2_L1_PSR2
15 | expected_attributes: ['ASCENDING_DESCENDING', 'BEAM_MODE', 'BEAM_MODE_DESC', 'CENTER_LAT', 'CENTER_LON', 'FRAME_NUMBER', 'LOOK_DIRECTION', 'OFF_NADIR_ANGLE', 'OPERATIONAL_MODE', 'OPERATIONAL_MODE_DESC', 'PATH_NUMBER', 'POLARIZATION', 'PROCESSING_LEVEL', 'SV_POSITION', 'SV_VELOCITY']
16 |
17 | - test-collection-attributes SEASAT C3576379529-ASF:
18 | params:
19 | conceptID: C3576379529-ASF
20 | expected_attributes: ['ASCENDING_DESCENDING', 'BEAM_MODE', 'BEAM_MODE_DESC', 'FRAME_NUMBER', 'PATH_NUMBER', 'POLARIZATION']
21 |
22 | - test-collection-attributes Fake Concept ID:
23 | params:
24 | conceptID: NotAValidConceptID-ASF
25 | expected_attributes: []
26 | expect_failure: True
27 |
28 | - test-collection-attributes Wrong CMR Host Maturity Correct Concept ID:
29 | params:
30 | conceptID: 'C1271768606-ASF'
31 | expected_attributes: []
32 | expect_failure: True
33 |
34 |   - test-collection-attributes All None Params:
35 | params:
36 | conceptID: None
37 | shortName: None
38 | processingLevel: None
39 | expected_attributes: []
40 | expect_failure: True
41 |
42 | - test-collection-attributes Wrong CMR invalid/missing processing level:
43 | params:
44 | processingLevel: NotARealProcessingLevel
45 | expected_attributes: []
46 | expect_failure: True
47 |
--------------------------------------------------------------------------------
/conftest.py:
--------------------------------------------------------------------------------
1 | import argparse
2 | import pytest
3 | import os
4 | from asf_search.ASFSession import ASFSession
5 | from getpass import getpass
6 |
def string_to_session(user_input: str) -> ASFSession:
    """Build an ASFSession, authenticating with *user_input* as an EDL token when one is given."""
    session = ASFSession()
    if user_input:
        session.auth_with_token(user_input)
    return session
14 |
def set_should_auth_session(user_input: str) -> ASFSession:
    """Build an ASFSession; when *user_input* parses truthy, authenticate via the EDL_TOKEN env var."""
    session = ASFSession()
    if not string_to_bool(user_input):
        return session

    token = os.environ.get('EDL_TOKEN')
    if token is None:
        raise argparse.ArgumentTypeError("ERROR: Environment variable `EDL_TOKEN` token not set, cannot create authenticated session for tests. Are you running this in the correct local/github action environment?")

    try:
        session.auth_with_token(token=token)
    except Exception as exc:
        raise argparse.ArgumentTypeError(f"Unabled to authenticate with the given environment's `EDL_TOKEN` (token may need to be refreshed). Original exception: {str(exc)}")

    return session
28 |
def set_should_auth_session_with_creds(user_input: str) -> ASFSession:
    """Build an ASFSession; when *user_input* parses truthy, prompt for EDL username/password."""
    should_authenticate = string_to_bool(user_input)
    session = ASFSession()
    if should_authenticate:
        username = input('EDL Username')
        password = getpass('EDL Password')
        session.auth_with_creds(username, password)
    return session
36 |
def set_should_auth_session_with_token(user_input: str) -> ASFSession:
    """Build an ASFSession; when *user_input* parses truthy, prompt for an EDL token."""
    should_authenticate = string_to_bool(user_input)
    session = ASFSession()
    if should_authenticate:
        session.auth_with_token(getpass('EDL Token'))
    return session
44 |
def string_to_bool(user_input: str) -> bool:
    """
    Parse a case-insensitive prefix of 'true'/'false' (e.g. 't', 'TR', 'false') to a bool.

    :raises argparse.ArgumentTypeError: for any other input.

    BUG FIX: an empty string is a prefix of both 'TRUE' and 'FALSE' and previously
    parsed (silently) as True; empty input is now rejected with an error.
    """
    user_input = str(user_input).upper()
    if not user_input:
        raise argparse.ArgumentTypeError(f"ERROR: Could not convert '{user_input}' to bool (true/false/t/f).")
    if 'TRUE'.startswith(user_input):
        return True
    elif 'FALSE'.startswith(user_input):
        return False
    else:
        raise argparse.ArgumentTypeError(f"ERROR: Could not convert '{user_input}' to bool (true/false/t/f).")
53 |
def pytest_addoption(parser: pytest.Parser):
    """
    Register authentication-related pytest CLI options.

    All three options store into the same `authenticated_session` dest; the
    `type=` callbacks eagerly build (and possibly authenticate) an ASFSession
    while the command line is being parsed.
    """
    parser.addoption("--should_auth_session", action="store", dest="authenticated_session", type=set_should_auth_session, default='FALSE',
        help="'should_auth_session': Set if the test case requires authentication (pull from `EDL_TOKEN` environment variable)"
    )

    parser.addoption("--auth_with_creds", action="store", dest="authenticated_session", type=set_should_auth_session_with_creds, default='FALSE',
        help="'auth_with_creds': Use EDL username and password to authenticate session for relevant tests")

    # BUG FIX: this option's help text previously labeled itself 'auth_with_creds'
    parser.addoption("--auth_with_token", action="store", dest="authenticated_session", type=set_should_auth_session_with_token, default='FALSE',
        help="'auth_with_token': Use EDL token to authenticate session for relevant tests")
64 |
--------------------------------------------------------------------------------
/tests/BaselineSearch/Stack/test_stack.py:
--------------------------------------------------------------------------------
1 | from typing import List
2 | from numbers import Number
3 | from asf_search.baseline.stack import find_new_reference, get_baseline_from_stack
4 | from asf_search import ASFProduct, ASFSearchResults, ASFSession, ASFStackableProduct
5 | from asf_search.search.search_generator import as_ASFProduct
6 |
7 | import pytest
8 |
9 |
def run_test_find_new_reference(stack: List, output_index: Number) -> None:
    """
    Exercise asf_search.baseline.stack.find_new_reference:
    an empty stack yields None, otherwise the product at *output_index* is chosen.
    """
    if stack == []:
        assert find_new_reference(stack) is None
        return

    products = [as_ASFProduct(resource, ASFSession()) for resource in stack]
    for position, candidate in enumerate(products):
        # clear_baseline mutates the product in place when the resource omitted baseline data
        clear_baseline(stack[position], candidate)

    expected_name = stack[output_index]['properties']['sceneName']
    assert find_new_reference(products).properties['sceneName'] == expected_name
25 |
26 |
def run_test_get_default_product_type(product: ASFStackableProduct, product_type: str) -> None:
    """Verify the product reports the expected default baseline product type."""
    actual = product.get_default_baseline_product_type()
    assert actual == product_type
29 |
30 |
def run_test_get_baseline_from_stack(reference, stack, output_stack, error):
    """
    Exercise get_baseline_from_stack: with no expected error, compare computed
    baselines against *output_stack*; otherwise strip baseline data and expect ValueError.
    """
    reference = as_ASFProduct(reference, ASFSession())
    stack = ASFSearchResults([as_ASFProduct(resource, ASFSession()) for resource in stack])

    if error is None:
        stack, warnings = get_baseline_from_stack(reference, stack)

        for position, product in enumerate(stack):
            expected = output_stack[position]['properties']
            for field in ('sceneName', 'perpendicularBaseline', 'temporalBaseline'):
                assert product.properties[field] == expected[field]
        return

    with pytest.raises(ValueError):
        # Erase every product's baseline data so stacking must fail
        for product in stack:
            if product.baseline.get('insarBaseline', False):
                product.baseline = {}
            else:
                product.baseline['stateVectors']['positions'] = {}
                product.baseline['stateVectors']['velocities'] = {}
        reference.baseline = {}
        get_baseline_from_stack(reference=reference, stack=stack)
55 |
56 |
def run_test_valid_state_vectors(reference, output):
    """Check is_valid_reference() against the expected *output* for a given resource (no-op when None)."""
    if reference is None:
        return
    product = as_ASFProduct(reference, ASFSession())
    clear_baseline(reference, product)
    assert product.is_valid_reference() == output
63 |
64 |
def clear_baseline(resource, product: ASFProduct):
    """
    Re-clear baseline state vectors on *product*.

    The ASFProduct constructor can restore baseline values from UMM; this erases
    them again whenever the test resource deliberately omitted them.
    """
    state_vectors = resource['baseline'].get('stateVectors')
    if state_vectors and state_vectors.get('positions') == {}:
        product.baseline = {'stateVectors': {'positions': {}, 'velocities': {}}}

    return product
73 |
--------------------------------------------------------------------------------
/tests/ASFSearchOptions/test_ASFSearchOptions.py:
--------------------------------------------------------------------------------
1 | import copy
2 | from asf_search.ASFSearchOptions import validators, ASFSearchOptions
3 | from asf_search.ASFSearchOptions.config import config
4 | from asf_search.ASFSearchOptions.validator_map import validate, validator_map
5 | from pytest import raises
6 |
7 |
def run_test_validator_map_validate(key, value, output):
    """
    validate() should parse known keys to *output* and raise KeyError for unknown
    keys, including a 'Did you mean' hint when only the casing is wrong.
    """
    if key not in validator_map:
        with raises(KeyError) as keyerror:
            validate(key, value)

        if key not in config.keys():
            lowered_keys = {valid_key.lower() for valid_key in validator_map}
            if key in lowered_keys:
                assert 'Did you mean' in str(keyerror.value)
        return

    assert validate(key, value) == output
23 |
24 |
def run_test_ASFSearchOptions_validator(validator_name, param, output, error):
    """Run the named validator on *param*, expecting either *output* or a ValueError containing *error*."""
    validator = getattr(validators, validator_name)

    if error is not None:
        with raises(ValueError) as e:
            validator(param)
        assert error in str(e.value)
        return

    assert validator(param) == output
34 |
35 |
def run_test_ASFSearchOptions(**kwargs):
    """
    Build an ASFSearchOptions from the yml test case in kwargs['test_info'],
    checking expected attribute values, expected exceptions, and that
    reset_search() clears non-default params while preserving config params.
    """
    test_info = copy.copy(kwargs['test_info'])
    exception = test_info['exception']  # Can be "None" for don't.
    expect_output = test_info.pop('expect_output', {})

    # Take out anything that isn't supposed to reach the options object:
    del test_info['title']
    del test_info['exception']

    try:
        options_obj = ASFSearchOptions(**test_info)
    except (KeyError, ValueError) as e:
        assert (
            type(e).__name__ == exception
        ), f"ERROR: Didn't expect exception {type(e).__name__} to occur."
        return
    else:
        assert (
            exception is None
        ), f'ERROR: Expected exception {exception}, but SearchOptions never threw.'

    for key, val in expect_output.items():
        assert (
            getattr(options_obj, key) == val
        ), f"ERROR: options object param '{key}' should have value '{val}'. Got '{getattr(options_obj, key)}'."

    # test ASFSearchOptions.reset_search()
    options_obj.reset_search()

    assert (
        len([val for key, val in dict(options_obj).items() if key not in config.keys()]) == 0
    ), 'ERROR: ASFSearchOptions.reset() did not clear all non-default searchable params'

    for key, value in config.items():
        if test_info.get(key) is not None:
            assert (
                getattr(options_obj, key) == test_info[key]
            ), f"ERROR: User defined value '{test_info[key]}' for default param '{key}', but value was lost after ASFSearchOptions.reset()"
        else:
            # BUG FIX: message previously interpolated stale 'val' left over from the
            # expect_output loop above; it must report this loop's 'value'.
            assert (
                getattr(options_obj, key) == value
            ), f"ERROR: default param '{key}' left default by user changed, should have value '{value}'. Got '{getattr(options_obj, key)}'."
81 |
--------------------------------------------------------------------------------
/asf_search/ASFStackableProduct.py:
--------------------------------------------------------------------------------
1 | from enum import Enum
2 | import copy
3 | from typing import Dict, Union
4 | from asf_search import ASFSession, ASFProduct
5 | from asf_search.ASFSearchOptions import ASFSearchOptions
6 | from asf_search.exceptions import ASFBaselineError
7 |
8 |
class ASFStackableProduct(ASFProduct):
    """
    Base class for products that support baseline stacking.
    Used for ERS-1 and ERS-2 products.

    ASF ERS-1 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-1/
    ASF ERS-2 Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/ers-2/
    """

    class BaselineCalcType(Enum):
        """
        Defines how asf-search will calculate perpendicular baseline for products of this subclass
        """

        PRE_CALCULATED = 0
        """Has pre-calculated insarBaseline value that will be used for perpendicular calculations"""  # noqa F401
        CALCULATED = 1
        """Uses position/velocity state vectors and ascending node time for perpendicular calculations"""  # noqa F401

    baseline_type = BaselineCalcType.PRE_CALCULATED
    """Determines how asf-search will attempt to stack products of this type."""

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)
        # May be None when the product has no INSAR_BASELINE additional attribute
        self.baseline = self.get_baseline_calc_properties()

    def get_baseline_calc_properties(self) -> Dict:
        """
        Return {'insarBaseline': <float>} from the product's INSAR_BASELINE
        additional attribute, or None when the attribute is absent.
        """
        insarBaseline = self.umm_cast(
            float,
            self.umm_get(
                self.umm,
                'AdditionalAttributes',
                ('Name', 'INSAR_BASELINE'),
                'Values',
                0,
            ),
        )

        if insarBaseline is None:
            return None

        return {'insarBaseline': insarBaseline}

    def get_stack_opts(self, opts: ASFSearchOptions = None):
        """
        Build search options that can be used to find this product's baseline stack.

        :param opts: optional ASFSearchOptions to base the stack search on (shallow-copied)
        :raises ASFBaselineError: if the product has no usable insarStackId
        :return: ASFSearchOptions describing the stack search
        """
        # BUG FIX: `copy` is the module imported at the top of this file, so
        # `copy(opts)` raised "TypeError: 'module' object is not callable";
        # copy.copy() performs the intended shallow copy of the options.
        stack_opts = ASFSearchOptions() if opts is None else copy.copy(opts)
        stack_opts.processingLevel = self.get_default_baseline_product_type()

        if self.properties.get('insarStackId') in [None, 'NA', 0, '0']:
            raise ASFBaselineError(
                'Requested reference product needs a baseline stack ID '
                f'but does not have one: {self.properties["fileID"]}'
            )

        stack_opts.insarStackId = self.properties['insarStackId']
        return stack_opts

    def is_valid_reference(self):
        # we don't stack at all if any of stack is missing insarBaseline,
        # unlike stacking S1 products(?)
        # NOTE(review): self.baseline can be None when no INSAR_BASELINE attribute
        # exists; that currently surfaces as TypeError rather than ValueError — confirm intent.
        if 'insarBaseline' not in self.baseline:
            raise ValueError('No baseline values available for precalculated dataset')

        return True

    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        """
        Returns the product type to search for when building a baseline stack.
        """
        return None

    def has_baseline(self) -> bool:
        # True when the product carries a pre-calculated insarBaseline value
        baseline = self.get_baseline_calc_properties()

        return baseline is not None
83 |
--------------------------------------------------------------------------------
/asf_search/constants/PRODUCT_TYPE.py:
--------------------------------------------------------------------------------
# Constants for CMR PROCESSING_TYPE / product-type search values, grouped by platform.

# Sentinel-1
GRD_HD = 'GRD_HD'
GRD_MD = 'GRD_MD'
GRD_MS = 'GRD_MS'
GRD_HS = 'GRD_HS'
GRD_FD = 'GRD_FD'
SLC = 'SLC'
OCN = 'OCN'
RAW = 'RAW'
METADATA_GRD_HD = 'METADATA_GRD_HD'
METADATA_GRD_MD = 'METADATA_GRD_MD'
METADATA_GRD_MS = 'METADATA_GRD_MS'
METADATA_GRD_HS = 'METADATA_GRD_HS'
METADATA_SLC = 'METADATA_SLC'
METADATA_OCN = 'METADATA_OCN'
METADATA_RAW = 'METADATA_RAW'
BURST = 'BURST'

# ALOS PALSAR
L1_0 = 'L1.0'
L1_1 = 'L1.1'
L1_5 = 'L1.5'
L2_2 = 'L2.2'
RTC_LOW_RES = 'RTC_LOW_RES'
RTC_HIGH_RES = 'RTC_HI_RES'
KMZ = 'KMZ'

# ALOS AVNIR
# No PROCESSING_TYPE attribute in CMR

# SIR-C
# SLC and SLC metadata are both 'SLC', provided by Sentinel-1 constants

# Sentinel-1 InSAR
GUNW_STD = 'GUNW_STD'
GUNW_AMP = 'GUNW_AMP'
GUNW_CON = 'GUNW_CON'
GUNW_COH = 'GUNW_COH'
GUN_COH = 'GUNW_COH'
"""Deprecated misspelled alias for GUNW_COH, kept for backward compatibility."""
GUNW_UNW = 'GUNW_UNW'

# SMAP
L1A_RADAR_RO_HDF5 = 'L1A_Radar_RO_HDF5'
L1A_RADAR_HDF5 = 'L1A_Radar_HDF5'
L1B_S0_LOW_RES_HDF5 = 'L1B_S0_LoRes_HDF5'
L1C_S0_HIGH_RES_HDF5 = 'L1C_S0_HiRes_HDF5'
L1A_RADAR_RO_QA = 'L1A_Radar_RO_QA'
L1A_RADAR_QA = 'L1A_Radar_QA'
L1B_S0_LOW_RES_QA = 'L1B_S0_LoRes_QA'
L1C_S0_HIGH_RES_QA = 'L1C_S0_HiRes_QA'
L1A_RADAR_RO_ISO_XML = 'L1A_Radar_RO_ISO_XML'
L1B_S0_LOW_RES_ISO_XML = 'L1B_S0_LoRes_ISO_XML'
L1C_S0_HIGH_RES_ISO_XML = 'L1C_S0_HiRes_ISO_XML'

# UAVSAR
AMPLITUDE = 'AMPLITUDE'
STOKES = 'STOKES'
AMPLITUDE_GRD = 'AMPLITUDE_GRD'
PROJECTED = 'PROJECTED'
PROJECTED_ML5X5 = 'PROJECTED_ML5X5'
PROJECTED_ML3X3 = 'PROJECTED_ML3X3'
INTERFEROMETRY_GRD = 'INTERFEROMETRY_GRD'
INTERFEROMETRY = 'INTERFEROMETRY'
COMPLEX = 'COMPLEX'
# KMZ provided by ALOS PALSAR
INC = 'INC'
SLOPE = 'SLOPE'
DEM_TIFF = 'DEM_TIFF'
PAULI = 'PAULI'
METADATA = 'METADATA'

# RADARSAT
L0 = 'L0'
L1 = 'L1'

# ERS
# L0 provided by RADARSAT
# L1 provided by RADARSAT

# JERS
# L0 provided by RADARSAT
# L1 provided by RADARSAT

# AIRSAR
CTIF = 'CTIF'
PTIF = 'PTIF'
LTIF = 'LTIF'
JPG = 'JPG'
LSTOKES = 'LSTOKES'
PSTOKES = 'PSTOKES'
CSTOKES = 'CSTOKES'
DEM = 'DEM'
THREEFP = '3FP'

# OPERA-S1
RTC = 'RTC'
CSLC = 'CSLC'
RTC_STATIC = 'RTC-STATIC'
CSLC_STATIC = 'CSLC-STATIC'
DISP_S1 = 'DISP-S1'
TROPO_ZENITH = 'TROPO-ZENITH'

# NISAR
L0B = 'L0B'
"""alias for RRSD Level Zero B product types"""

RRSD = 'RRSD'

RSLC = 'RSLC'
"""Level 1 Range-Doppler Single Look Complex"""
RIFG = 'RIFG'
"""Level 1 Range-Doppler Wrapped Interferogram"""
RUNW = 'RUNW'
"""Level 1 Range-Doppler Unwrapped Interferogram"""
ROFF = 'ROFF'
"""Level 1 Range-Doppler Pixel Offsets"""
GSLC = 'GSLC'
"""Level 2 Geocoded Single Look Complex"""
GCOV = 'GCOV'
"""Level 2 Geocoded Polarimetric Covariance"""
GUNW = 'GUNW'
"""Level 2 Geocoded Unwrapped Interferogram"""
GOFF = 'GOFF'
"""Level 2 Geocoded Pixel Offsets"""
SME2 = 'SME2'
"""Level 3 Soil Moisture EASE-Grid 2.0"""
126 |
127 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/Fairbanks_SLC_Incomplete_Meta.yml:
--------------------------------------------------------------------------------
1 | {
2 | "type": "Feature",
3 | "geometry": {
4 | "coordinates": [
5 | [
6 | [
7 | -150.172562,
8 | 65.53125
9 | ],
10 | [
11 | -149.246063,
12 | 63.942123
13 | ],
14 | [
15 | -144.136368,
16 | 64.386414
17 | ],
18 | [
19 | -144.751495,
20 | 65.99025
21 | ],
22 | [
23 | -150.172562,
24 | 65.53125
25 | ]
26 | ]
27 | ],
28 | "type": "Polygon"
29 | },
30 | "properties": {
31 | "beamModeType": "IW",
32 | "browse": null,
33 | "bytes": 4193723581,
34 | "centerLat": 64.9861,
35 | "centerLon": -147.0909,
36 | "faradayRotation": null,
37 | "fileID": "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081-SLC",
38 | "flightDirection": null,
39 | "groupID": "S1B_IWDV_0209_0216_024970_094",
40 | "granuleType": "SENTINEL_1B_FRAME",
41 | "insarStackId": null,
42 | "md5sum": "6dd7f6a56ed98ba7037dfeb833217d5b",
43 | "offNadirAngle": null,
44 | "orbit": 24970,
45 | "pathNumber": 94,
46 | "platform": "Sentinel-1B",
47 | "pointingAngle": null,
48 | "polarization": "VV+VH",
49 | "processingDate": "2021-01-02T03:20:31.000Z",
50 | "processingLevel": "SLC",
51 | "sceneName": "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081",
52 | "sensor": "C-SAR",
53 | "startTime": "2021-01-02T03:20:31.000Z",
54 | "stopTime": "2021-01-02T03:20:58.000Z",
55 | "url": "https://datapool.asf.alaska.edu/SLC/SB/S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081.zip",
56 | "fileName": "S1B_IW_SLC__1SDV_20210102T032031_20210102T032058_024970_02F8C3_C081.zip",
57 | "frameNumber": 210
58 | },
59 | "baseline": {
60 | "stateVectors": {
61 | "positions": {
62 | "prePosition": [
63 | -2893767.065414,
64 | -1235752.268405,
65 | 6327528.043215
66 | ],
67 | "prePositionTime": "2021-01-02T03:20:43.000000",
68 | "postPosition": [
69 | -2845284.115433,
70 | -1186496.621016,
71 | 6358798.348458
72 | ],
73 | "postPositionTime": "2021-01-02T03:20:53.000000"
74 | },
75 | "velocities": {
76 | "preVelocity": [
77 | 4828.593801,
78 | 4922.268943,
79 | 3162.776438
80 | ],
81 | "preVelocityTime": "2021-01-02T03:20:43.000000",
82 | "postVelocity": [
83 | 4867.907153,
84 | 4928.758938,
85 | 3091.226142
86 | ],
87 | "postVelocityTime": "2021-01-02T03:20:53.000000"
88 | }
89 | },
90 | "ascendingNodeTime": "2021-01-02T03:02:58.934857"
91 | },
92 | "umm": {
93 | },
94 | "meta": {
95 | }
96 | }
--------------------------------------------------------------------------------
/tests/WKT/test_validate_wkt.py:
--------------------------------------------------------------------------------
1 | from numbers import Number
2 | import pytest
3 | from typing import List
4 |
5 | from shapely.wkt import loads
6 | from shapely.geometry.base import BaseMultipartGeometry
7 |
8 | from asf_search.WKT.validate_wkt import (
9 | validate_wkt,
10 | _search_wkt_prep,
11 | _get_clamped_and_wrapped_geometry,
12 | _get_convex_hull,
13 | _merge_overlapping_geometry,
14 | _counter_clockwise_reorientation,
15 | _simplify_aoi,
16 | _get_shape_coords,
17 | )
18 | from asf_search.exceptions import ASFWKTError
19 |
20 |
def run_test_validate_wkt_invalid_wkt_error(wkt: str):
    """Malformed WKT input must raise ASFWKTError."""
    with pytest.raises(ASFWKTError):
        validate_wkt(wkt)
24 |
25 |
def run_test_validate_wkt_valid_wkt(wkt: str, validated_wkt: str):
    """validate_wkt should yield the expected wrapped AOI for both str and geometry input."""
    expected_aoi = loads(validated_wkt)

    actual_wrapped, _actual_unwrapped, _ = validate_wkt(wkt)
    assert actual_wrapped.equals(
        expected_aoi
    ), f'expected, {expected_aoi.wkt}, got {actual_wrapped.wkt}'

    # Passing an already-parsed geometry must behave the same as a WKT string
    geom_wrapped, _geom_unwrapped, _ = validate_wkt(loads(wkt))
    assert geom_wrapped.equals(expected_aoi)
36 |
37 |
def run_test_validate_wkt_clamp_geometry(
    wkt: str, clamped_wkt: str, clamped_count: Number, wrapped_count: Number
):
    """Clamping/wrapping should produce the expected geometry and repair-report counts."""
    resp = _get_clamped_and_wrapped_geometry(loads(wkt))
    assert resp[0].wkt == clamped_wkt

    reports = resp[2]
    if clamped_count > 0:
        # Third word of the repair report is the clamped-coordinate count
        assert reports[0].report.split(' ')[2] == str(clamped_count)
    if wrapped_count > 0:
        assert reports[1].report.split(' ')[2] == str(wrapped_count)
49 |
50 |
def run_test_validate_wkt_convex_hull(wkt: str, corrected_wkt: str):
    """Convex-hull repair should produce the expected corrected WKT."""
    hull_wkt = _get_convex_hull(loads(wkt))[0].wkt
    assert hull_wkt == corrected_wkt
54 |
55 |
def run_test_validate_wkt_merge_overlapping_geometry(wkt: str, merged_wkt: str):
    """Overlapping member geometries should merge into the expected shape."""
    merged = _merge_overlapping_geometry(loads(wkt))

    # A multipart result is compared via its first member geometry
    if isinstance(merged, BaseMultipartGeometry):
        merged = merged.geoms
    assert merged[0].equals(loads(merged_wkt))
63 |
64 |
def run_test_validate_wkt_counter_clockwise_reorientation(wkt: str, cc_wkt: str):
    """Reorientation should yield the expected counter-clockwise WKT."""
    reoriented = _counter_clockwise_reorientation(loads(wkt))[0]
    assert reoriented.wkt == cc_wkt
69 |
70 |
def run_test_validate_wkt_get_shape_coords(wkt: str, coords: List[Number]):
    """_get_shape_coords should return exactly the expected coordinate pairs (order-insensitive)."""
    shape_coords = sorted(
        [coord[0], coord[1]] for coord in _get_shape_coords(loads(wkt))
    )
    coords.sort()

    assert len(shape_coords) == len(coords)
    assert shape_coords == coords
80 |
81 |
def run_test_search_wkt_prep(wkt: str):
    """_search_wkt_prep should preserve geometry type; None input must raise ASFWKTError."""
    # The ' ' sentinel marks the None-input error case in the yml fixtures
    if wkt == ' ':
        with pytest.raises(ASFWKTError):
            _search_wkt_prep(None)
        return

    shape = loads(wkt)
    prepped = _search_wkt_prep(shape)
    assert prepped.geom_type == shape.geom_type
    assert shape.wkt == wkt
93 |
94 |
def run_test_simplify_aoi(wkt: str, simplified: str, repairs: List[str]):
    """_simplify_aoi should produce the expected simplified shape and repair reports."""
    simplified_shape, shape_repairs = _simplify_aoi(loads(wkt))

    assert simplified_shape.equals(loads(simplified))

    for position, expected_prefix in enumerate(repairs):
        assert shape_repairs[position].report.startswith(expected_prefix)
103 |
--------------------------------------------------------------------------------
/asf_search/Products/NISARProduct.py:
--------------------------------------------------------------------------------
1 | from typing import Dict, Tuple, Union
2 | from asf_search import ASFSearchOptions, ASFSession, ASFStackableProduct
3 | from asf_search.CMR.translate import try_parse_frame_coverage, try_parse_bool, try_parse_int
4 |
class NISARProduct(ASFStackableProduct):
    """
    Used for NISAR dataset products

    ASF Dataset Documentation Page: https://asf.alaska.edu/nisar/
    """

    _base_properties = {
        **ASFStackableProduct._base_properties,
        # Sentinel, ALOS, and NISAR product alt for frameNumber (ESA_FRAME)
        'frameNumber': {
            'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0],
            'cast': try_parse_int,
        },
        'pgeVersion': {'path': ['PGEVersionClass', 'PGEVersion']},
        'mainBandPolarization': {'path': ['AdditionalAttributes', ('Name', 'FREQUENCY_A_POLARIZATION'), 'Values']},
        'sideBandPolarization': {'path': ['AdditionalAttributes', ('Name', 'FREQUENCY_B_POLARIZATION'), 'Values']},
        'frameCoverage': {'path': ['AdditionalAttributes', ('Name', 'FULL_FRAME'), 'Values', 0], 'cast': try_parse_frame_coverage},
        'jointObservation': {'path': ['AdditionalAttributes', ('Name', 'JOINT_OBSERVATION'), 'Values', 0], 'cast': try_parse_bool},
        'rangeBandwidth': {'path': ['AdditionalAttributes', ('Name', 'RANGE_BANDWIDTH_CONCAT'), 'Values']},
        'productionConfiguration': {'path': ['AdditionalAttributes', ('Name', 'PRODUCTION_PIPELINE'), 'Values', 0]},
        'processingLevel': {'path': ['AdditionalAttributes', ('Name', 'PRODUCT_TYPE'), 'Values', 0]},
        'bytes': {'path': ['DataGranule', 'ArchiveAndDistributionInformation']},
        'collectionName': {'path': ['CollectionReference', 'ShortName']},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)

        # Drop the key entirely when the PRODUCT_TYPE attribute yielded nothing
        if self.properties.get('processingLevel') is None:
            self.properties.pop('processingLevel', None)

        self.properties['additionalUrls'] = self._get_additional_urls()
        self.properties['browse'] = [
            url for url in self._get_urls() if url.endswith(('.png', '.jpg', '.jpeg'))
        ]
        self.properties['s3Urls'] = self._get_s3_uris()

        if self.properties.get('groupID') is None:
            self.properties['groupID'] = self.properties['sceneName']

        # Re-key the archive entries as {file name: {bytes, format}}
        self.properties['bytes'] = {
            entry['Name']: {'bytes': entry['SizeInBytes'], 'format': entry['Format']}
            for entry in self.properties['bytes']
        }
        self.properties['conceptID'] = self.umm_get(self.meta, 'collection-concept-id')

    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        """
        Returns the product type to search for when building a baseline stack.
        """
        return None

    def is_valid_reference(self):
        # NISAR products cannot currently serve as baseline stack references
        return False

    def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions:
        """
        Build search options that can be used to find an insar stack for this product

        :return: ASFSearchOptions describing appropriate options
        for building a stack from this product
        """
        return None

    def get_sort_keys(self) -> Tuple[str, str]:
        primary, secondary = super().get_sort_keys()

        # Fall back to processingDate when the primary sort key is empty
        if primary == '':
            primary = self._read_property('processingDate', '')

        return (primary, secondary)
71 |
--------------------------------------------------------------------------------
/asf_search/ASFSearchOptions/validator_map.py:
--------------------------------------------------------------------------------
1 | from asf_search import ASF_LOGGER
2 |
3 | from .validators import (
4 | parse_string,
5 | parse_float,
6 | parse_int,
7 | parse_wkt,
8 | parse_date,
9 | parse_string_list,
10 | parse_int_list,
11 | parse_int_or_range_list,
12 | parse_float_or_range_list,
13 | parse_cmr_keywords_list,
14 | parse_session,
15 | parse_circle,
16 | parse_linestring,
17 | parse_point,
18 | parse_bbox,
19 | )
20 |
21 |
def validate(key, value):
    """Run the registered parser for *key* on *value*; raise KeyError for unknown keys."""
    if key not in validator_map:
        error_msg = f'Key "{key}" is not a valid search option.'
        # See if they just mixed up case sensitivity:
        suggestion = next(
            (valid_key for valid_key in validator_map if valid_key.lower() == key.lower()),
            None,
        )
        if suggestion is not None:
            error_msg += f' (Did you mean "{suggestion}"?)'
        ASF_LOGGER.error(error_msg)
        raise KeyError(error_msg)

    try:
        return validator_map[key](value)
    except ValueError as exc:
        ASF_LOGGER.exception(f'Failed to parse item in ASFSearchOptions: {key=} {value=} {exc=}')
        raise
37 |
38 |
# Maps each ASFSearchOptions attribute name to the parser/validator applied
# to user-supplied values (see validate()).
validator_map = {
    # Search parameters Parser
    'maxResults': int,
    'absoluteOrbit': parse_int_or_range_list,
    'asfFrame': parse_int_or_range_list,
    'bbox': parse_bbox,
    'beamMode': parse_string_list,
    'beamSwath': parse_string_list,
    'campaign': parse_string,
    'circle': parse_circle,
    'linestring': parse_linestring,
    'point': parse_point,
    'maxDoppler': parse_float,
    'minDoppler': parse_float,
    'maxBaselinePerp': parse_float,
    'minBaselinePerp': parse_float,
    'maxInsarStackSize': parse_int,
    'minInsarStackSize': parse_int,
    'maxFaradayRotation': parse_float,
    'minFaradayRotation': parse_float,
    'flightDirection': parse_string,
    'flightLine': parse_string,
    'frame': parse_int_or_range_list,
    'granule_list': parse_string_list,
    'product_list': parse_string_list,
    'intersectsWith': parse_wkt,
    'lookDirection': parse_string,
    'offNadirAngle': parse_float_or_range_list,
    'platform': parse_string_list,
    'polarization': parse_string_list,
    'processingLevel': parse_string_list,
    'relativeOrbit': parse_int_or_range_list,
    'processingDate': parse_date,
    'start': parse_date,
    'end': parse_date,
    'season': parse_int_list,
    'groupID': parse_string_list,
    'insarStackId': parse_string,
    'instrument': parse_string_list,
    'collections': parse_string_list,
    'shortName': parse_string_list,
    'dataset': parse_string_list,
    'cmr_keywords': parse_cmr_keywords_list,
    # S1 Interferogram filters
    'temporalBaselineDays': parse_string_list,
    # Opera Burst Filters
    'operaBurstID': parse_string_list,
    # SLC Burst Filters
    'absoluteBurstID': parse_int_list,
    'relativeBurstID': parse_int_list,
    'fullBurstID': parse_string_list,
    # NISAR parameters
    'frameCoverage': parse_string,
    'jointObservation': bool,
    'mainBandPolarization': parse_string_list,
    'sideBandPolarization': parse_string_list,
    'rangeBandwidth': parse_string_list,
    'productionConfiguration': parse_string_list,
    # Config parameters Parser
    'session': parse_session,
    'host': parse_string,
    'provider': parse_string,
    'collectionAlias': bool,
}
103 |
--------------------------------------------------------------------------------
/tests/ASFProduct/test_ASFSubproduct.py:
--------------------------------------------------------------------------------
1 | from asf_search import Products, search, ASFSearchOptions
2 | from asf_search.ASFSearchResults import ASFSearchResults
3 | import json
4 | import pytest
5 |
def run_test_ASFSubproduct(scene_names: list[str], expected_subclass: str, opts: ASFSearchOptions):
    """Search for `scene_names`, assert each result deserializes to the
    expected ASFProduct subclass, run subclass-specific property checks,
    and confirm the results serialize in every supported output format.
    """
    scenes = search(granule_list=scene_names, opts=opts)

    # Every requested scene should come back, exactly once.
    assert sorted([scene.properties['fileID'] for scene in scenes]) == sorted(scene_names)

    for scene in scenes:
        assert expected_subclass.upper() == scene.__class__.__name__.upper(), f'Expected scene "{scene.properties["fileID"]}" to be of ASFProduct subclass {expected_subclass}. Got {scene.__class__.__name__}'
        if isinstance(scene, Products.OPERAS1Product):
            _test_OPERAS1Product(scene)
        if isinstance(scene, Products.S1BurstProduct):
            _test_S1BurstProduct(scene)
        if isinstance(scene, Products.SEASATProduct):
            _test_SEASATProduct(scene)

    for output_format in ['geojson', 'json', 'jsonlite', 'jsonlite2', 'csv', 'metalink', 'kml']:
        try:
            _get_output(scenes, output_format)
        except BaseException as exc:
            # Fixed failure-message typo: "Failed to serialized" -> "Failed to serialize"
            pytest.fail(f'Failed to serialize scenes {[scene.properties["fileID"] for scene in scenes]} as output format {output_format}. Original exception: {str(exc)}')
25 |
def _test_OPERAS1Product(scene: Products.OPERAS1Product):
    """Run per-processing-level sanity checks on an OPERA S1 product."""
    level = scene.properties['processingLevel']

    if level in ('RTC', 'RTC-STATIC'):
        _check_properties_set(scene.properties, ['bistaticDelayCorrection'])
        if level == 'RTC':
            _check_properties_set(scene.properties, ['noiseCorrection', 'postProcessingFilter'])
    elif level == 'DISP-S1':
        _check_properties_set(
            scene.properties,
            ['frameNumber', 'OperaDispStackID', 'zarrUri', 'zarrStackUri'],
        )

    # TROPO-ZENITH products carry no center coordinates
    if level == 'TROPO-ZENITH':
        assert scene.properties['centerLat'] is None
        assert scene.properties['centerLon'] is None
43 |
def _test_SEASATProduct(scene: Products.SEASATProduct):
    """SEASAT products report per-file md5sum and byte-count dictionaries."""
    properties = scene.properties
    assert isinstance(properties['md5sum'], dict)
    assert isinstance(properties['bytes'], dict)

    # Every file listed under 'bytes' should have a matching md5sum entry.
    _check_properties_set(properties['md5sum'], properties['bytes'].keys())
50 |
def _test_S1BurstProduct(scene: Products.S1BurstProduct):
    """S1 burst products must populate every field of the 'burst' sub-dict."""
    expected_burst_fields = [
        'absoluteBurstID',
        'relativeBurstID',
        'fullBurstID',
        'burstIndex',
        'samplesPerBurst',
        'subswath',
        'azimuthTime',
        'azimuthAnxTime',
    ]

    _check_properties_set(scene.properties['burst'], expected_burst_fields)
64 |
65 |
66 | def _check_properties_set(properties: dict, properties_list: list[str]):
67 | for prop in properties_list:
68 | assert properties[prop] is not None
69 |
def _get_output(scenes: ASFSearchResults, output_format: str):
    """Serialize `scenes` in the requested output format.

    Streaming formats (json/jsonlite/jsonlite2/csv/metalink) are fully
    consumed; JSON-based ones are parsed back to verify validity.
    Unrecognized formats return None.
    """
    fmt = output_format.lower()
    if fmt == 'geojson':
        return scenes.geojson()
    if fmt == 'json':
        return json.loads(''.join(scenes.json()))
    if fmt == 'jsonlite':
        return json.loads(''.join(scenes.jsonlite()))
    if fmt == 'jsonlite2':
        return json.loads(''.join(scenes.jsonlite2()))
    if fmt == 'csv':
        return ''.join(scenes.csv())
    if fmt == 'metalink':
        return ''.join(scenes.metalink())
    if fmt == 'kml':
        return scenes.kml()
86 |
87 |
--------------------------------------------------------------------------------
/asf_search/export/jsonlite2.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | import json
3 | from types import GeneratorType
4 |
5 | from asf_search import ASF_LOGGER
6 | from .jsonlite import JSONLiteStreamArray
7 |
def results_to_jsonlite2(results):
    """Generator that streams `results` as jsonlite2-encoded JSON text chunks.

    An empty result set short-circuits to an empty '{"results": []}' payload;
    otherwise results are wrapped in a streaming array so products are
    translated lazily while the encoder emits output.
    """
    ASF_LOGGER.info('started translating results to jsonlite2 format')

    if len(results) == 0:
        yield from json.JSONEncoder(indent=2, sort_keys=True).iterencode({'results': []})
        return

    # A single (non-generator) results object is wrapped in a list so the
    # streamer can iterate over it uniformly.
    is_generator = inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType)
    if not is_generator:
        results = [results]

    encoder = json.JSONEncoder(sort_keys=True, separators=(",", ":"))
    yield from encoder.iterencode({"results": JSONLite2StreamArray(results)})
24 |
25 |
class JSONLite2StreamArray(JSONLiteStreamArray):
    """Streaming array adapter for the jsonlite2 output format.

    jsonlite2 is a compacted variant of jsonlite: each field is renamed to a
    short abbreviated key, and occurrences of the granule name inside
    URL/ID-like fields are replaced with the "{gn}" placeholder so clients
    can re-expand them, shrinking the payload.
    """

    def getItem(self, p):
        """Return the jsonlite2 dict for a single product `p`."""
        # pre-processing of the result is the same as in the base jsonlite streamer,
        # so use that and then rename/substitute fields
        p = super().getItem(p)
        # Abbreviated key mapping; granule-name substrings in browse/download/
        # file/group/product fields become the "{gn}" placeholder.
        result = {
            "b": [a.replace(p.get("granuleName"), "{gn}") for a in p.get("browse")]
            if p["browse"] is not None
            else p["browse"],
            "bm": p.get("beamMode"),
            "d": p.get("dataset"),
            "du": p.get("downloadUrl").replace(p.get("granuleName"), "{gn}"),
            "f": p.get("frame"),
            "fd": p.get("flightDirection"),
            "fl": p.get("flightLine"),
            "fn": p.get("fileName").replace(p.get("granuleName"), "{gn}"),
            "fr": p.get("faradayRotation"), # ALOS
            "gid": p.get("groupID").replace(p.get("granuleName"), "{gn}"),
            "gn": p.get("granuleName"),
            "i": p.get("instrument"),
            "in": p.get("canInSAR"),
            "mn": p.get("missionName"),
            "o": p.get("orbit"),
            "on": p.get("offNadirAngle"), # ALOS
            "p": p.get("path"),
            "pid": p.get("productID").replace(p.get("granuleName"), "{gn}"),
            "pa": p.get("pointingAngle"),
            "po": p.get("polarization"),
            "pt": p.get("productType"),
            "ptd": p.get("productTypeDisplay"),
            "s": p.get("sizeMB"),
            "ss": p.get("stackSize"), # Used for datasets with precalculated stacks
            "st": p.get("startTime"),
            "stp": p.get("stopTime"),
            "t": p.get("thumb").replace(p.get("granuleName"), "{gn}")
            if p.get("thumb") is not None
            else p.get("thumb"),
            "w": p.get("wkt"),
            "wu": p.get("wkt_unwrapped"),
            "pge": p.get("pgeVersion"),
            "adu": p.get("additionalUrls"),
            's3u': p.get("s3Urls"),
        }

        # Baseline fields are only present on stackable products.
        if 'temporalBaseline' in p.keys():
            result['tb'] = p['temporalBaseline']
        if 'perpendicularBaseline' in p.keys():
            result['pb'] = p['perpendicularBaseline']

        if p.get('burst') is not None: # is a burst product
            result['s1b'] = p['burst']
            result['f'] = None

        if p.get('opera') is not None:
            result['s1o'] = p['opera']

        # NISAR products also carry collection name/concept-id.
        if p.get('nisar') is not None:
            result['nsr'] = p['nisar']
            result["cnm"] = p["collectionName"]
            result["cid"] = p["conceptID"]

        if p.get('ariaVersion') is not None:
            result['ariav'] = p.get('ariaVersion')

        return result

    def getOutputType(self) -> str:
        """Name of the output format produced by this streamer."""
        return "jsonlite2"
94 |
--------------------------------------------------------------------------------
/asf_search/search/collection_attributes.py:
--------------------------------------------------------------------------------
1 | from dataclasses import dataclass
2 | from typing import Optional
3 | from asf_search.CMR.datasets import collections_by_processing_level
4 | from asf_search.ASFSession import ASFSession
5 |
6 | from asf_search.exceptions import ASFSearchError, CMRError
7 |
8 |
@dataclass(frozen=True)
class AdditionalAttribute:
    """Wrapper dataclass around CMR Additional Attributes

    Immutable record describing one searchable attribute as reported by
    CMR's `/collections` endpoint; instances are built by
    `get_searchable_attributes()` below.
    """

    name: str
    """The `Name` of the additional attribute in CMR"""
    data_type: str
    """The `DataType` of the additional attribute in CMR"""
    description: str
    """The `Description` of the additional attribute in CMR"""
19 |
20 |
def get_searchable_attributes(
    shortName: Optional[str] = None,
    conceptID: Optional[str] = None,
    processingLevel: Optional[str] = None,
    session: Optional[ASFSession] = None,
) -> dict[str, AdditionalAttribute]:
    """Using a provided processingLevel, collection shortName, or conceptID query CMR's `/collections` endpoint and
    return a dictionary of additional attributes mapping the attribute's name to the additional attribute entry in CMR

    Exactly one selector is used, checked in order: `shortName`, then
    `conceptID`, then `processingLevel`. Raises ValueError when no selector
    is given, when CMR reports errors, or when no collections match.

    ``` python
    from pprint import pp
    SLCRecord = asf.get_searchable_attributes(processingLevel='SLC')
    pp(SLCRecord)
    ```
    """
    # Avoid the shared default-argument pitfall: `session=ASFSession()` in the
    # signature would be constructed once at import time and reused by every
    # call. Create a fresh session only when the caller didn't supply one.
    if session is None:
        session = ASFSession()

    query_data = None
    method = None

    if shortName is not None:
        method = {'type': 'shortName', 'value': shortName}
        query_data = [('shortName', shortName)]
    elif conceptID is not None:
        query_data = [('concept-id', conceptID)]
        method = {'type': 'conceptID', 'value': conceptID}
    elif processingLevel is not None:
        method = {'type': 'processingLevel', 'value': processingLevel}
        query_data = _get_concept_ids_for_processing_level(processingLevel)
    else:
        # Fixed: message previously referenced the nonexistent
        # `get_collection_searchable_attributes()`.
        raise ValueError(
            'Error: `get_searchable_attributes()` expects `shortName`, `conceptID`, or `processingLevel`'
        )

    cmr_response = _query_cmr(session=session, query_data=query_data, method=method)

    if 'errors' in cmr_response:
        raise ValueError(f"CMR responded with an error. Original error(s): {' '.join(cmr_response['errors'])}")
    if len(cmr_response['items']) == 0:
        raise ValueError(
            f'Error: no collections found in CMR for given parameter `{method["type"]}`: "{method["value"]}" '
        )

    additionalAttributes = {}

    # Merge attributes across all matching collections, keyed by `Name`
    # (later collections overwrite earlier duplicates).
    for entry in cmr_response['items']:
        umm = entry['umm']
        attributes = umm.get('AdditionalAttributes')
        if attributes is not None:
            for attribute in attributes:
                additionalAttributes[attribute.get('Name')] = AdditionalAttribute(
                    name=attribute.get('Name'),
                    description=attribute.get('Description'),
                    data_type=attribute.get('DataType'),
                )

    return additionalAttributes
76 |
77 |
def _get_concept_ids_for_processing_level(processing_level: str):
    """Map a processing level to its CMR `concept-id[]` query tuples.

    Raises ValueError when asf-search has no concept-id aliases for the
    given processing level.
    """
    collections = collections_by_processing_level.get(processing_level)
    if collections is None:
        # Fixed the unbalanced backtick around `conceptID` in the message.
        raise ValueError(f'asf-search is missing concept-id aliases for processing level "{processing_level}". Please use `shortName` or `conceptID`')
    return [('concept-id[]', collection) for collection in collections]
83 |
84 |
def _query_cmr(session: ASFSession, query_data: list[tuple[str, str]], method: dict) -> dict:
    """POST `query_data` to CMR's collections endpoint and return the parsed JSON.

    Raises ASFSearchError when the response body cannot be parsed as JSON.
    """
    url = 'https://cmr.earthdata.nasa.gov/search/collections.umm_json'

    response = session.post(url=url, data=query_data)

    try:
        return response.json()
    except Exception as exc:
        # Chain the original exception so the root cause stays in tracebacks.
        raise ASFSearchError(
            f'Failed to find collection attributes for {method["type"]} "{method["value"]}". original exception: {str(exc)}'
        ) from exc
96 |
--------------------------------------------------------------------------------
/asf_search/search/search_count.py:
--------------------------------------------------------------------------------
1 | import datetime
2 | from typing import Literal, Sequence, Tuple, Union
3 | from copy import copy
4 | from asf_search.ASFSearchOptions import ASFSearchOptions
5 | from asf_search.CMR.subquery import build_subqueries
6 | from asf_search.CMR import translate_opts
7 | from asf_search.search.search_generator import get_page, preprocess_opts
8 | from asf_search import INTERNAL
9 |
10 |
def search_count(
    absoluteOrbit: Union[
        int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]
    ] = None,
    asfFrame: Union[
        int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]
    ] = None,
    beamMode: Union[str, Sequence[str]] = None,
    beamSwath: Union[str, Sequence[str]] = None,
    campaign: Union[str, Sequence[str]] = None,
    maxDoppler: float = None,
    minDoppler: float = None,
    end: Union[datetime.datetime, str] = None,
    maxFaradayRotation: float = None,
    minFaradayRotation: float = None,
    flightDirection: str = None,
    flightLine: str = None,
    frame: Union[int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]] = None,
    granule_list: Union[str, Sequence[str]] = None,
    groupID: Union[str, Sequence[str]] = None,
    insarStackId: str = None,
    instrument: Union[str, Sequence[str]] = None,
    intersectsWith: str = None,
    lookDirection: Union[str, Sequence[str]] = None,
    offNadirAngle: Union[
        float, Tuple[float, float], Sequence[Union[float, Tuple[float, float]]]
    ] = None,
    platform: Union[str, Sequence[str]] = None,
    polarization: Union[str, Sequence[str]] = None,
    processingDate: Union[datetime.datetime, str] = None,
    processingLevel: Union[str, Sequence[str]] = None,
    product_list: Union[str, Sequence[str]] = None,
    relativeOrbit: Union[
        int, Tuple[int, int], range, Sequence[Union[int, Tuple[int, int], range]]
    ] = None,
    season: Tuple[int, int] = None,
    start: Union[datetime.datetime, str] = None,
    absoluteBurstID: Union[int, Sequence[int]] = None,
    relativeBurstID: Union[int, Sequence[int]] = None,
    fullBurstID: Union[str, Sequence[str]] = None,
    temporalBaselineDays: Union[str, Sequence[str]] = None,
    operaBurstID: Union[str, Sequence[str]] = None,
    frameCoverage: Literal["FULL", "PARTIAL"] = None,
    mainBandPolarization: Union[str, Sequence[str]] = None,
    sideBandPolarization: Union[str, Sequence[str]] = None,
    rangeBandwidth: Union[str, Sequence[str]] = None,
    productionConfiguration: Union[Literal["PR", "UR"], Sequence[Literal["PR", "UR"]]] = None,
    jointObservation: bool = None,
    dataset: Union[str, Sequence[str]] = None,
    collections: Union[str, Sequence[str]] = None,
    shortName: Union[str, Sequence[str]] = None,
    cmr_keywords: Union[Tuple[str, str], Sequence[Tuple[str, str]]] = None,
    maxResults: int = None,
    opts: ASFSearchOptions = None,
) -> int:
    """Return the number of products matching the given search parameters,
    without downloading the products themselves.

    Keyword arguments mirror the main search entry point; any explicitly
    passed keyword takes priority over the same field in `opts`. The count
    is obtained by issuing each CMR subquery with `page_size=0` and summing
    the `hits` totals reported by CMR.
    """
    # Create a kwargs dict, that's all of the 'not None' items, and merge it with opts:
    # NOTE: locals() must be captured before any other local variable is
    # created — at this point it is exactly the keyword parameters by name.
    kwargs = locals()
    opts = ASFSearchOptions() if kwargs['opts'] is None else copy(opts)
    del kwargs['opts']

    kwargs = dict((k, v) for k, v in kwargs.items() if v is not None)
    kw_opts = ASFSearchOptions(**kwargs)

    # Anything passed in as kwargs has priority over anything in opts:
    opts.merge_args(**dict(kw_opts))

    preprocess_opts(opts)

    url = '/'.join(s.strip('/') for s in [f'https://{opts.host}', f'{INTERNAL.CMR_GRANULE_PATH}'])

    count = 0
    for query in build_subqueries(opts):
        translated_opts = translate_opts(query)
        # Ask for zero results per page — CMR still reports the total 'hits'.
        idx = translated_opts.index(('page_size', INTERNAL.CMR_PAGE_SIZE))
        translated_opts[idx] = ('page_size', 0)

        response = get_page(session=opts.session, url=url, translated_opts=translated_opts)
        count += response.json()['hits']
    return count
90 |
--------------------------------------------------------------------------------
/tests/ASFProduct/test_ASFProduct.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | import unittest
3 |
4 | from asf_search import (
5 | ASFProduct,
6 | ASFSearchResults,
7 | ASFSearchOptions,
8 | ASFSession,
9 | FileDownloadType,
10 | )
11 | from unittest.mock import patch
12 | from shapely.geometry import shape
13 | from shapely.ops import orient
14 |
15 | import requests
16 |
17 | from asf_search.search.search_generator import as_ASFProduct
18 |
19 |
def run_test_ASFProduct(product_json):
    """Validate geojson serialization of an ASFProduct built from `product_json`.

    A None input exercises the empty-product path; otherwise the product's
    geometry, properties, umm, and meta are checked against the input.
    """
    if product_json is None:
        empty_geojson = ASFProduct().geojson()
        assert empty_geojson['type'] == 'Feature'
        assert empty_geojson['geometry'] == {'coordinates': None, 'type': 'Polygon'}
        assert all(value is None for value in empty_geojson['properties'].values())
        return

    product = as_ASFProduct(product_json, ASFSession())
    geojson = product.geojson()

    if geojson['geometry']['coordinates'] is not None:
        # Compare geometries after normalizing ring orientation.
        expected_shape = orient(shape(product_json['geometry']))
        actual_shape = orient(shape(geojson['geometry']))
        assert actual_shape.equals(expected_shape)
    elif product.meta != {}:
        assert product.properties == product_json['properties']
        assert product.geometry == product_json['geometry']

    assert product.umm == product_json['umm']
    assert product.meta == product_json['meta']
45 |
46 |
def run_test_stack(reference, pre_processed_stack, processed_stack):
    """Build a baseline stack for `reference` against a mocked search and
    compare temporal/perpendicular baselines to the expected processed stack."""
    product = as_ASFProduct(reference, ASFSession())

    with patch('asf_search.baseline_search.search') as search_mock:
        mocked_results = ASFSearchResults(
            [as_ASFProduct(entry, ASFSession()) for entry in pre_processed_stack]
        )
        # Attach the raw baseline data the stacking code expects.
        for mocked, raw in zip(mocked_results, pre_processed_stack):
            mocked.baseline = raw['baseline']
        search_mock.return_value = mocked_results
        stack = product.stack()

    # Drop entries missing either baseline value.
    stack = [
        secondary
        for secondary in stack
        if secondary.properties['temporalBaseline'] is not None
        and secondary.properties['perpendicularBaseline'] is not None
    ]

    for idx, secondary in enumerate(stack):
        expected = processed_stack[idx]['properties']
        temporal = secondary.properties['temporalBaseline']

        if idx > 0:
            # The stack must be sorted by temporal baseline, ascending.
            assert temporal >= stack[idx - 1].properties['temporalBaseline']

        assert temporal == expected['temporalBaseline']
        assert secondary.properties['perpendicularBaseline'] == expected['perpendicularBaseline']
79 |
80 |
def run_test_product_get_stack_options(reference, options):
    """Check that a product's stack options match the expected search options."""
    product = as_ASFProduct(reference, ASFSession())
    expected_options = dict(ASFSearchOptions(**options))

    product_options = dict(product.get_stack_opts())
    # `expected_options` is already a dict; the original re-wrapped it in
    # a redundant second dict() call.
    assert product_options == expected_options
87 |
88 |
def run_test_ASFProduct_download(reference, filename, filetype, additional_urls):
    """Exercise ASFProduct.download() with mocked HTTP responses and file I/O.

    A custom filename combined with multiple files to download is expected
    to emit a warning; all other combinations should download silently.
    """
    product = as_ASFProduct(reference, ASFSession())
    product.properties['additionalUrls'] = additional_urls

    with patch('asf_search.ASFSession.get') as mock_get:
        response = requests.Response()
        response.status_code = 200
        response.iter_content = lambda chunk_size: []
        mock_get.return_value = response

        with patch('builtins.open', unittest.mock.mock_open()):
            expect_warning = filename is not None and (
                (filetype == FileDownloadType.ADDITIONAL_FILES and len(additional_urls) > 1)
                or (filetype == FileDownloadType.ALL_FILES and len(additional_urls) > 0)
            )
            if expect_warning:
                with pytest.warns(Warning):
                    product.download('./', filename=filename, fileType=filetype)
            else:
                product.download('./', filename=filename, fileType=filetype)
107 |
--------------------------------------------------------------------------------
/examples/hello_world.py:
--------------------------------------------------------------------------------
1 | """
2 | Simple example script showing a few basic uses of asf_search
3 | """
4 |
5 | import json
6 | import asf_search as asf
7 |
8 | print('=' * 80)
9 | print('Constants')
10 | print(f'asf.BEAMMODE.IW: {asf.BEAMMODE.IW}')
11 | print(f'asf.POLARIZATION.HH_HV: {asf.POLARIZATION.HH_HV}')
12 | print(f'asf.PLATFORM.SENTINEL1: {asf.PLATFORM.SENTINEL1}')
13 |
14 | print('=' * 80)
15 | print(f'Health check: {json.dumps(asf.health(), indent=2)}')
16 |
17 | print('=' * 80)
18 | results = asf.search(platform=[asf.PLATFORM.SENTINEL1], maxResults=2)
19 | print(f'Basic search example: {results}')
20 |
21 | print('=' * 80)
22 | results = asf.granule_search(['ALPSRS279162400', 'ALPSRS279162200'])
23 | print(f'Granule search example: {results}')
24 |
25 | print('=' * 80)
26 | results = asf.product_search(['ALAV2A279102730', 'ALAV2A279133150'])
27 | print(f'Product search example: {results}')
28 |
29 | print('=' * 80)
30 | wkt = 'POLYGON((-135.7 58.2,-136.6 58.1,-135.8 56.9,-134.6 56.1,-134.9 58.0,-135.7 58.2))'
31 | results = asf.geo_search(platform=[asf.PLATFORM.SENTINEL1], intersectsWith=wkt, maxResults=2)
32 | print(f'Geographic search example: {results}')
33 |
34 | print('=' * 80)
35 | results = asf.search(
36 | platform=[asf.PLATFORM.SENTINEL1],
37 | frame=[100, 150, (200, 205)],
38 | relativeOrbit=[100, 105, (110, 115)],
39 | processingLevel=[asf.PRODUCT_TYPE.SLC],
40 | )
41 | print(f'Path/frame/platform/product type example: {results}')
42 |
43 | print('=' * 80)
44 | results = asf.stack_from_id(
45 | 'S1B_WV_SLC__1SSV_20210126T234925_20210126T235632_025332_030462_C733-SLC'
46 | )
47 | print(f'Baseline stack search example, ephemeris-based: {results}')
48 |
49 | print('=' * 80)
50 | try:
51 | results = asf.stack_from_id('nonexistent-scene')
52 | except asf.ASFSearchError as e:
53 | print(f'Stacking a non-existent scene throws an exception: {e}')
54 |
55 | print('=' * 80)
56 | try:
57 | results = asf.stack_from_id('UA_atchaf_06309_21024_020_210401_L090_CX_01-PROJECTED')
58 | except asf.ASFBaselineError as e:
59 | print(f'Not everything can be stacked: {e}')
60 |
61 | print('=' * 80)
62 | results = asf.stack_from_id('ALPSRP279071390-RTC_HI_RES')
63 | print(f'Baseline stack search example, pre-calculated: {results}')
64 |
65 | print('=' * 80)
66 | results = results[0].stack()
67 | print(f'Baseline stacks can also be made from an ASFProduct: {results}')
68 |
69 | print('=' * 80)
70 | print(f'ASFSearchResults work like lists: {results[3:5]}')
71 |
72 | print('=' * 80)
73 | print(f'ASFSearchResults serializes to geojson: {results[3:5]}')
74 |
75 | print('=' * 80)
76 | product = results[2]
77 | print(f'ASFProduct serializes to geojson: {product}')
78 |
79 |
80 | print('=' * 80)
81 | wkt = 'POLYGON((-160 65,-150 65,-160 60,-150 60,-160 65))' # Self-intersecting bowtie
82 | try:
83 | results = asf.geo_search(platform=[asf.PLATFORM.SENTINEL1], intersectsWith=wkt)
84 | except asf.ASFWKTError as e:
85 | print(f'Exception example: {e}')
86 |
87 | print('=' * 80)
88 | print('A few more exception examples:')
89 | try:
90 | asf.search(offNadirAngle=[tuple([1])])
91 | except ValueError as e:
92 | print(f'Tuple too short: {e}')
93 | try:
94 | asf.search(offNadirAngle=[(1, 2, 3)])
95 | except ValueError as e:
96 | print(f'Tuple too long: {e}')
97 | try:
98 | asf.search(offNadirAngle=[('a', 2)])
99 | except ValueError as e:
100 | print(f'Tuple non-numeric min: {e}')
101 | try:
102 | asf.search(offNadirAngle=[(1, 'b')])
103 | except ValueError as e:
104 | print(f'Tuple non-numeric max: {e}')
105 | try:
106 | asf.search(offNadirAngle=[(float('NaN'), 2)])
107 | except ValueError as e:
108 | print(f'Tuple non-finite min: {e}')
109 | try:
110 | asf.search(offNadirAngle=[1, (float('Inf'))])
111 | except ValueError as e:
112 | print(f'Tuple non-finite max: {e}')
113 | try:
114 | asf.search(offNadirAngle=[(2, 1)])
115 | except ValueError as e:
116 | print(f'Tuple min > max: {e}')
117 | try:
118 | asf.search(offNadirAngle=[float('Inf')])
119 | except ValueError as e:
120 | print(f'Bare value non-finite: {e}')
121 | try:
122 | asf.search(offNadirAngle=['a'])
123 | except ValueError as e:
124 | print(f'Bare value non-numeric: {e}')
125 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_ASFProduct.yml:
--------------------------------------------------------------------------------
1 | empty: &empty {
2 | geometry: {},
3 | properties: {},
4 | meta:
5 | {
6 | "concept-type": ' ',
7 | "concept-id": ' ',
8 | "revision-id": 0,
9 | "native-id": ' ',
10 | "provider-id": ' ',
11 | "format": ' ',
12 | "revision-date": ' '
13 | },
14 | umm:
15 | {
16 | "TemporalExtent": {},
17 | "OrbitCalculatedSpatialDomains": [],
18 | "GranuleUR": " ",
19 | "AdditionalAttributes": [],
20 | "SpatialExtent": {},
21 | "ProviderDates": [],
22 | "CollectionReference": {},
23 | "RelatedUrls": [],
24 | "DataGranule": {},
25 | "Platforms": [],
26 | },
27 | }
28 |
29 | tests:
30 | - Test Empty ASFProduct:
31 | products: null
32 |
33 | - Test S1 ASFProduct SLC missing meta and umm:
34 | products: Fairbanks_SLC_Incomplete_Meta.yml
35 |
36 | - Test S1 ASFProduct SLC missing statevectors:
37 | products: Fairbanks_SLC_no_stateVectors.yml
38 |
39 | - Test SMAP ASFProduct:
40 | products: SMAP_response.yml
41 |
42 | - Test S1 ASFProduct:
43 | products: Fairbanks_SLC.yml
44 |
45 | - Test ASFProduct_Stack S1:
46 | product: Fairbanks_SLC.yml
47 | preprocessed_stack: Fairbanks_S1_stack_preprocessed.yml
48 | processed_stack: Fairbanks_S1_stack.yml
49 |
50 | - Test ASFProduct_Stack ERS:
51 | product: Fairbanks_L1.yml
52 | preprocessed_stack: Fairbanks_ers_stack preprocessed.yml
53 | processed_stack: Fairbanks_ers_stack.yml
54 |
55 | - Test ASFProduct_Stack S1 Incomplete:
56 | product: Fairbanks_L1.yml
57 | preprocessed_stack: Fairbanks_S1_stack_preprocessed_incomplete.yml
58 | processed_stack: Fairbanks_S1_stack incomplete.yml
59 |
60 | - Test ASFProduct-get-stack-options S1:
61 | product: Fairbanks_L1.yml
62 | options: {
63 | processingLevel: ['L0'],
64 | insarStackId: '1736495'
65 | }
66 |
67 | - Test ASFProduct-download-file default_file no additional files:
68 | product: Fairbanks_SLC.yml
69 | filename: null
70 | filetype: 1
71 | additionalUrls: []
72 |
73 | - Test ASFProduct-download-file additional_files no additional files:
74 | product: Fairbanks_SLC.yml
75 | filename: null
76 | filetype: 2
77 | additionalUrls: []
78 |
79 | - Test ASFProduct-download-file all_files no additional files:
80 | product: Fairbanks_SLC.yml
81 | filename: null
82 | filetype: 3
83 | additionalUrls: []
84 |
85 |
86 | - Test ASFProduct-download-file default_file:
87 | product: Fairbanks_SLC.yml
88 | filename: null
89 | filetype: 1
90 | additionalUrls: ['test.xml', 'test.tiff']
91 |
92 | - Test ASFProduct-download-file additional_files:
93 | product: Fairbanks_SLC.yml
94 | filename: null
95 | filetype: 2
96 | additionalUrls: ['test.xml', 'test.tiff']
97 |
98 | - Test ASFProduct-download-file all_files:
99 | product: Fairbanks_SLC.yml
100 | filename: null
101 | filetype: 3
102 | additionalUrls: ['test.xml', 'test.tiff']
103 |
104 | - Test ASFProduct-download-file default_file custom filename:
105 | product: Fairbanks_SLC.yml
106 | filename: custom_name.txt
107 | filetype: 1
108 | additionalUrls: ['test.xml', 'test.tiff']
109 |
  - Test ASFProduct-download-file default_file custom filename single additional:
111 | product: Fairbanks_SLC.yml
112 | filename: custom_name.txt
113 | filetype: 1
114 | additionalUrls: ['test.xml']
115 |
116 | - Test ASFProduct-download-file additional_files custom filename:
117 | product: Fairbanks_SLC.yml
118 | filename: custom_name.txt
119 | filetype: 2
120 | additionalUrls: ['test.xml']
121 |
122 | - Test ASFProduct-download-file multiple additional_files custom filename:
123 | product: Fairbanks_SLC.yml
124 | filename: custom_name.txt
125 | filetype: 2
126 | additionalUrls: ['test.xml', 'test.tiff']
127 |
128 | - Test ASFProduct-download-file all_files custom filename:
129 | product: Fairbanks_SLC.yml
130 | filename: custom_name.txt
131 | filetype: 3
132 | additionalUrls: ['test.xml', 'test.tiff']
133 |
134 |
135 | - Test ASFProduct-download-file all_files custom filename no additional:
136 | product: Fairbanks_SLC.yml
137 | filename: custom_name.txt
138 | filetype: 3
139 | additionalUrls: []
140 |
--------------------------------------------------------------------------------
/tests/yml_tests/Resources/RADARSAT.yml:
--------------------------------------------------------------------------------
1 | {"properties": {"beamModeType": "FN4", "browse": null, "bytes": 102067357, "centerLat": 63.9796, "centerLon": -145.5593, "faradayRotation": null, "fileID": "R1_16844_FN4_F160-L0", "flightDirection": "ASCENDING", "groupID": null, "granuleType": "R1_FINE_FRAME", "insarStackId": "1920010", "md5sum": "95bcc7a9b7b9044a806fd5426b9e2ca5", "offNadirAngle": -1.0, "orbit": 16844, "pathNumber": 296, "platform": "RADARSAT-1", "pointingAngle": null, "polarization": "HH", "processingDate": "2011-01-29T12:52:03.000Z", "processingLevel": "L0", "sceneName": "R1_16844_FN4_F160", "sensor": "SAR", "startTime": "1999-01-26T03:28:47.000Z", "stopTime": "1999-01-26T03:28:55.000Z", "url": "https://datapool.asf.alaska.edu/L0/R1/R1_16844_FN4_L0_F160.zip", "pgeVersion": null, "fileName": "R1_16844_FN4_L0_F160.zip", "frameNumber": 1280}, "meta": {"concept-type": "granule", "concept-id": "G1207619851-ASF", "revision-id": 1, "native-id": "R1_16844_FN4_F160-L0", "provider-id": "ASF", "format": "application/echo10+xml", "revision-date": "2015-11-04T12:14:30.975Z"}, "umm": {"TemporalExtent": {"RangeDateTime": {"BeginningDateTime": "1999-01-26T03:28:47.000Z", "EndingDateTime": "1999-01-26T03:28:55.000Z"}}, "OrbitCalculatedSpatialDomains": [{"OrbitNumber": 16844}], "GranuleUR": "R1_16844_FN4_F160-L0", "AdditionalAttributes": [{"Name": "FLIGHT_LINE", "Values": ["NULL"]}, {"Name": "OFF_NADIR_ANGLE", "Values": ["-1"]}, {"Name": "MD5SUM", "Values": ["95bcc7a9b7b9044a806fd5426b9e2ca5"]}, {"Name": "GRANULE_TYPE", "Values": ["R1_FINE_FRAME"]}, {"Name": "ASCENDING_DESCENDING", "Values": ["ASCENDING"]}, {"Name": "FAR_END_LAT", "Values": ["64.2358"]}, {"Name": "INSAR_STACK_SIZE", "Values": ["68"]}, {"Name": "BEAM_MODE_TYPE", "Values": ["FN4"]}, {"Name": "INSAR_BASELINE", "Values": ["0"]}, {"Name": "CENTER_FRAME_ID", "Values": ["1280"]}, {"Name": "CENTER_ESA_FRAME", "Values": ["1280"]}, {"Name": "ACQUISITION_DATE", "Values": ["1999-01-26T03:28:55Z"]}, {"Name": "MISSION_NAME", "Values": ["NULL"]}, {"Name": 
"CENTER_LON", "Values": ["-145.5593"]}, {"Name": "NEAR_START_LAT", "Values": ["63.7212"]}, {"Name": "BEAM_MODE", "Values": ["Fine"]}, {"Name": "BEAM_MODE_DESC", "Values": ["Radarsat-1 Fine Resolution Beam 4 SAR"]}, {"Name": "PROCESSING_TYPE", "Values": ["L0"]}, {"Name": "PROCESSING_DESCRIPTION", "Values": ["Raw signal SAR data."]}, {"Name": "FRAME_NUMBER", "Values": ["160"]}, {"Name": "PROCESSING_LEVEL", "Values": ["L0"]}, {"Name": "PROCESSING_DATE", "Values": ["2011-01-29 12:52:03.423081"]}, {"Name": "NEAR_START_LON", "Values": ["-146.0694"]}, {"Name": "DOPPLER", "Values": ["-4802.147"]}, {"Name": "FAR_START_LAT", "Values": ["63.785"]}, {"Name": "NEAR_END_LON", "Values": ["-146.2387"]}, {"Name": "PROCESSING_TYPE_DISPLAY", "Values": ["Level Zero"]}, {"Name": "POLARIZATION", "Values": ["HH"]}, {"Name": "FAR_START_LON", "Values": ["-144.8891"]}, {"Name": "THUMBNAIL_URL", "Values": ["none"]}, {"Name": "ASF_PLATFORM", "Values": ["RADARSAT-1"]}, {"Name": "INSAR_STACK_ID", "Values": ["1920010"]}, {"Name": "LOOK_DIRECTION", "Values": ["R"]}, {"Name": "PATH_NUMBER", "Values": ["296"]}, {"Name": "NEAR_END_LAT", "Values": ["64.1716"]}, {"Name": "FARADAY_ROTATION", "Values": ["NA"]}, {"Name": "FAR_END_LON", "Values": ["-145.0392"]}, {"Name": "BYTES", "Values": ["102067357"]}, {"Name": "CENTER_LAT", "Values": ["63.9796"]}], "SpatialExtent": {"HorizontalSpatialDomain": {"Geometry": {"GPolygons": [{"Boundary": {"Points": [{"Longitude": -144.889079, "Latitude": 63.785042}, {"Longitude": -145.039155, "Latitude": 64.235808}, {"Longitude": -146.238682, "Latitude": 64.171604}, {"Longitude": -146.069403, "Latitude": 63.721179}, {"Longitude": -144.889079, "Latitude": 63.785042}]}}]}}}, "ProviderDates": [{"Date": "2011-01-29T12:52:03.000Z", "Type": "Insert"}, {"Date": "2011-01-29T12:52:03.000Z", "Type": "Update"}], "CollectionReference": {"EntryTitle": "RADARSAT-1_LEVEL0"}, "RelatedUrls": [{"Format": "Not provided", "Type": "GET DATA", "URL": 
"https://datapool.asf.alaska.edu/L0/R1/R1_16844_FN4_L0_F160.zip"}], "DataGranule": {"DayNightFlag": "Unspecified", "Identifiers": [{"Identifier": "R1_16844_FN4_F160", "IdentifierType": "ProducerGranuleId"}], "ProductionDateTime": "2011-01-29T12:52:03.000Z", "ArchiveAndDistributionInformation": [{"Name": "Not provided", "Size": 97.33, "SizeUnit": "MB", "Format": "Not provided"}]}, "Platforms": [{"ShortName": "RADARSAT-1", "Instruments": [{"ShortName": "SAR", "ComposedOf": [{"ShortName": "FN4"}]}]}]}, "geometry": {"coordinates": [[[-144.889079, 63.785042], [-145.039155, 64.235808], [-146.238682, 64.171604], [-146.069403, 63.721179], [-144.889079, 63.785042]]], "type": "Polygon"}, "baseline": {"insarBaseline": 0.0}}
--------------------------------------------------------------------------------
/asf_search/Products/ALOS2Product.py:
--------------------------------------------------------------------------------
1 | from copy import copy
2 | from typing import Dict, Union
3 |
4 | from asf_search import ASFSearchOptions, ASFSession, ASFStackableProduct
5 | from asf_search.CMR.translate import try_parse_float, try_parse_int, try_round_float
6 | from asf_search.constants import PRODUCT_TYPE
7 |
class ALOS2Product(ASFStackableProduct):
    """
    Used for ALOS-2 PALSAR-2 dataset products.
    (Docstring previously referenced ALOS PALSAR/AVNIR — this class handles ALOS-2.)

    ASF Dataset Documentation Page: https://asf.alaska.edu/datasets/daac/alos-palsar/
    """

    _base_properties = {
        **ASFStackableProduct._base_properties,
        'frameNumber': {
            'path': ['AdditionalAttributes', ('Name', 'FRAME_NUMBER'), 'Values', 0],
            'cast': try_parse_int,
        },  # Sentinel and ALOS product alt for frameNumber (ESA_FRAME)
        'center_lat': {
            'path': ['AdditionalAttributes', ('Name', 'CENTER_LAT'), 'Values', 0],
            'cast': try_parse_float,
        },
        'center_lon': {
            'path': ['AdditionalAttributes', ('Name', 'CENTER_LON'), 'Values', 0],
            'cast': try_parse_float,
        },
        'faradayRotation': {
            'path': ['AdditionalAttributes', ('Name', 'FARADAY_ROTATION'), 'Values', 0],
            'cast': try_parse_float,
        },
        'offNadirAngle': {
            'path': ['AdditionalAttributes', ('Name', 'OFF_NADIR_ANGLE'), 'Values', 0],
            'cast': try_parse_float,
        },
        'bytes': {
            'path': ['DataGranule', 'ArchiveAndDistributionInformation', 0, 'SizeInBytes'],
            'cast': try_round_float,
        },
        'beamModeType': {'path': ['AdditionalAttributes', ('Name', 'BEAM_MODE_TYPE'), 'Values', 0]},
        'polarization': {
            'path': ['AdditionalAttributes', ('Name', 'POLARIZATION'), 'Values']
        },  # dual polarization is in list rather than a 'VV+VH' style format
    }

    # ALOS-2 stacks have no pre-calculated insar baselines; they are calculated
    # from state vectors at stack-build time
    baseline_type = ASFStackableProduct.BaselineCalcType.CALCULATED

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)
        # These properties aren't meaningful for ALOS-2 products; pop with a
        # default so a missing key can't raise KeyError (matches the defensive
        # pop style used by other product subclasses)
        self.properties.pop('md5sum', None)
        self.properties.pop('granuleType', None)
        self.properties.pop('processingLevel', None)

        self.baseline = self.get_baseline_calc_properties()

    def get_baseline_calc_properties(self) -> Dict:
        """
        :returns properties required for ALOS-2 baseline stack calculations
        """
        return {'stateVectors': self.get_state_vectors()}

    def get_stack_opts(self, opts: ASFSearchOptions = None) -> ASFSearchOptions:
        """
        Returns the search options asf-search will use internally
        to build an ALOS-2 baseline stack from

        :param opts: additional criteria for limiting
        :returns ASFSearchOptions used for building an ALOS-2 stack
        """
        stack_opts = ASFSearchOptions() if opts is None else copy(opts)

        stack_opts.beamMode = [self.properties['beamModeType']]
        stack_opts.flightDirection = self.properties['flightDirection']
        stack_opts.relativeOrbit = [int(self.properties['pathNumber'])]  # path
        stack_opts.dataset = 'ALOS-2'

        # polarization is a list for ALOS-2 (see _base_properties), so check membership
        if any(e in ['HH', 'HH+HV'] for e in self.properties['polarization']):
            stack_opts.polarization = ['HH', 'HH+HV']
        else:
            stack_opts.polarization = ['VV', 'VV+VH']

        stack_opts.intersectsWith = self.centroid().wkt

        return stack_opts

    def get_state_vectors(self) -> Dict:
        """
        Used in spatio-temporal perpendicular baseline calculations for non-pre-calculated stacks

        :returns dictionary of pre/post positions, velocities, and times"""

        position = [
            float(val)
            for val in self.umm_get(
                self.umm, 'AdditionalAttributes', ('Name', 'SV_POSITION'), 'Values'
            )
        ]
        velocity = [
            float(val)
            for val in self.umm_get(
                self.umm, 'AdditionalAttributes', ('Name', 'SV_VELOCITY'), 'Values'
            )
        ]

        return dict(position=position, velocity=velocity)

    def is_valid_reference(self):
        # Valid as a stack reference only when baseline state vectors are present
        return self.has_baseline()
110 |
--------------------------------------------------------------------------------
/asf_search/export/metalink.py:
--------------------------------------------------------------------------------
1 | import inspect
2 | import os
3 | from types import GeneratorType
4 | from urllib import parse
5 | import xml.etree.ElementTree as ETree
6 |
7 | from asf_search import ASF_LOGGER
8 | from asf_search.export.export_translators import ASFSearchResults_to_properties_list
9 |
10 |
def results_to_metalink(results):
    """Wrap search results (a single results object or a generator of result pages)
    in a lazily-streaming metalink serializer."""
    ASF_LOGGER.info('Started translating results to metalink format')

    streamed = inspect.isgeneratorfunction(results) or isinstance(results, GeneratorType)
    pages = results if streamed else [results]
    return MetalinkStreamArray(pages)
18 |
19 |
class MetalinkStreamArray(list):
    """
    Streams ASF search results as metalink output.

    Subclasses list so consumers expecting a sequence can iterate it; the
    actual content is produced lazily page-by-page via streamPages().
    """

    def __init__(self, results):
        self.pages = results
        # Reported length is fixed at 1; real output length is unknown while streaming
        self.len = 1
        self.header = (
            ''
            '\n'
            'Alaska Satellite Facilityhttp://www.asf.alaska.edu/\n'  # noqa F401
            ''
        )

        self.footer = '\n\n'

    def get_additional_fields(self, product):
        # Hook for subclasses to inject extra per-product properties
        return {}

    def __iter__(self):
        return self.streamPages()

    def __len__(self):
        return self.len

    def streamPages(self):
        """Yield the header, one metalink file entry per product, then the footer."""
        yield self.header

        completed = False
        for page_idx, page in enumerate(self.pages):
            ASF_LOGGER.info(f'Streaming {len(page)} products from page {page_idx}')
            completed = page.searchComplete

            properties_list = ASFSearchResults_to_properties_list(page, self.get_additional_fields)
            yield from [self.getItem(p) for p in properties_list]

        if not completed:
            # Logger.warn is a deprecated alias of Logger.warning
            ASF_LOGGER.warning('Failed to download all results from CMR')

        yield self.footer

        ASF_LOGGER.info(f'Finished streaming {self.getOutputType()} results')

    def getOutputType(self) -> str:
        return 'metalink'

    def getItem(self, p):
        """Build the metalink <file> entry for a single product's properties dict.

        `md5sum` and `bytes` may be scalars or per-file dicts keyed by file name
        (e.g. multi-file products); both are optional.
        """
        file = ETree.Element('file', attrib={'name': p['fileName']})
        resources = ETree.Element('resources')

        url = ETree.Element('url', attrib={'type': 'http'})
        url.text = p['url']
        resources.append(url)
        file.append(resources)

        if p.get('md5sum') and p.get('md5sum') != 'NA':
            verification = ETree.Element('verification')
            if isinstance(p.get('md5sum'), dict):
                # per-file md5sums keyed by the file name extracted from the url
                a = parse.urlparse(p['url'])
                file_name = os.path.basename(a.path)
                md5_entry = p['md5sum'].get(file_name)
                h = ETree.Element('hash', {'type': 'md5'})
                if md5_entry is not None:
                    h.text = md5_entry
                    verification.append(h)
            else:
                h = ETree.Element('hash', {'type': 'md5'})
                h.text = p['md5sum']
                verification.append(h)
            file.append(verification)

        # use .get() so products without a 'bytes' property can't raise KeyError
        # (the md5sum handling above already used .get())
        if p.get('bytes') and p.get('bytes') != 'NA':
            size = ETree.Element('size')
            if isinstance(p.get('bytes'), dict):
                # per-file byte counts keyed by file name; omit the size element
                # when this file has no entry rather than emitting the dict repr
                a = parse.urlparse(p['url'])
                file_name = os.path.basename(a.path)
                bytes_entry = p['bytes'].get(file_name)
                if bytes_entry is not None:
                    size.text = str(bytes_entry['bytes'])
                    file.append(size)
            else:
                size.text = str(p['bytes'])
                file.append(size)

        return '\n' + (8 * ' ') + ETree.tostring(file, encoding='unicode')

    def indent(self, elem, level=0):
        # Only Python 3.9+ has a built-in indent function for element tree.
        # https://stackoverflow.com/a/33956544
        i = '\n' + level * ' '
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + ' '
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for elem in elem:
                self.indent(elem, level + 1)
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i
120 |
--------------------------------------------------------------------------------
/asf_search/baseline/stack.py:
--------------------------------------------------------------------------------
1 | from asf_search import ASFProduct, ASFStackableProduct, ASFSearchResults
2 | from typing import Tuple, List, Union
3 | import pytz
4 | from .calc import calculate_perpendicular_baselines
5 |
# Prefer ciso8601's fast C-based ISO-8601 parser when available; fall back to
# dateutil's slower but more permissive parser otherwise.
try:
    from ciso8601 import parse_datetime
except ImportError:
    from dateutil.parser import parse as parse_datetime
10 |
def get_baseline_from_stack(
    reference: ASFProduct, stack: ASFSearchResults
) -> Tuple[ASFSearchResults, List[dict]]:
    """Compute temporal and perpendicular baselines for a stack relative to a reference.

    Filters out metadata-only and baseline-less products, re-selects the
    reference if necessary, then dispatches to the pre-calculated or the
    state-vector-based perpendicular baseline calculation.

    :returns tuple of (stack with baselines injected, list of warning dicts)
    """
    warnings = []

    if len(stack) == 0:
        raise ValueError("No products found matching stack parameters")

    # Drop metadata-only products and products with no baseline info
    usable = []
    for product in stack:
        level = product.properties.get("processingLevel", '')
        if level.lower().startswith("metadata"):
            continue
        if product.baseline is None:
            continue
        usable.append(product)
    stack = usable

    reference, stack, reference_warnings = check_reference(reference, stack)
    if reference_warnings is not None:
        warnings.append(reference_warnings)

    stack = calculate_temporal_baselines(reference, stack)

    pre_calculated = (
        reference.baseline_type == ASFStackableProduct.BaselineCalcType.PRE_CALCULATED
    )
    if pre_calculated:
        stack = offset_perpendicular_baselines(reference, stack)
    else:
        stack = calculate_perpendicular_baselines(
            reference.properties["sceneName"], stack
        )

    missing = _count_missing_state_vectors(stack)
    if missing > 0:
        warnings.append(
            {
                "MISSING STATE VECTORS":
                    f'{missing} scenes in stack missing State Vectors, '
                    'perpendicular baseline not calculated for these scenes'
            }
        )

    return ASFSearchResults(stack), warnings
50 |
51 |
52 | def _count_missing_state_vectors(stack) -> int:
53 | return len([scene for scene in stack if scene.baseline.get("noStateVectors")])
54 |
55 |
def find_new_reference(stack: ASFSearchResults) -> Union[ASFProduct, None]:
    """Return the first product in the stack usable as a baseline reference, or None."""
    return next(
        (product for product in stack if product.is_valid_reference()),
        None,
    )
61 |
62 |
def check_reference(reference: ASFProduct, stack: ASFSearchResults):
    """Ensure the reference scene is present in the stack and usable for baselines.

    :returns tuple of (possibly-replaced reference, stack, warnings-or-None)
    """
    warnings = None
    stack_scene_names = [product.properties["sceneName"] for product in stack]

    # Somehow the reference we built the stack from is missing?! Just pick one
    if reference.properties["sceneName"] not in stack_scene_names:
        reference = stack[0]
        warnings = [
            {
                'NEW_REFERENCE':
                'A new reference scene had to be selected in order to calculate baseline values.'
            }
        ]

    # non-s1 is_valid_reference raise an error, while we try to find a valid s1 reference
    # do we want this behaviour for pre-calc stacks?
    if not reference.is_valid_reference():
        reference = find_new_reference(stack)
        if reference is None:
            raise ValueError(
                "No valid state vectors on any scenes in stack, this is fatal"
            )

    return reference, stack, warnings
86 |
87 |
def calculate_temporal_baselines(reference: ASFProduct, stack: ASFSearchResults):
    """
    Calculates temporal baselines for a stack of products based on a reference scene
    and injects those values into the stack.

    :param reference: The reference product from which to calculate temporal baselines.
    :param stack: The stack to operate on.
    :return: The stack, with 'temporalBaseline' (days) injected into each product's
        properties in-place.
    """
    def _aware_utc(timestamp: str):
        # Treat naive timestamps as UTC so the date arithmetic is consistent
        parsed = parse_datetime(timestamp)
        return parsed if parsed.tzinfo is not None else pytz.utc.localize(parsed)

    reference_time = _aware_utc(reference.properties["startTime"])

    for secondary in stack:
        secondary_time = _aware_utc(secondary.properties["startTime"])
        delta = secondary_time.date() - reference_time.date()
        secondary.properties["temporalBaseline"] = delta.days

    return stack
110 |
111 |
def offset_perpendicular_baselines(reference: ASFProduct, stack: ASFSearchResults):
    """Shift each product's pre-calculated insarBaseline so the reference sits at 0,
    storing the rounded result as 'perpendicularBaseline' in-place."""
    reference_offset = float(reference.baseline["insarBaseline"])

    for product in stack:
        relative = float(product.baseline["insarBaseline"]) - reference_offset
        product.properties["perpendicularBaseline"] = round(relative)

    return stack
121 |
--------------------------------------------------------------------------------
/asf_search/Products/S1BurstProduct.py:
--------------------------------------------------------------------------------
1 | import copy
2 | from typing import Dict, Union
3 | from asf_search import ASFSearchOptions, ASFSession
4 | from asf_search.Products import S1Product
5 | from asf_search.CMR.translate import try_parse_date
6 | from asf_search.CMR.translate import try_parse_int
7 | from asf_search.constants import PRODUCT_TYPE
8 |
9 |
class S1BurstProduct(S1Product):
    """
    S1Product Subclass made specifically for Sentinel-1 SLC-BURST products

    Key features/properties:
    - `properties['burst']` contains SLC-BURST Specific fields
      such as `fullBurstID` and `burstIndex`
    - `properties['additionalUrls']` contains BURST-XML url
    - SLC-BURST specific stacking params

    ASF Dataset Documentation Page:
    https://asf.alaska.edu/datasets/data-sets/derived-data-sets/sentinel-1-bursts/
    """

    _base_properties = {
        **S1Product._base_properties,
        'bytes': {'path': ['AdditionalAttributes', ('Name', 'BYTE_LENGTH'), 'Values', 0]},
        'absoluteBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_ABSOLUTE'), 'Values', 0], 'cast': try_parse_int},
        'relativeBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_RELATIVE'), 'Values', 0], 'cast': try_parse_int},
        'fullBurstID': {'path': ['AdditionalAttributes', ('Name', 'BURST_ID_FULL'), 'Values', 0]},
        'burstIndex': {'path': ['AdditionalAttributes', ('Name', 'BURST_INDEX'), 'Values', 0], 'cast': try_parse_int},
        'samplesPerBurst': {'path': ['AdditionalAttributes', ('Name', 'SAMPLES_PER_BURST'), 'Values', 0], 'cast': try_parse_int},
        'subswath': {'path': ['AdditionalAttributes', ('Name', 'SUBSWATH_NAME'), 'Values', 0]},
        'azimuthTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_TIME'), 'Values', 0], 'cast': try_parse_date},
        'azimuthAnxTime': {'path': ['AdditionalAttributes', ('Name', 'AZIMUTH_ANX_TIME'), 'Values', 0]},
    }

    def __init__(self, args: Dict = {}, session: ASFSession = ASFSession()):
        super().__init__(args, session)
        self.properties["sceneName"] = self.properties["fileID"]
        self.properties.pop('frameNumber', None)
        # Gathers burst properties into `burst` specific dict
        # rather than properties dict to limit breaking changes
        self.properties["burst"] = {
            "absoluteBurstID": self.properties.pop("absoluteBurstID"),
            "relativeBurstID": self.properties.pop("relativeBurstID"),
            "fullBurstID": self.properties.pop("fullBurstID"),
            "burstIndex": self.properties.pop("burstIndex"),
            "samplesPerBurst": self.properties.pop("samplesPerBurst"),
            "subswath": self.properties.pop("subswath"),
            "azimuthTime": self.properties.pop("azimuthTime"),
            "azimuthAnxTime": self.properties.pop("azimuthAnxTime"),
        }

        urls = self.umm_get(
            self.umm, "RelatedUrls", ("Type", [("USE SERVICE API", "URL")]), 0
        )
        if urls is not None:
            self.properties["url"] = urls[0]
            self.properties["fileName"] = (
                self.properties["fileID"] + "." + urls[0].split(".")[-1]
            )
            self.properties["additionalUrls"] = [urls[1]]  # xml-metadata url

    def get_stack_opts(self, opts: ASFSearchOptions = None):
        """
        Returns the search options asf-search will use internally
        to build an SLC-BURST baseline stack from

        :param opts: additional criteria for limiting
        :returns ASFSearchOptions used for build Sentinel-1 SLC-BURST Stack
        """
        # BUGFIX: this module does `import copy`, so the bare name `copy` is the
        # module object and calling it raised TypeError whenever opts was passed;
        # use copy.copy() to shallow-copy the options
        stack_opts = ASFSearchOptions() if opts is None else copy.copy(opts)

        stack_opts.processingLevel = self.get_default_baseline_product_type()
        stack_opts.fullBurstID = self.properties["burst"]["fullBurstID"]
        stack_opts.polarization = [self.properties["polarization"]]
        return stack_opts

    def _get_additional_filenames_and_urls(self, default_filename: str = None):
        # Burst XML filenames are just numbers, this makes it more indentifiable
        if default_filename is None:
            default_filename = self.properties["fileName"]

        file_name = f"{'.'.join(default_filename.split('.')[:-1])}.xml"

        return [(file_name, self.properties["additionalUrls"][0])]

    @staticmethod
    def get_default_baseline_product_type() -> Union[str, None]:
        """
        Returns the product type to search for when building a baseline stack.
        """
        return PRODUCT_TYPE.BURST
94 |
--------------------------------------------------------------------------------
/asf_search/Pair.py:
--------------------------------------------------------------------------------
1 | import importlib.util
2 | import math
3 |
4 | from .ASFProduct import ASFProduct
5 | from .baseline import calculate_perpendicular_baselines
6 | from .exceptions import CoherenceEstimationError
7 | import pytz
8 |
# Optional dependencies needed by Pair.estimate_s1_mean_coherence()
# (installed via the asf-search[coherence] extra).
_COHERENCE_OPT_DEPS = ['zarr', 's3fs', 'rioxarray', 'xarray']
try:
    # Check every optional dependency is importable before importing any,
    # so a partial install behaves the same as no install.
    for spec in _COHERENCE_OPT_DEPS:
        if importlib.util.find_spec(spec) is None:
            raise ImportError

    # NOTE(review): fsspec is not in the find_spec list above; if it's missing,
    # its ImportError is caught here too — confirm it's a guaranteed dependency
    import fsspec
    import xarray as xr

except ImportError:
    # Sentinels checked at call time; estimate_s1_mean_coherence() raises a
    # descriptive ImportError when these are None.
    fsspec = None
    xr = None

# Prefer ciso8601's fast C-based ISO-8601 parser when available; fall back
# to dateutil's slower but more permissive parser otherwise.
try:
    from ciso8601 import parse_datetime
except ImportError:
    from dateutil.parser import parse as parse_datetime
26 |
27 |
class Pair:
    """
    A Pair is comprised of a reference scene and a secondary scene. These scenes typically intersect geographically,
    but that is not a requirement. When a pair is created, its perpendicular and temporal baselines are calculated
    and stored in the self.perpendicular_baseline and self.temporal_baseline member variables.

    Two pairs are equivalent if they have matching reference and secondary dates
    """
    def __init__(self, ref: ASFProduct, sec: ASFProduct):
        self.ref = ref
        self.sec = sec
        # (reference sceneName, secondary sceneName) — the basis for __eq__/__hash__
        self.id = (ref.properties['sceneName'], sec.properties['sceneName'])

        # Perpendicular baseline of the secondary relative to the reference;
        # index 0 of the returned list corresponds to `sec` in [sec, ref]
        self.perpendicular_baseline = calculate_perpendicular_baselines(
            ref.properties['sceneName'],
            [sec, ref])[0].properties['perpendicularBaseline']

        # Normalize both acquisition times to timezone-aware UTC before comparing
        self.ref_time = parse_datetime(ref.properties["startTime"])
        if self.ref_time.tzinfo is None:
            self.ref_time = pytz.utc.localize(self.ref_time)
        self.sec_time = parse_datetime(sec.properties["startTime"])
        if self.sec_time.tzinfo is None:
            self.sec_time = pytz.utc.localize(self.sec_time)

        # timedelta between acquisition dates (sign convention: secondary - reference)
        self.temporal_baseline = self.sec_time.date() - self.ref_time.date()

    def __repr__(self) -> str:
        return f"Pair({self.id[0]}, {self.id[1]})"

    def __eq__(self, other):
        # Pairs compare equal when reference and secondary scene names match
        if not isinstance(other, Pair):
            return NotImplemented
        return self.id == other.id

    def __hash__(self) -> int:
        return hash(self.id)

    def estimate_s1_mean_coherence(self) -> float:
        '''
        Estimates mean coherence for a Pair of Sentinel-1 scenes or bursts using the 11367x4367 overview of the 2019-2020
        VV COH data from the Global Seasonal Sentinel-1 Interferometric Coherence and Backscatter Dataset:
        https://asf.alaska.edu/datasets/daac/global-seasonal-sentinel-1-interferometric-coherence-and-backscatter-dataset/

        To support effecient in-place subsetting and access, the VV COH data has been saved to a public Zarr Store in AWS S3:
        s3://asf-search-coh/global_coh_100ppd_11367x4367

        Returns:
            float: mean of the dataset's coherence values clipped to the reference
            scene's bounding box

        Raises:
            ImportError: when the optional coherence dependencies are not installed
            CoherenceEstimationError: when the temporal baseline exceeds the 48 days
            covered by the dataset
        '''
        if xr is None or fsspec is None:
            raise ImportError(
                'The `estimate_s1_mean_coherence()` method requires the optional asf-search '
                f'dependencies {_COHERENCE_OPT_DEPS}, '
                'but they could not be found in the current python environment. '
                'Enable this method by including the appropriate pip or conda install. '
                'Ex: `python -m pip install asf-search[coherence]`'
            )

        # Season is derived from the reference scene's acquisition month
        month = parse_datetime(self.ref.properties["startTime"]).month
        if month in [12, 1, 2]:
            season = 'winter'
        elif month in [3, 4, 5]:
            season = 'spring'
        elif month in [6, 7, 8]:
            season = 'summer'
        elif month in [9, 10, 11]:
            season = 'fall'

        # Round the temporal baseline up to the dataset's 6-day increments
        temporal = math.ceil(self.temporal_baseline.days / 6) * 6
        if temporal > 48:
            msg = (f"""Coherence dataset includes temporal baselines up to 48 days.
            Temporal baseline: {self.temporal_baseline.days} days""")
            raise CoherenceEstimationError(msg)

        uri = f"s3://asf-search-coh/global_coh_100ppd_11367x4367_Zarrv2/Global_{season}_vv_COH{temporal}_100ppd.zarr"
        # Bounding box of the reference footprint; assumes the first ring of a
        # Polygon geometry — TODO confirm behavior for MultiPolygon products
        coords = self.ref.geometry['coordinates'][0]
        lons, lats = zip(*coords)
        minx, miny, maxx, maxy = min(lons), min(lats), max(lons), max(lats)

        # Anonymous (unsigned) S3 access; the Zarr store is public
        ds = xr.open_zarr(
            fsspec.get_mapper(uri, s3={'anon': True}),
            consolidated=False
        )
        # NOTE(review): the `.rio` accessor is registered by importing rioxarray;
        # this module only checks rioxarray via find_spec without importing it —
        # verify it is imported elsewhere before this line runs
        ds = ds.rio.write_crs("EPSG:4326", inplace=False)
        subset = ds.rio.clip_box(minx=minx, miny=miny, maxx=maxx, maxy=maxy)
        return subset.coherence.mean().compute().item()
113 |
--------------------------------------------------------------------------------
/asf_search/download/download.py:
--------------------------------------------------------------------------------
1 | from typing import Iterable
2 | from multiprocessing import Pool
3 | import os.path
4 | from urllib import parse
5 | from requests import Response
6 | from requests.exceptions import HTTPError
7 | import warnings
8 |
9 | from asf_search.exceptions import ASFAuthenticationError, ASFDownloadError
10 | from asf_search import ASFSession
11 | from tenacity import retry, stop_after_delay, retry_if_result, wait_fixed
12 |
13 | try:
14 | from remotezip import RemoteZip
15 | except ImportError:
16 | RemoteZip = None
17 |
18 |
def _download_url(arg):
    """Pool.map helper: unpack a (url, path, session) tuple and download it."""
    url, path, session = arg
    download_url(url=url, path=path, session=session)
22 |
23 |
def download_urls(urls: Iterable[str], path: str, session: ASFSession = None, processes: int = 1):
    """
    Downloads all products from the specified URLs to the specified location.

    :param urls: List of URLs from which to download
    :param path: Local path in which to save the product
    :param session: The session to use, in most cases should be authenticated beforehand
    :param processes: Number of download processes to use. Defaults to 1 (i.e. sequential download)
    :return:
    """
    if session is None:
        session = ASFSession()

    if processes <= 1:
        # Sequential download; no worker pool needed
        for url in urls:
            download_url(url=url, path=path, session=session)
    else:
        # Use the pool as a context manager so workers are cleaned up even if
        # a download raises (the previous close()/join() calls were skipped on
        # exception, leaking worker processes). map() blocks until completion.
        with Pool(processes=processes) as pool:
            pool.map(_download_url, [(url, path, session) for url in urls])
            pool.close()
            pool.join()
46 |
47 |
def download_url(url: str, path: str, filename: str = None, session: ASFSession = None) -> None:
    """
    Downloads a product from the specified URL to the specified location and (optional) filename.

    :param url: URL from which to download
    :param path: Local path in which to save the product
    :param filename: Optional filename to be used, extracted from the URL by default
    :param session: The session to use, in most cases should be authenticated beforehand
    :return:
    """
    if filename is None:
        # Default to the last path segment of the URL
        filename = os.path.split(parse.urlparse(url).path)[1]

    if not os.path.isdir(path):
        raise ASFDownloadError(f'Error downloading {url}: directory not found: {path}')

    target = os.path.join(path, filename)
    if os.path.isfile(target):
        warnings.warn(f'File already exists, skipping download: {os.path.join(path, filename)}')
        return

    if session is None:
        session = ASFSession()

    response = _try_get_response(session=session, url=url)

    # Stream the body to disk in chunks to keep memory use flat
    with open(target, 'wb') as f:
        for chunk in response.iter_content(chunk_size=8192):
            f.write(chunk)
77 |
78 |
def remotezip(url: str, session: ASFSession) -> 'RemoteZip':  # type: ignore # noqa: F821
    """
    Open a remote zip product for partial reads via the optional remotezip package.

    :param url: the url to the zip product
    :param session: the authenticated ASFSession to read and download from the zip file
    """
    if RemoteZip is not None:
        # Strip auth headers on AWS redirects so credentials aren't forwarded to S3
        session.hooks['response'].append(strip_auth_if_aws)
        return RemoteZip(url, session=session)

    raise ImportError(
        'Could not find remotezip package in current python environment. '
        '"remotezip" is an optional dependency of asf-search required '
        'for the `remotezip()` method. '
        'Enable by including the appropriate pip or conda install. '
        'Ex: `python3 -m pip install asf-search[extras]`'
    )
95 |
96 |
def strip_auth_if_aws(r, *args, **kwargs):
    """Response hook: on a redirect into amazonaws.com, drop every header except
    Location so auth credentials aren't forwarded to S3."""
    if not (300 <= r.status_code <= 399):
        return

    location = r.headers['location']
    if 'amazonaws.com' in parse.urlparse(location).netloc:
        r.headers.clear()
        r.headers['location'] = location
105 |
106 |
107 | # if it's an unprocessed burst product it'll return a 202 and we'll have to query again
108 | # https://sentinel1-burst-docs.asf.alaska.edu/
def _is_burst_processing(response: Response):
    """Return True when the response is HTTP 202 (on-demand burst still processing)."""
    status = response.status_code
    return status == 202
111 |
112 |
@retry(
    reraise=True,
    retry=retry_if_result(_is_burst_processing),
    wait=wait_fixed(1),
    stop=stop_after_delay(90),
)
def _try_get_response(session: ASFSession, url: str):
    """GET the url as a stream, retrying (via tenacity, up to 90s at 1s intervals)
    while the server answers 202 for an on-demand burst still being processed.

    Raises ASFAuthenticationError for 4xx responses; re-raises other HTTP errors.
    """
    response = session.get(url, stream=True, hooks={'response': strip_auth_if_aws})

    try:
        response.raise_for_status()
    except HTTPError as error:
        if 400 <= response.status_code <= 499:
            raise ASFAuthenticationError(f'HTTP {error.response.status_code}: {error.response.text}')
        raise error

    return response
131 |
--------------------------------------------------------------------------------
/asf_search/CMR/field_map.py:
--------------------------------------------------------------------------------
# Maps asf-search API keyword names to CMR query parameters.
# 'key' is the CMR parameter name; 'fmt' is a format string the search value is
# interpolated into. For range-style additional attributes the CMR value slots
# are 'type,NAME,min,max' — e.g. minDoppler fills the min slot
# ('float,DOPPLER,{0},') and maxDoppler the max slot ('float,DOPPLER,,{0}').
field_map = {
    # API parameter       CMR keyword                      CMR format strings
    'absoluteOrbit': {'key': 'orbit_number', 'fmt': '{0}'},
    'asfFrame': {'key': 'attribute[]', 'fmt': 'int,FRAME_NUMBER,{0}'},
    'maxBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,,{0}'},
    'minBaselinePerp': {'key': 'attribute[]', 'fmt': 'float,INSAR_BASELINE,{0},'},
    'bbox': {'key': 'bounding_box', 'fmt': '{0}'},
    'beamMode': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE,{0}'},
    'beamSwath': {'key': 'attribute[]', 'fmt': 'string,BEAM_MODE_TYPE,{0}'},
    'campaign': {'key': 'attribute[]', 'fmt': 'string,MISSION_NAME,{0}'},
    'circle': {'key': 'circle', 'fmt': '{0}'},
    'maxDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,,{0}'},
    'minDoppler': {'key': 'attribute[]', 'fmt': 'float,DOPPLER,{0},'},
    'maxFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,,{0}'},  # noqa F401
    'minFaradayRotation': {'key': 'attribute[]', 'fmt': 'float,FARADAY_ROTATION,{0},'},  # noqa F401
    'flightDirection': {'key': 'attribute[]', 'fmt': 'string,ASCENDING_DESCENDING,{0}'},  # noqa F401
    'flightLine': {'key': 'attribute[]', 'fmt': 'string,FLIGHT_LINE,{0}'},
    'frame': {'key': 'attribute[]', 'fmt': 'int,CENTER_ESA_FRAME,{0}'},
    'granule_list': {'key': 'readable_granule_name[]', 'fmt': '{0}'},
    'groupID': {'key': 'attribute[]', 'fmt': 'string,GROUP_ID,{0}'},
    'insarStackId': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_ID,{0}'},
    'linestring': {'key': 'line', 'fmt': '{0}'},
    'lookDirection': {'key': 'attribute[]', 'fmt': 'string,LOOK_DIRECTION,{0}'},
    'maxInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,,{0}'},
    'minInsarStackSize': {'key': 'attribute[]', 'fmt': 'int,INSAR_STACK_SIZE,{0},'},
    'instrument': {'key': 'instrument[]', 'fmt': '{0}'},
    'offNadirAngle': {'key': 'attribute[]', 'fmt': 'float,OFF_NADIR_ANGLE,{0}'},
    'platform': {'key': 'platform[]', 'fmt': '{0}'},
    'polarization': {'key': 'attribute[]', 'fmt': 'string,POLARIZATION,{0}'},
    'point': {'key': 'point', 'fmt': '{0}'},
    'polygon': {'key': 'polygon', 'fmt': '{0}'},
    'processingDate': {'key': 'updated_since', 'fmt': '{0}'},
    'processingLevel': {'key': 'attribute[]', 'fmt': 'string,PROCESSING_TYPE,{0}'},
    'product_list': {'key': 'granule_ur[]', 'fmt': '{0}'},
    'provider': {'key': 'provider', 'fmt': '{0}'},
    'relativeOrbit': {'key': 'attribute[]', 'fmt': 'int,PATH_NUMBER,{0}'},
    'temporal': {'key': 'temporal', 'fmt': '{0}'},
    'collections': {'key': 'echo_collection_id[]', 'fmt': '{0}'},
    'shortName': {'key': 'shortName', 'fmt': '{0}'},
    'temporalBaselineDays': {'key': 'attribute[]', 'fmt': 'int,TEMPORAL_BASELINE_DAYS,{0}'},  # noqa F401
    # SLC BURST fields
    'absoluteBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_ABSOLUTE,{0}'},
    'relativeBurstID': {'key': 'attribute[]', 'fmt': 'int,BURST_ID_RELATIVE,{0}'},
    'fullBurstID': {'key': 'attribute[]', 'fmt': 'string,BURST_ID_FULL,{0}'},
    # OPERA-S1 field
    'operaBurstID': {'key': 'attribute[]', 'fmt': 'string,OPERA_BURST_ID,{0}'},
    # NISAR fields
    'mainBandPolarization': {'key': 'attribute[]', 'fmt': 'string,FREQUENCY_A_POLARIZATION_CONCAT,{0}'},
    'sideBandPolarization': {'key': 'attribute[]', 'fmt': 'string,FREQUENCY_B_POLARIZATION_CONCAT,{0}'},
    'frameCoverage': {'key': 'attribute[]', 'fmt': 'string,FULL_FRAME,{0}'},
    'jointObservation': {'key': 'attribute[]', 'fmt': 'string,JOINT_OBSERVATION,{0}'},
    'rangeBandwidth': {'key': 'attribute[]', 'fmt': 'string,RANGE_BANDWIDTH_CONCAT,{0}'},
    'productionConfiguration': {'key': 'attribute[]', 'fmt': 'string,PRODUCTION_PIPELINE,{0}'},
}
55 |
--------------------------------------------------------------------------------
/tests/BaselineSearch/test_baseline_search.py:
--------------------------------------------------------------------------------
1 | from copy import deepcopy
2 | from unittest.mock import patch
3 | from asf_search.ASFSearchOptions import ASFSearchOptions
4 | from asf_search.Products import ARIAS1GUNWProduct
5 | from asf_search.exceptions import ASFBaselineError, ASFSearchError
6 | from asf_search.ASFSearchResults import ASFSearchResults
7 | from asf_search import ASFSession, DATASET, BEAMMODE, POLARIZATION, PRODUCT_TYPE
8 | from asf_search.search.baseline_search import stack_from_id, stack_from_product
9 | from asf_search.baseline.stack import calculate_temporal_baselines
10 | import pytest
11 |
12 | from asf_search.search.search_generator import as_ASFProduct
13 | from asf_enumeration import aria_s1_gunw
14 |
def run_test_get_preprocessed_stack_params(product):
    """Pre-calculated stacks should use only processingLevel + insarStackId."""
    reference = as_ASFProduct(product, ASFSession())
    stack_params = reference.get_stack_opts()

    expected = product['properties']

    assert stack_params.processingLevel == [reference.get_default_baseline_product_type()]
    assert stack_params.insarStackId == expected['insarStackId']
    assert len(dict(stack_params)) == 2
24 |
25 |
def run_test_get_unprocessed_stack_params(product):
    """Stack opts for products whose baselines are computed on the fly should
    match the reference product's polarization/platform/path metadata."""
    reference = as_ASFProduct(product, ASFSession())
    stack_opts = reference.get_stack_opts()

    source_properties = product['properties']
    assert source_properties['polarization'] in stack_opts.polarization

    props = reference.properties
    if props['processingLevel'] == 'BURST':
        assert stack_opts.polarization == [props['polarization']]
        assert stack_opts.fullBurstID == [props['burst']['fullBurstID']]
    elif props['sceneName'].startswith('S1-GUNW'):
        assert stack_opts.platform == ['SA', 'SB', 'SC']
        assert DATASET.SENTINEL1 in stack_opts.dataset
        assert stack_opts.processingLevel == [PRODUCT_TYPE.SLC]
        assert stack_opts.beamMode == [BEAMMODE.IW]
        assert stack_opts.polarization == [POLARIZATION.VV, POLARIZATION.VV_VH]
        assert stack_opts.flightDirection.upper() == props['flightDirection'].upper()
        assert stack_opts.relativeOrbit == [props['pathNumber']]
    else:
        # Stack polarization is widened to the matching co/cross-pol pair
        if props['polarization'] in ['VV', 'VV+VH']:
            assert stack_opts.polarization == ['VV', 'VV+VH']
        else:
            assert stack_opts.polarization == ['HH', 'HH+HV']
        assert len(dict(stack_opts)) == 7
51 |
52 |
def run_get_stack_opts_invalid_insarStackId(product):
    """get_stack_opts should raise ASFBaselineError when the insarStackId is invalid."""
    broken_reference = as_ASFProduct(product, ASFSession())
    broken_reference.properties['insarStackId'] = '0'

    with pytest.raises(ASFBaselineError):
        broken_reference.get_stack_opts()
60 |
61 |
def run_test_calc_temporal_baselines(reference, stack):
    """calculate_temporal_baselines should annotate every product with a
    temporalBaseline property without changing the stack size."""
    ref_product = as_ASFProduct(reference, ASFSession())
    products = ASFSearchResults([as_ASFProduct(entry, ASFSession()) for entry in stack])
    expected_size = len(products)

    calculate_temporal_baselines(ref_product, products)

    assert len(products) == expected_size
    assert all('temporalBaseline' in product.properties for product in products)
72 |
73 |
def run_test_stack_from_product(reference, stack):
    """stack_from_product should return results in non-decreasing
    temporal-baseline order."""
    ref_product = as_ASFProduct(reference, ASFSession())

    with patch('asf_search.baseline_search.search') as search_mock:
        search_mock.return_value = ASFSearchResults(
            [as_ASFProduct(entry, ASFSession()) for entry in stack]
        )

        result_stack = stack_from_product(ref_product)

        baselines = [item.properties['temporalBaseline'] for item in result_stack]
        assert baselines == sorted(baselines)
90 |
91 |
def run_test_stack_from_id(stack_id: str, reference, stack, opts: ASFSearchOptions):
    """stack_from_id should raise ASFSearchError for an empty id, and otherwise
    return a stack sorted by temporal baseline.

    :param stack_id: reference product id ('' triggers the error path)
    :param reference: unused reference fixture (kept for signature parity with siblings)
    :param stack: list of geojson-style product fixtures for the mocked searches
    :param opts: search options forwarded to stack_from_id
    """
    temp = deepcopy(stack)

    with patch('asf_search.baseline_search.product_search') as mock_product_search:
        mock_product_search.return_value = ASFSearchResults(
            [as_ASFProduct(product, ASFSession()) for product in stack]
        )

        if not stack_id:
            with pytest.raises(ASFSearchError):
                stack_from_id(stack_id)
        else:
            with patch('asf_search.baseline_search.search') as search_mock:
                search_mock.return_value = ASFSearchResults(
                    [as_ASFProduct(product, ASFSession()) for product in temp]
                )

                returned_stack = stack_from_id(stack_id, opts=opts)
                stack_files = set(x['properties']['fileID'] for x in stack)
                filtered_stack = [x for x in returned_stack if x.properties['fileID'] in stack_files]
                for idx, secondary in enumerate(filtered_stack):
                    if idx > 0:
                        # Compare against the previous *returned* product: the
                        # returned stack is sorted by temporal baseline, while the
                        # input fixture `stack` has no ordering guarantee.
                        assert (
                            secondary.properties['temporalBaseline']
                            >= filtered_stack[idx - 1].properties['temporalBaseline']
                        )
--------------------------------------------------------------------------------
/asf_search/ASFSearchResults.py:
--------------------------------------------------------------------------------
1 | from collections import UserList
2 | from multiprocessing import Pool
3 | import json
4 | from typing import List
5 | from asf_search import ASFSession, ASFSearchOptions
6 | from asf_search.download.file_download_type import FileDownloadType
7 | from asf_search.exceptions import ASFSearchError
8 |
9 | from asf_search import ASF_LOGGER
10 | from asf_search.export.csv import results_to_csv
11 | from asf_search.export.jsonlite import results_to_jsonlite
12 | from asf_search.export.jsonlite2 import results_to_jsonlite2
13 | from asf_search.export.json import results_to_json
14 | from asf_search.export.kml import results_to_kml
15 | from asf_search.export.metalink import results_to_metalink
16 |
17 |
class ASFSearchResults(UserList):
    """A list of ASFProduct search results, with helpers for serializing,
    downloading, and grouping the results.
    """

    def __init__(self, *args, opts: ASFSearchOptions = None):
        super().__init__(*args)
        # Store it JUST so the user can access it (There might be zero products)
        # Each product will use their own reference to opts (but points to the same obj)
        self.searchOptions = opts
        # Set by the search machinery once all results were gathered without error
        self.searchComplete = False

    def geojson(self):
        """Returns the results as a geojson FeatureCollection dict"""
        return {
            'type': 'FeatureCollection',
            'features': [product.geojson() for product in self],
        }

    def csv(self):
        """Serializes the results via the csv exporter"""
        return results_to_csv(self)

    def kml(self):
        """Serializes the results via the kml exporter"""
        return results_to_kml(self)

    def metalink(self):
        """Serializes the results via the metalink exporter"""
        return results_to_metalink(self)

    def json(self):
        """Serializes the results via the json exporter"""
        return results_to_json(self)

    def jsonlite(self):
        """Serializes the results via the jsonlite exporter"""
        return results_to_jsonlite(self)

    def jsonlite2(self):
        """Serializes the results via the jsonlite2 exporter"""
        return results_to_jsonlite2(self)

    def find_urls(self, extension: str = None, pattern: str = r'.*', directAccess: bool = False) -> List[str]:
        """Returns a flat, sorted, de-duplicated list of all https or s3 urls from all results matching an extension and/or regex pattern
        param extension: the file extension to search for. (Defaults to `None`)
        - Example: '.tiff'
        param pattern: A regex pattern to search each url for. (Defaults to `r'.*'`, matching any url)
        - Example: `r'(QA_)+'` to find urls with 'QA_' at least once
        param directAccess: should search in s3 bucket urls (Defaults to `False`)
        """
        urls = []

        for product in self:
            urls.extend(product.find_urls(extension=extension, pattern=pattern, directAccess=directAccess))

        # de-duplicate across products and return in a stable order
        return sorted(list(set(urls)))

    def __str__(self):
        return json.dumps(self.geojson(), indent=2, sort_keys=True)

    def download(
        self,
        path: str,
        session: ASFSession = None,
        processes: int = 1,
        fileType=FileDownloadType.DEFAULT_FILE,
    ) -> None:
        """
        Iterates over each ASFProduct and downloads them to the specified path.

        Parameters
        ----------
        path:
            The directory into which the products should be downloaded.
        session:
            The session to use
            Defaults to the session used to fetch the results, or a new one if none was used.
        processes:
            Number of download processes to use. Defaults to 1 (i.e. sequential download)
        fileType:
            Which file(s) of each product to download.
            Defaults to FileDownloadType.DEFAULT_FILE

        """
        ASF_LOGGER.info(f'Started downloading ASFSearchResults of size {len(self)}.')
        if processes == 1:
            for product in self:
                product.download(path=path, session=session, fileType=fileType)
        else:
            # A process Pool is used (not threads); the log message reflects that.
            ASF_LOGGER.info(f'Using {processes} processes - starting up pool.')
            pool = Pool(processes=processes)
            args = [(product, path, session, fileType) for product in self]
            pool.map(_download_product, args)
            pool.close()
            pool.join()
        ASF_LOGGER.info(f'Finished downloading ASFSearchResults of size {len(self)}.')

    def raise_if_incomplete(self) -> None:
        """Raises ASFSearchError if the search that produced these results did not complete"""
        if not self.searchComplete:
            msg = (
                'Results are incomplete due to a search error. '
                'See logging for more details. (ASFSearchResults.raise_if_incomplete called)'
            )

            ASF_LOGGER.error(msg)
            raise ASFSearchError(msg)

    def get_products_by_subclass_type(self) -> dict:
        """
        Organizes results into dictionary by ASFProduct subclass name
        : return: Dict of ASFSearchResults, organized by ASFProduct subclass names
        """
        subclasses = {}

        for product in self.data:
            product_type = product.get_classname()
            # one ASFSearchResults bucket per subclass name
            subclasses.setdefault(product_type, ASFSearchResults([])).append(product)

        return subclasses
128 |
129 |
130 | def _download_product(args) -> None:
131 | product, path, session, fileType = args
132 | product.download(path=path, session=session, fileType=fileType)
133 |
--------------------------------------------------------------------------------
/asf_search/ASFSearchOptions/ASFSearchOptions.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | import json
3 |
4 | from .validator_map import validator_map, validate
5 | from .config import config
6 | from asf_search import ASF_LOGGER
7 |
8 |
class ASFSearchOptions:
    """Container for search options, restricted to the keys declared in
    validator_map; values are validated on assignment via __setattr__.
    """

    def __init__(self, **kwargs):
        """
        Initialize the object, creating the list of attributes
        based on the contents of validator_map, and assign them based on kwargs

        :param kwargs: any search options to be set immediately
        """
        # init the built in attrs:
        for key in validator_map:
            self.__setattr__(key, None)

        # Apply any parameters passed in:
        for key, value in kwargs.items():
            self.__setattr__(key, value)

    def __setattr__(self, key, value):
        """
        Set a search option, restricting to the keys in validator_map only,
        and applying validation to the value before setting

        :param key: the name of the option to be set
        :param value: the value to which to set the named option
        :raises KeyError: if key is not a valid search option
        """
        # self.* calls custom __setattr__ method, creating inf loop. Use super().*
        # Let values always be None, even if their validator doesn't agree. Used to delete them too:
        if key in validator_map:
            if value is None:  # always maintain config on required fields
                if key in config:
                    super().__setattr__(key, config[key])
                else:
                    super().__setattr__(key, None)
            else:
                super().__setattr__(key, validate(key, value))
        else:
            msg = f"key '{key}' is not a valid search option (setattr)"
            ASF_LOGGER.error(msg)
            raise KeyError(msg)

    def __delattr__(self, item):
        """
        Clear a search option by setting its value to None

        :param item: the name of the option to clear
        :raises KeyError: if item is not a valid search option
        """
        if item in validator_map:
            self.__setattr__(item, None)
        else:
            msg = f"key '{item}' is not a valid search option (delattr)"
            ASF_LOGGER.error(msg)
            raise KeyError(msg)

    def __iter__(self):
        """
        Filters search parameters, only returning populated fields. Used when casting to a dict.
        """

        for key in validator_map:
            if not self._is_val_default(key):
                value = self.__getattribute__(key)
                yield key, value

    def __str__(self):
        """
        What to display if `print(opts)` is called.
        """
        return json.dumps(dict(self), indent=4, default=str)

    # Default is set to '...', since 'None' is a very valid value here
    def pop(self, key, default=...):
        """
        Removes 'key' from self and returns its value. Throws KeyError if doesn't exist

        :param key: name of key to return value of, and delete
        :param default: value to return instead of raising when the key is unset
        """
        if key not in validator_map:
            msg = f"key '{key}' is not a valid key for ASFSearchOptions. (pop)"
            ASF_LOGGER.error(msg)
            raise KeyError(msg)

        if self._is_val_default(key):
            # Identity check against the Ellipsis sentinel: 'default is not ...'
            # avoids invoking a custom __eq__ on caller-supplied defaults.
            if default is not ...:
                return default
            msg = f"key '{key}' is set to empty/None. (pop)"
            ASF_LOGGER.error(msg)
            raise KeyError(msg)

        # Success, delete and return it:
        val = getattr(self, key)
        self.__delattr__(key)
        return val

    def reset_search(self):
        """
        Resets all populated search options, excluding config options (host, session, etc)
        """
        for key, _ in self:
            if key not in config:
                super().__setattr__(key, None)

    def merge_args(self, **kwargs) -> None:
        """
        Merges all keyword args into this ASFSearchOptions object.
        Emits a warning for any options that are over-written by the operation.

        :param kwargs: The search options to merge into the object
        :return: None
        """
        for key in kwargs:
            # Spit out warning if the value is something other than the default:
            if not self._is_val_default(key):
                msg = (
                    'While merging search options, '
                    f'existing option {key}:{getattr(self, key, None)} '
                    f'overwritten by kwarg with value {kwargs[key]}'
                )
                ASF_LOGGER.warning(msg)
                warnings.warn(msg)
            self.__setattr__(key, kwargs[key])

    def _is_val_default(self, key) -> bool:
        """
        Returns bool on if the key's current value is the same as its default value

        :param key: The key to check
        :return: bool
        """
        default_val = config[key] if key in config else None
        current_val = getattr(self, key, None)
        return current_val == default_val
139 |
--------------------------------------------------------------------------------
/tests/yml_tests/test_ASFSearchOptions.yml:
--------------------------------------------------------------------------------
1 | tests:
2 | - test-validator-map-validate parse-string campaign:
3 | key: campaign
4 | value: 123
5 | output: '123'
6 |
7 | - test-validator-map-validate parse-float maxDoppler:
8 | key: maxDoppler
9 | value: '123.0'
10 | output: 123.0
11 |
12 | - test-validators parse_float:
13 | validator: parse_float
14 | input: '123.0'
15 | output: 123.0
16 | error: null
17 |
18 | - test-validators parse_float error invalid float:
19 | validator: parse_float
20 | input: 'asf'
21 | output: 123.0
22 | error: Invalid float
23 |
24 | - test-validators parse_float error infinity:
25 | validator: parse_float
26 | input: 'inf'
27 | output: 123.0
28 | error: Float values must be finite
29 |
30 | - test-validators parse_string from string:
31 | validator: parse_string
32 | input: 'inf'
33 | output: 'inf'
34 | error: null
35 |
36 | - test-validators parse_string from number:
37 | validator: parse_string
38 | input: 123.0
39 | output: '123.0'
40 | error: null
41 |
42 | - test-validators parse_int_range ints:
43 | validator: parse_int_range
44 | input: { tuple: [123.0, 125.0] }
45 | output: { tuple: [123.0, 125.0] }
46 | error: null
47 |
48 | - test-validators parse_int_range floats:
49 | validator: parse_int_range
50 | input: { tuple: [123.5, 125.5] }
51 | output: { tuple: [123.0, 125.0] }
52 | error: null
53 |
54 | - test-validators parse_float_range ints:
55 | validator: parse_float_range
56 | input: { tuple: [123.0, 125.0] }
57 | output: { tuple: [123.0, 125.0] }
58 | error: null
59 |
60 | - test-validators parse_float_range floats:
61 | validator: parse_float_range
62 | input: { tuple: [123.5, 125.5] }
63 | output: { tuple: [123.5, 125.5] }
64 | error: null
65 |
66 | - test-validators parse_string_list strings:
67 | validator: parse_string_list
68 | input: ['asf', 'nasa']
69 | output: ['asf', 'nasa']
70 | error: null
71 |
72 | - test-validators parse_string_list numbers:
73 | validator: parse_string_list
74 | input: [123, 0.123, 0.0]
75 | output: ['123', '0.123', '0.0']
76 | error: null
77 |
78 | - test-validators parse_int_list strings:
79 | validator: parse_int_list
80 | input: ['1', '2', '123']
81 | output: [1, 2, 123]
82 | error: null
83 |
84 | - test-validators parse_int_list int:
85 | validator: parse_int_list
86 | input: [1, 2, 123]
87 | output: [1, 2, 123]
88 | error: null
89 |
90 | - test-validators parse_int_list floats:
91 | validator: parse_int_list
92 | input: [1.0, 2.0, 123.5]
93 | output: [1, 2, 123]
94 | error: null
95 |
96 | - test-validators parse_int_list error strings:
97 | validator: parse_int_list
98 | input: ['asf', 'nasa']
99 | output: null
100 | error: Invalid int list
101 |
102 | - test-validators parse_bbox_list:
103 | validator: parse_bbox_list
104 | input: [0.0, 0.0, 1.1, 2.5]
105 | output: [0.0, 0.0, 1.1, 2.5]
106 | error: null
107 |
  - test-validators parse_bbox_list error wrong coordinate count:
109 | validator: parse_bbox_list
110 | input: [0.0, 0.0, 1.1, 2.5, 5.5]
111 | output: null
112 | error: Invalid coordinate list
113 |
  - test-validators parse_bbox_list error invalid bbox:
115 | validator: parse_bbox_list
116 | input: [0.0, 0.0, 1.1, 2.5, 5.5, 0.0]
117 | output: null
118 | error: Invalid bbox
119 |
120 | - test-ASFSearchOptions - create blank object:
121 | exception: Null
122 | # At least once, make sure they all exist but are None:
123 | expect_output:
124 | platform: Null
125 | instrument: Null
126 | absoluteOrbit: Null
127 | asfFrame: Null
128 | beamMode: Null
129 | campaign: Null
130 | maxDoppler: Null
131 | minDoppler: Null
132 | maxFaradayRotation: Null
133 | minFaradayRotation: Null
134 | flightDirection: Null
135 | flightLine: Null
136 | frame: Null
137 | granule_list: Null
138 | groupID: Null
139 | lookDirection: Null
140 | offNadirAngle: Null
141 | polarization: Null
142 | processingLevel: Null
143 | product_list: Null
144 | relativeOrbit: Null
145 | processingDate: Null
146 | start: Null
147 | end: Null
148 | season: Null
149 | maxResults: Null
150 |
151 | - test-ASFSearchOptions - create with bad key:
152 | doesNotExist: whatevs
153 | exception: KeyError
154 |
155 | - test-ASFSearchOptions - create with bad value:
156 | asfFrame: potato
157 | exception: ValueError
158 |
159 | - test-ASFSearchOptions - create with valid dates:
160 | exception: Null
161 | start: "2022-01-01"
162 | end: "2022-02-02"
163 | expect_output:
164 | start: "2022-01-01T00:00:00Z"
165 | end: "2022-02-02T00:00:00Z"
166 |
167 | - test-ASFSearchOptions - test with defaults NOT null:
168 | exception: Null
169 | # not sure how to check session...
170 | expect_output:
171 | host: cmr.earthdata.nasa.gov
172 | provider: ASF
173 |
174 | - test-ASFSearchOptions - test with defaults override:
175 | exception: Null
176 | host: does-not-exist.asf.alaska.edu
177 | provider: TOTALLY NOT ASF
178 | # not sure how to check session...
179 | expect_output:
180 | host: does-not-exist.asf.alaska.edu
181 | provider: TOTALLY NOT ASF
182 |
183 | - test-ASFSearchOptions - Circle works:
184 | exception: Null
185 | circle: [0, 0, 100]
186 | expect_output:
187 | circle: [0, 0, 100]
188 |
--------------------------------------------------------------------------------
/asf_search/search/baseline_search.py:
--------------------------------------------------------------------------------
1 | from typing import Optional, Type
2 | from asf_search.baseline.stack import get_baseline_from_stack
3 | from asf_search import ASF_LOGGER
4 | from copy import copy
5 |
6 | from asf_search.search import search, product_search
7 | from asf_search.ASFSearchOptions import ASFSearchOptions
8 | from asf_search.ASFSearchResults import ASFSearchResults
9 | from asf_search import ASFProduct, ARIAS1GUNWProduct
10 | from asf_search.constants import PLATFORM, DATASET
11 | from asf_search.exceptions import ASFSearchError
12 |
13 |
# NOTE(review): platforms listed here appear to be those with precalculated
# baseline stacks (insarStackId-based) rather than baselines computed from
# state vectors — confirm against the get_stack_opts logic that consumes this.
precalc_platforms = [
    PLATFORM.ALOS,
    PLATFORM.RADARSAT,
    PLATFORM.ERS1,
    PLATFORM.ERS2,
    PLATFORM.JERS,
]
21 |
22 |
def stack_from_product(
    reference: ASFProduct,
    opts: ASFSearchOptions = None,
    ASFProductSubclass: Type[ASFProduct] = None,
) -> ASFSearchResults:
    """
    Finds a baseline stack from a reference ASFProduct

    Parameters
    ----------
    reference:
        Reference scene to base the stack on,
        and from which to calculate perpendicular/temporal baselines
    opts:
        An ASFSearchOptions object describing the search parameters to be used.
        Search parameters specified outside this object will override in event of a conflict.
    ASFProductSubclass:
        An ASFProduct subclass constructor.

    Returns
    -------
    `asf_search.ASFSearchResults`
        list of search results of subclass ASFProduct or of provided ASFProductSubclass
    """
    search_opts = copy(opts) if opts is not None else ASFSearchOptions()
    search_opts.merge_args(**dict(reference.get_stack_opts()))

    stack = search(opts=search_opts)

    # remember the search outcome before the stack is rebuilt below
    search_complete = stack.searchComplete

    if ASFProductSubclass is not None:
        _cast_results_to_subclass(stack, ASFProductSubclass)

    stack, stack_warnings = get_baseline_from_stack(reference=reference, stack=stack)
    _post_process_stack(stack, stack_warnings, search_complete)

    return stack
65 |
66 |
def stack_from_id(
    reference_id: str,
    opts: Optional[ASFSearchOptions] = None,
    useSubclass: Optional[Type[ASFProduct]] = None,
) -> ASFSearchResults:
    """
    Finds a baseline stack from a reference product ID

    Parameters
    ----------
    reference_id:
        Reference product to base the stack from,
        and from which to calculate perpendicular/temporal baselines
    opts:
        An ASFSearchOptions object describing the search parameters to be used.
        Search parameters specified outside this object will override in event of a conflict.
    useSubclass:
        An ASFProduct subclass constructor to cast the reference to.

    Returns
    -------
    `asf_search.ASFSearchResults`
        list of search results of subclass ASFProduct or of provided subclass
    """
    search_opts = copy(opts) if opts is not None else ASFSearchOptions()

    # ARIA S1 GUNW products use frame-grouped stacking rather than product search
    if search_opts.dataset is not None and DATASET.ARIA_S1_GUNW in search_opts.dataset:
        group_results = ARIAS1GUNWProduct.get_aria_groups_for_frame(reference_id)
        reference = group_results[0] if len(group_results) else None

        stack, stack_warnings = get_baseline_from_stack(reference=reference, stack=group_results)
        _post_process_stack(stack, stack_warnings, group_results.searchComplete)
        return stack

    reference_results = product_search(product_list=reference_id, opts=search_opts)
    if len(reference_results) == 0:
        raise ASFSearchError(f'Reference product not found: {reference_id}')

    reference = reference_results[0]
    if useSubclass is not None:
        reference = _cast_to_subclass(reference, useSubclass)

    return reference.stack(opts=search_opts, useSubclass=useSubclass)
117 |
118 |
def _cast_results_to_subclass(stack: ASFSearchResults, ASFProductSubclass: Type[ASFProduct]):
    """
    Replaces each product in the stack, in place, with an instance of the given custom subclass
    """
    for position in range(len(stack)):
        stack[position] = _cast_to_subclass(stack[position], ASFProductSubclass)
125 |
126 |
def _cast_to_subclass(product: ASFProduct, subclass: Type[ASFProduct]) -> ASFProduct:
    """
    Casts this ASFProduct object as a new object of return type subclass.

    example:
    ```
    class MyCustomClass(ASFProduct):
        _base_properties = {
            **ASFProduct._base_properties,
            'some_unique_property': {'path': ['AdditionalAttributes', 'UNIQUE_PROPERTY', ...]}
        }

    # subclass as constructor
    customReference = reference.cast_to_subclass(MyCustomClass)
    print(customReference.properties['some_unique_property'])
    ```

    :param subclass: The ASFProduct subclass constructor to call on the product
    :returns return product as `ASFProduct` subclass
    :raises ValueError: if subclass is not a usable class, or its constructor fails
    """

    try:
        # type(ASFProduct) is ASFProduct's metaclass, so this guard accepts any
        # class object built by that metaclass. NOTE(review): this is looser than
        # issubclass(subclass, ASFProduct) — confirm whether duck-typed classes
        # are intentionally allowed here.
        if isinstance(subclass, type(ASFProduct)):
            # Rebuild the product from its raw umm/meta, reusing the same session
            return subclass(
                args={'umm': product.umm, 'meta': product.meta}, session=product.session
            )
    except Exception as e:
        # The constructor (or the guard) raised — surface the cause as a ValueError
        raise ValueError(f'Unable to use provided subclass {type(subclass)}, \nError Message: {e}')

    # Fell through the guard without returning: subclass was not a class
    # object of the expected metaclass (e.g. an instance was passed instead).
    raise ValueError(f'Expected ASFProduct subclass constructor, got {type(subclass)}')
157 |
def _post_process_stack(stack: ASFSearchResults, warnings: list, is_complete: bool):
    """Restores the search-completion flag on the stack, logs each accumulated
    warning, and orders the stack by temporal baseline."""
    stack.searchComplete = is_complete  # preserve final outcome of earlier search()

    for entry in warnings:
        ASF_LOGGER.warning(f'{entry}')

    stack.sort(key=lambda item: item.properties['temporalBaseline'])
164 |
--------------------------------------------------------------------------------