├── usgs_m2m
├── __init__.py
├── usgsErrors.py
├── otherMethods.py
├── checkResponse.py
├── usgsDataTypes.py
└── usgsMethods.py
├── MANIFEST.in
├── setup.py
├── pyproject.toml
├── README.md
├── examples
└── UsageExample.py
└── LICENSE
/usgs_m2m/__init__.py:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | prune tests
2 | prune !misc
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
# Legacy build shim: all package metadata is declared in pyproject.toml;
# this file exists only for tooling that still invokes setup.py directly.
from setuptools import setup

setup()
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools",
4 | "setuptools-scm"
5 | ]
6 | build-backend = "setuptools.build_meta"
7 |
8 | [project]
9 | name = "usgs_m2m"
10 | authors = [
11 | { name = "MrChebur" },
12 | ]
13 | description = "Python wrapper of the USGS/EROS Inventory Service Machine-to-Machine API"
14 | readme = "README.md"
15 | version = "0.1.0"
16 | requires-python = ">= 3.9"
17 | dependencies = ["requests",
18 | "tqdm"
19 | ]
20 |
21 | [project.urls]
22 | Homepage = "https://github.com/MrChebur/usgs-machine-to-machine-API"
23 | Issues = "https://github.com/MrChebur/usgs-machine-to-machine-API/issues"
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | 
2 | Simple Python wrapper of the USGS/EROS Inventory Service Machine-to-Machine API as described in the [documents](https://m2m.cr.usgs.gov/api/docs/json/).
3 |
4 | ----------------------------------------
5 | **UPDATE 2024.12.06**
6 | * All USGS methods, data types and errors are now consistent with the USGS API.
7 | * All USGS data types are now classes. Use `.dict` to convert them to a dictionary.
8 | * Type hints added to USGS methods and data types.
9 | * The old USGS `login` method will be deprecated in February 2025 (a warning has been added).
10 | ----------------------------------------
11 | > [!IMPORTANT]
12 | >
13 | > You must have machine-to-machine access to execute queries.
14 | > You can order access [here](https://ers.cr.usgs.gov/profile/access).
15 | ----------------------------------------
16 | **INSTALLATION**
17 | ```
18 | pip install https://github.com/MrChebur/usgs-machine-to-machine-API/archive/master.zip
19 | ```
20 | ----------------------------------------
21 | **CODE EXAMPLES**
22 |
23 | See another code example [here](https://github.com/MrChebur/usgs-machine-to-machine-API/blob/master/examples/UsageExample.py).
24 | ```python
25 | from usgs_m2m.usgsMethods import API as M2M
26 |
27 | api = M2M()
28 | api.loginToken('usgs_username', 'usgs_token')
29 | permissions = api.permissions()
30 | print(permissions)
31 | # {
32 | # 'requestId': '00000000',
33 | # 'version': 'stable',
34 | # 'sessionId': '00000000',
35 | # 'data': ['user', 'download', 'order'],
36 | # 'errorCode': None,
37 | # 'errorMessage': None
38 | # }
39 | ```
40 | ----------------------------------------
--------------------------------------------------------------------------------
/usgs_m2m/usgsErrors.py:
--------------------------------------------------------------------------------
1 | """
2 | Implementation date: 30.07.2020
3 | Revision date: 20.11.2024
4 |
5 | Simple error wrappers.
6 |
7 | According to document:
8 | https://m2m.cr.usgs.gov/api/docs/exceptioncodes/
9 | """
10 |
11 |
12 | # ======================= Module `General` =======================
13 |
14 | class ENDPOINT_UNAVAILABLE(Exception):
15 | pass
16 |
17 |
18 | class UNKNOWN(Exception):
19 | pass
20 |
21 |
22 | class INPUT_FORMAT(Exception):
23 | pass
24 |
25 |
26 | class INPUT_PARAMETER_INVALID(Exception):
27 | pass
28 |
29 |
30 | class INPUT_INVALID(Exception):
31 | pass
32 |
33 |
34 | class NOT_FOUND(Exception):
35 | pass
36 |
37 |
38 | class SERVER_ERROR(Exception):
39 | pass
40 |
41 |
42 | class VERSION_UNKNOWN(Exception):
43 | pass
44 |
45 |
46 | # ======================= Module `Authentication` =======================
47 | class AUTH_INVALID(Exception):
48 | pass
49 |
50 |
51 | class AUTH_UNAUTHROIZED(Exception):
52 | pass
53 |
54 |
55 | class AUTH_KEY_INVALID(Exception):
56 | pass
57 |
58 |
59 | # ======================= Module `Rate Limit` =======================
60 |
61 | class RATE_LIMIT(Exception):
62 | pass
63 |
64 |
65 | class RATE_LIMIT_USER_DL(Exception):
66 | pass
67 |
68 |
69 | # ======================= Module `Download` =======================
70 |
71 |
72 | class DOWNLOAD_ERROR(Exception):
73 | pass
74 |
75 |
76 | # ======================= Module `Export` =======================
77 |
78 | class EXPORT_ERROR(Exception):
79 | pass
80 |
81 |
82 | # ======================= Module `Inventory` =======================
83 |
84 |
85 | class DATASET_ERROR(Exception):
86 | pass
87 |
88 |
89 | class DATASET_UNAUTHORIZED(Exception):
90 | pass
91 |
92 |
93 | class DATASET_AUTH(Exception):
94 | pass
95 |
96 |
97 | class DATASET_INVALID(Exception):
98 | pass
99 |
100 |
101 | class DATASET_CUSTOM_CLEAR_ERROR(Exception):
102 | pass
103 |
104 |
105 | class DATASET_CUSTOM_GET_ERROR(Exception):
106 | pass
107 |
108 |
109 | class DATASET_CUSTOMS_GET_ERROR(Exception):
110 | pass
111 |
112 |
113 | class DATASET_CUSTOM_SET_ERROR(Exception):
114 | pass
115 |
116 |
117 | class DATASET_CUSTOMS_SET_ERROR(Exception):
118 | pass
119 |
120 |
121 | class SEARCH_CREATE_ERROR(Exception):
122 | pass
123 |
124 |
125 | class SEARCH_ERROR(Exception):
126 | pass
127 |
128 |
129 | class SEARCH_EXECUTE_ERROR(Exception):
130 | pass
131 |
132 |
133 | class SEARCH_FAILED(Exception):
134 | pass
135 |
136 |
137 | class SEARCH_RESULT_ERROR(Exception):
138 | pass
139 |
140 |
141 | class SEARCH_UNAVAILABLE(Exception):
142 | pass
143 |
144 |
145 | class SEARCH_UPDATE_ERROR(Exception):
146 | pass
147 |
148 |
149 | # ======================= Module `Orders` =======================
150 |
151 | class ORDER_ERROR(Exception):
152 | pass
153 |
154 |
155 | class ORDER_AUTH(Exception):
156 | pass
157 |
158 |
159 | class ORDER_INVALID(Exception):
160 | pass
161 |
162 |
163 | class RESTORE_ORDER_ERROR(Exception):
164 | pass
165 |
166 |
167 | # ======================= Module `Subscription` s=======================
168 |
169 |
170 | class SUBSCRIPTION_ERROR(Exception):
171 | pass
172 |
--------------------------------------------------------------------------------
/usgs_m2m/otherMethods.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 |
4 | import requests
5 | import logging
6 | from datetime import datetime
7 | from tqdm import tqdm
8 |
9 | from .usgsDataTypes import DownloadInput
10 |
11 |
12 | # noinspection PyPep8Naming
13 |
class otherMethods:
    """
    Implementation date: 06.08.2020

    Convenience helpers built on top of the raw API wrapper:
    product download and file-size queries.
    """

    @classmethod
    def download(cls, api, datasetName, entityId, productName, output_dir):
        """Download *productName* for one scene of *datasetName*.

        :param api: (usgsMethods) Instance of usgsMethods()
        :param datasetName: (str) In example: 'LANDSAT_OT_C2_L1'
        :param entityId: (str) scene identifier
        :param productName: (str) In example: 'Landsat Collection 2 Level-1 Product Bundle'
        :param output_dir: directory for downloaded files
        :return: (list) per-download results from `_download`
                 (file path, None, or 'Skip'); empty list when the
                 requested product is not available.
        """
        downloadOptions = api.downloadOptions(datasetName=datasetName, entityIds=entityId)
        productId = None
        for downloadOption in downloadOptions['data']:
            if downloadOption['productName'] == productName and downloadOption['available']:
                productId = downloadOption['id']
                break
        if productId is None:
            logging.error(f"{datetime.now()} Can't find productName={productName} in datasetName={datasetName}")
            # Fail fast: previously execution continued with productId=None,
            # sending an invalid download request to the API.
            return []

        download = DownloadInput(entityId=entityId, productId=productId).dict
        downloadRequest = api.downloadRequest(downloads=[download], returnAvailable=True)

        if downloadRequest['data']['failed']:
            logging.warning(f'{datetime.now()} downloadRequest failed see respond:\n{downloadRequest}')

        # Preparing downloads are attempted too; `_download` reports 'Skip'
        # for anything the server is not ready to serve yet.
        availableDownloads = downloadRequest['data']['availableDownloads'] + \
                             downloadRequest['data']['preparingDownloads']
        results_list = []
        for availableDownload in availableDownloads:
            path = cls._download(availableDownload['url'], output_dir, connect_timeout=21, read_timeout=30)
            results_list.append(path)
        return results_list

    @classmethod
    def _download(cls, url, output_dir, chunk_size=1024, connect_timeout=None, read_timeout=None):
        """Stream *url* into *output_dir* with a progress bar.

        :param url: (str) direct download URL
        :param output_dir: target directory
        :param chunk_size: (int) bytes per streamed chunk
        :param connect_timeout: seconds to wait for the connection, or None
        :param read_timeout: seconds to wait between bytes, or None
        :return: file_path: (str) - if successful, None - if interrupted, 'Skip' - if landsat file is offline,
        """
        with requests.get(url, stream=True, allow_redirects=True, timeout=(connect_timeout, read_timeout)) as r:
            # Both headers must be present for a downloadable file; if either
            # is absent the file is not downloadable right now.
            try:
                expected_file_size = int(r.headers['Content-Length'])
                file_name = r.headers['Content-Disposition'].split('"')[1]
            except KeyError:
                return 'Skip'

            file_path = os.path.join(output_dir, file_name)
            with tqdm(desc="Downloading", total=expected_file_size, unit_scale=True, unit='B') as progressbar:
                with open(file_path, 'wb') as f:
                    for chunk in r.iter_content(chunk_size=chunk_size):
                        if chunk:
                            progressbar.update(len(chunk))
                            f.write(chunk)

        if cls.is_download_ok(expected_file_size, file_path):
            return file_path
        # Size mismatch means an interrupted/corrupt download; remove the
        # partial file so a retry starts clean.
        # todo: Test the ability to continue files downloading.
        # todo: If it is possible to continue downloading, do not delete the file, but try to continue the interrupted download.
        os.remove(file_path)
        return None

    @classmethod
    def is_download_ok(cls, expected_file_size, filename):
        """Return True if *filename* exists and is exactly *expected_file_size* bytes."""
        return os.path.isfile(filename) and os.path.getsize(filename) == expected_file_size

    @classmethod
    def request_filesize(cls, api, datasetName, productName, entityId):
        """
        :param api: (usgsMethods) Instance of usgsMethods()
        :param datasetName: (str) In example: 'LANDSAT_8_C1', 'LANDSAT_OT_C2_L1'
        :param productName: (str) In example: 'Level-1 GeoTIFF Data Product', 'Landsat Collection 2 Level-1 Product Bundle'
        :param entityId: (str) entityId
        :return: (int) Product file size, or None when the dataset has no
                 data or the product name is not found
        """
        downloadOptions = api.downloadOptions(datasetName=datasetName, entityIds=entityId)
        if downloadOptions['data'] is None:
            print(f"Error: Can't request files size. downloadOptions['data'] is {downloadOptions['data']}")
            return None
        for downloadOption in downloadOptions['data']:
            if productName == downloadOption['productName']:
                return int(downloadOption['filesize'])
        return None
--------------------------------------------------------------------------------
/usgs_m2m/checkResponse.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from .usgsErrors import *
3 |
4 |
def _check_response(response):
    """Run the full validation chain on an API response.

    Order matters: presence of the response object first, then the
    HTTP status code, then the USGS-level error payload embedded in
    the JSON body.
    """
    for check in (_check_if_response_is_none,
                  _check_http_response,
                  _check_usgs_error):
        check(response)
9 |
10 |
def _check_if_response_is_none(response):
    """Raise ``TypeError`` if *response* is ``None``.

    Guards the later checks, which assume a response object exposing
    ``raise_for_status()`` and ``json()``.

    :param response: requests.Response (or None)
    :raises TypeError: when *response* is None
    """
    if response is None:
        # Same exception type as before (backward compatible), but now
        # with a message explaining what went wrong.
        raise TypeError('response is None - no HTTP response was received')
14 |
15 |
def _check_http_response(response):
    """Report HTTP-level problems with *response*.

    Returns ``True`` when ``raise_for_status()`` passes and ``False``
    otherwise (previously the failure path returned ``None`` implicitly).
    Errors are printed rather than re-raised so that the USGS-level error
    payload can still be inspected afterwards - this best-effort behaviour
    is kept from the original implementation.

    NOTE(review): ``raise_for_status()`` can only raise ``HTTPError``;
    the remaining handlers are kept defensively in case a different
    response-like object is passed in.

    :param response: requests.Response
    :return: bool - True on HTTP success, False on any request exception
    """
    try:
        response.raise_for_status()
        return True
    except requests.exceptions.HTTPError as error:
        print("HTTP Error:", error)
    except requests.exceptions.ConnectionError as error:
        print("Error Connecting:", error)
    except requests.exceptions.Timeout as error:
        print("Timeout Error:", error)
    except requests.exceptions.RequestException as error:
        print("Oops: Something Else", error)
    # Each original handler printed the response too; consolidated here.
    print(response)
    return False
32 |
33 |
def _check_usgs_error(response):
    """Translate a USGS-level error payload into a raised exception.

    The API reports failures inside the JSON body via ``errorCode`` /
    ``errorMessage`` even when the HTTP status is 200.  Every documented
    code has an exception class of exactly the same name in
    ``usgsErrors`` (imported via ``from .usgsErrors import *``), so the
    lookup is done by name instead of a 40-branch if/elif chain.
    Unrecognised codes fall back to ``UNKNOWN``, matching the original
    behaviour.

    :param response: requests.Response whose body is a USGS JSON payload
    :return: True when the payload carries no error
    :raises: the usgsErrors class matching ``errorCode`` (UNKNOWN otherwise)
    """
    # Renamed from `json` to avoid shadowing the stdlib module name.
    payload = response.json()
    errorCode = payload['errorCode']
    errorMessage = payload['errorMessage']

    if errorCode is None:
        return True

    # Preserved diagnostic behaviour: dump the payload before raising.
    print(payload)

    # Resolve the exception class by name; the isinstance/issubclass guard
    # ensures we never raise a non-exception module attribute.
    candidate = globals().get(errorCode)
    if isinstance(candidate, type) and issubclass(candidate, Exception):
        exception_class = candidate
    else:
        exception_class = UNKNOWN
    raise exception_class(f'{errorCode}: {errorMessage}')
189 |
--------------------------------------------------------------------------------
/examples/UsageExample.py:
--------------------------------------------------------------------------------
1 | import json
2 | from pathlib import Path
3 |
4 | from usgs_m2m.usgsMethods import API as M2M
5 | from usgs_m2m.otherMethods import otherMethods
6 | from usgs_m2m.usgsDataTypes import (GeoJson,
7 | SpatialFilterGeoJson,
8 | AcquisitionFilter,
9 | SceneFilter,
10 | )
11 |
12 |
def example_search_scene_and_download_quicklook():
    """Search one scene of `landsat_ot_c2_l1` over a polygon ROI and download
    its natural-colors GeoTIFF product into the current directory.
    """
    print()
    # In order not to store the login/password in the code - auth with json-formatted text file:
    # {"username": "username", "password": "password"}
    # NOTE(review): the keys read below are 'usgs_username1'/'usgs_token1',
    # which does not match the format shown above - confirm the auth file layout.
    txt_path = r"E:\kupriyanov\!auth\query_usgs_auth.json"

    with open(txt_path, 'r') as file:
        json_data = json.load(file)
        usgs_username = json_data['usgs_username1']
        usgs_token = json_data['usgs_token1']

    api = M2M()  # instance created
    api.loginToken(usgs_username, usgs_token)  # this is new login method
    api.loud_mode = True

    # Region of interest coordinate. Too long coordinates list may throw 404 HTTP errors!
    # Examples:
    # 'Point' [lat ,lon]
    # 'Polygon' [[ [lat ,lon], ... ]]
    ROI = [[
        [59.19852, 63.06039],
        [59.62473, 64.80140],
        [62.05751, 65.70580],
        [62.86149, 65.26510],
        [63.24590, 64.51990],
        [65.99469, 64.58008],
        [66.97107, 64.50871],
        [67.49873, 64.08241],
        [68.96698, 64.44441],
        [70.34046, 64.31480],
        [71.58613, 63.35573],
        [73.10955, 63.37922],
        [76.69435, 63.02787],
        [77.98457, 62.51861],
        [79.89941, 62.77741],
        [81.03670, 63.14609],
        [83.96038, 62.48975],
        [85.97549, 61.48612],
        [84.16387, 60.85305],
        [82.12599, 60.53366],
        [77.11535, 60.73926],
        [76.67393, 59.58814],
        [74.99998, 58.69959],
        [72.52650, 59.15018],
        [69.39066, 59.91733],
        [66.74067, 58.64882],
        [65.70599, 58.65047],
        [61.15117, 61.67129],
        [59.40793, 62.09941],
        [59.19852, 63.06039],
    ]]

    datasetName = 'landsat_ot_c2_l1'

    # Build the nested filter structure bottom-up; `.dict` converts each
    # usgsDataTypes wrapper into the plain dict the API expects.
    geoJson = GeoJson(type='Polygon', coordinates=ROI).dict
    spatialFilter = SpatialFilterGeoJson(filterType='geojson', geoJson=geoJson).dict
    acquisitionFilter = AcquisitionFilter(start="2020-07-30", end="2020-07-31").dict
    sceneFilter = SceneFilter(acquisitionFilter=acquisitionFilter,
                              cloudCoverFilter=None,
                              datasetName=datasetName,
                              ingestFilter=None,
                              metadataFilter=None,
                              seasonalFilter=None,
                              spatialFilter=spatialFilter).dict
    # print('\nsceneFilter=')
    # pprint(sceneFilter)
    #
    # When using polygons in the sceneSearch method, images that do not lie within the boundaries of the polygon are returned.
    # This is due to the fact that the contours of the images lie at the border of 180/-180 degrees in the projection WGS 84 (EPSG: 4326).
    sceneSearchResult = api.sceneSearch(datasetName=datasetName,
                                        maxResults=1,
                                        startingNumber=None,
                                        metadataType='full',
                                        sortField=None,  # "Acquisition Date", '5e83d0b92ff6b5e8' - doesn't work
                                        sortDirection='ASC',
                                        sceneFilter=sceneFilter,
                                        compareListName=None,
                                        bulkListName=None,
                                        orderListName=None,
                                        excludeListName=None)
    # print('\nsceneSearchResult=')
    # pprint(sceneSearchResult)

    dataset_info = api.dataset(datasetName=datasetName)
    dataset_alias = dataset_info['data']['datasetAlias']
    print(f'dataset_alias={dataset_alias}')

    # Only the last result's entityId is kept (harmless here: maxResults=1).
    entityId = None
    for searchResult in sceneSearchResult['data']['results']:
        entityId = searchResult['entityId']
        print(f'Scene name (entityId): {entityId}')

    # pprint(dataset_info)

    print(f'\nSearching {dataset_alias} dataset products...')
    products = api.datasetBulkProducts(dataset_alias)
    # pprint(products)

    # Pick the last product whose name mentions both 'geotiff' and 'natural'.
    product_name_natural_colors = None
    for product in products['data']:
        product_name = product['productName']
        if 'geotiff' in product_name.lower() and 'natural' in product_name.lower():
            product_name_natural_colors = product_name

    print(f'Downloading product: {product_name_natural_colors}')
    results = otherMethods.download(api,
                                    datasetName=datasetName,
                                    entityId=entityId,
                                    output_dir=Path(r'.\\').resolve(),
                                    productName=product_name_natural_colors)
    print(results)
    api.logout()
125 |
126 |
def example_print_scene_size():
    """Query and print the archive file size of one Landsat scene, using the
    username/password `login` method (deprecated in February 2025).
    """
    # In order not to store the login/password in the code - auth with json-formatted text file:
    # {"username": "username", "password": "password"}
    # NOTE(review): the keys read below are 'usgs_username1'/'usgs_password1',
    # which does not match the format shown above - confirm the auth file layout.
    txt_path = r"E:\kupriyanov\!auth\query_usgs_auth.json"
    with open(txt_path, 'r') as file:
        json_data = json.load(file)
        usgs_username = json_data['usgs_username1']
        usgs_password = json_data['usgs_password1']

    api = M2M()  # instance created
    api.login(usgs_username, usgs_password)  # login method will be deprecated in February 2025
    api.loud_mode = True

    datasetName = 'LANDSAT_OT_C2_L1'

    filesize_usgs = otherMethods.request_filesize(api,
                                                  datasetName,
                                                  productName='Landsat Collection 2 Level-1 Product Bundle',
                                                  entityId='LC81650162022019LGN00')
    print(f'filesize_usgs={filesize_usgs} bytes')
    api.logout()
    print('Done!')
149 |
150 |
# Run both usage examples when executed as a script (each performs real
# network I/O against the USGS M2M service and requires valid credentials).
if __name__ == '__main__':
    print('\nExecuting `example_print_scene_size()`')
    example_print_scene_size()

    print('\nExecuting `example_search_scene_and_download_quicklook()`')
    example_search_scene_and_download_quicklook()
157 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | GNU GENERAL PUBLIC LICENSE
2 | Version 3, 29 June 2007
3 |
4 | Copyright (C) 2007 Free Software Foundation, Inc.
5 | Everyone is permitted to copy and distribute verbatim copies
6 | of this license document, but changing it is not allowed.
7 |
8 | Preamble
9 |
10 | The GNU General Public License is a free, copyleft license for
11 | software and other kinds of works.
12 |
13 | The licenses for most software and other practical works are designed
14 | to take away your freedom to share and change the works. By contrast,
15 | the GNU General Public License is intended to guarantee your freedom to
16 | share and change all versions of a program--to make sure it remains free
17 | software for all its users. We, the Free Software Foundation, use the
18 | GNU General Public License for most of our software; it applies also to
19 | any other work released this way by its authors. You can apply it to
20 | your programs, too.
21 |
22 | When we speak of free software, we are referring to freedom, not
23 | price. Our General Public Licenses are designed to make sure that you
24 | have the freedom to distribute copies of free software (and charge for
25 | them if you wish), that you receive source code or can get it if you
26 | want it, that you can change the software or use pieces of it in new
27 | free programs, and that you know you can do these things.
28 |
29 | To protect your rights, we need to prevent others from denying you
30 | these rights or asking you to surrender the rights. Therefore, you have
31 | certain responsibilities if you distribute copies of the software, or if
32 | you modify it: responsibilities to respect the freedom of others.
33 |
34 | For example, if you distribute copies of such a program, whether
35 | gratis or for a fee, you must pass on to the recipients the same
36 | freedoms that you received. You must make sure that they, too, receive
37 | or can get the source code. And you must show them these terms so they
38 | know their rights.
39 |
40 | Developers that use the GNU GPL protect your rights with two steps:
41 | (1) assert copyright on the software, and (2) offer you this License
42 | giving you legal permission to copy, distribute and/or modify it.
43 |
44 | For the developers' and authors' protection, the GPL clearly explains
45 | that there is no warranty for this free software. For both users' and
46 | authors' sake, the GPL requires that modified versions be marked as
47 | changed, so that their problems will not be attributed erroneously to
48 | authors of previous versions.
49 |
50 | Some devices are designed to deny users access to install or run
51 | modified versions of the software inside them, although the manufacturer
52 | can do so. This is fundamentally incompatible with the aim of
53 | protecting users' freedom to change the software. The systematic
54 | pattern of such abuse occurs in the area of products for individuals to
55 | use, which is precisely where it is most unacceptable. Therefore, we
56 | have designed this version of the GPL to prohibit the practice for those
57 | products. If such problems arise substantially in other domains, we
58 | stand ready to extend this provision to those domains in future versions
59 | of the GPL, as needed to protect the freedom of users.
60 |
61 | Finally, every program is threatened constantly by software patents.
62 | States should not allow patents to restrict development and use of
63 | software on general-purpose computers, but in those that do, we wish to
64 | avoid the special danger that patents applied to a free program could
65 | make it effectively proprietary. To prevent this, the GPL assures that
66 | patents cannot be used to render the program non-free.
67 |
68 | The precise terms and conditions for copying, distribution and
69 | modification follow.
70 |
71 | TERMS AND CONDITIONS
72 |
73 | 0. Definitions.
74 |
75 | "This License" refers to version 3 of the GNU General Public License.
76 |
77 | "Copyright" also means copyright-like laws that apply to other kinds of
78 | works, such as semiconductor masks.
79 |
80 | "The Program" refers to any copyrightable work licensed under this
81 | License. Each licensee is addressed as "you". "Licensees" and
82 | "recipients" may be individuals or organizations.
83 |
84 | To "modify" a work means to copy from or adapt all or part of the work
85 | in a fashion requiring copyright permission, other than the making of an
86 | exact copy. The resulting work is called a "modified version" of the
87 | earlier work or a work "based on" the earlier work.
88 |
89 | A "covered work" means either the unmodified Program or a work based
90 | on the Program.
91 |
92 | To "propagate" a work means to do anything with it that, without
93 | permission, would make you directly or secondarily liable for
94 | infringement under applicable copyright law, except executing it on a
95 | computer or modifying a private copy. Propagation includes copying,
96 | distribution (with or without modification), making available to the
97 | public, and in some countries other activities as well.
98 |
99 | To "convey" a work means any kind of propagation that enables other
100 | parties to make or receive copies. Mere interaction with a user through
101 | a computer network, with no transfer of a copy, is not conveying.
102 |
103 | An interactive user interface displays "Appropriate Legal Notices"
104 | to the extent that it includes a convenient and prominently visible
105 | feature that (1) displays an appropriate copyright notice, and (2)
106 | tells the user that there is no warranty for the work (except to the
107 | extent that warranties are provided), that licensees may convey the
108 | work under this License, and how to view a copy of this License. If
109 | the interface presents a list of user commands or options, such as a
110 | menu, a prominent item in the list meets this criterion.
111 |
112 | 1. Source Code.
113 |
114 | The "source code" for a work means the preferred form of the work
115 | for making modifications to it. "Object code" means any non-source
116 | form of a work.
117 |
118 | A "Standard Interface" means an interface that either is an official
119 | standard defined by a recognized standards body, or, in the case of
120 | interfaces specified for a particular programming language, one that
121 | is widely used among developers working in that language.
122 |
123 | The "System Libraries" of an executable work include anything, other
124 | than the work as a whole, that (a) is included in the normal form of
125 | packaging a Major Component, but which is not part of that Major
126 | Component, and (b) serves only to enable use of the work with that
127 | Major Component, or to implement a Standard Interface for which an
128 | implementation is available to the public in source code form. A
129 | "Major Component", in this context, means a major essential component
130 | (kernel, window system, and so on) of the specific operating system
131 | (if any) on which the executable work runs, or a compiler used to
132 | produce the work, or an object code interpreter used to run it.
133 |
134 | The "Corresponding Source" for a work in object code form means all
135 | the source code needed to generate, install, and (for an executable
136 | work) run the object code and to modify the work, including scripts to
137 | control those activities. However, it does not include the work's
138 | System Libraries, or general-purpose tools or generally available free
139 | programs which are used unmodified in performing those activities but
140 | which are not part of the work. For example, Corresponding Source
141 | includes interface definition files associated with source files for
142 | the work, and the source code for shared libraries and dynamically
143 | linked subprograms that the work is specifically designed to require,
144 | such as by intimate data communication or control flow between those
145 | subprograms and other parts of the work.
146 |
147 | The Corresponding Source need not include anything that users
148 | can regenerate automatically from other parts of the Corresponding
149 | Source.
150 |
151 | The Corresponding Source for a work in source code form is that
152 | same work.
153 |
154 | 2. Basic Permissions.
155 |
156 | All rights granted under this License are granted for the term of
157 | copyright on the Program, and are irrevocable provided the stated
158 | conditions are met. This License explicitly affirms your unlimited
159 | permission to run the unmodified Program. The output from running a
160 | covered work is covered by this License only if the output, given its
161 | content, constitutes a covered work. This License acknowledges your
162 | rights of fair use or other equivalent, as provided by copyright law.
163 |
164 | You may make, run and propagate covered works that you do not
165 | convey, without conditions so long as your license otherwise remains
166 | in force. You may convey covered works to others for the sole purpose
167 | of having them make modifications exclusively for you, or provide you
168 | with facilities for running those works, provided that you comply with
169 | the terms of this License in conveying all material for which you do
170 | not control copyright. Those thus making or running the covered works
171 | for you must do so exclusively on your behalf, under your direction
172 | and control, on terms that prohibit them from making any copies of
173 | your copyrighted material outside their relationship with you.
174 |
175 | Conveying under any other circumstances is permitted solely under
176 | the conditions stated below. Sublicensing is not allowed; section 10
177 | makes it unnecessary.
178 |
179 | 3. Protecting Users' Legal Rights From Anti-Circumvention Law.
180 |
181 | No covered work shall be deemed part of an effective technological
182 | measure under any applicable law fulfilling obligations under article
183 | 11 of the WIPO copyright treaty adopted on 20 December 1996, or
184 | similar laws prohibiting or restricting circumvention of such
185 | measures.
186 |
187 | When you convey a covered work, you waive any legal power to forbid
188 | circumvention of technological measures to the extent such circumvention
189 | is effected by exercising rights under this License with respect to
190 | the covered work, and you disclaim any intention to limit operation or
191 | modification of the work as a means of enforcing, against the work's
192 | users, your or third parties' legal rights to forbid circumvention of
193 | technological measures.
194 |
195 | 4. Conveying Verbatim Copies.
196 |
197 | You may convey verbatim copies of the Program's source code as you
198 | receive it, in any medium, provided that you conspicuously and
199 | appropriately publish on each copy an appropriate copyright notice;
200 | keep intact all notices stating that this License and any
201 | non-permissive terms added in accord with section 7 apply to the code;
202 | keep intact all notices of the absence of any warranty; and give all
203 | recipients a copy of this License along with the Program.
204 |
205 | You may charge any price or no price for each copy that you convey,
206 | and you may offer support or warranty protection for a fee.
207 |
208 | 5. Conveying Modified Source Versions.
209 |
210 | You may convey a work based on the Program, or the modifications to
211 | produce it from the Program, in the form of source code under the
212 | terms of section 4, provided that you also meet all of these conditions:
213 |
214 | a) The work must carry prominent notices stating that you modified
215 | it, and giving a relevant date.
216 |
217 | b) The work must carry prominent notices stating that it is
218 | released under this License and any conditions added under section
219 | 7. This requirement modifies the requirement in section 4 to
220 | "keep intact all notices".
221 |
222 | c) You must license the entire work, as a whole, under this
223 | License to anyone who comes into possession of a copy. This
224 | License will therefore apply, along with any applicable section 7
225 | additional terms, to the whole of the work, and all its parts,
226 | regardless of how they are packaged. This License gives no
227 | permission to license the work in any other way, but it does not
228 | invalidate such permission if you have separately received it.
229 |
230 | d) If the work has interactive user interfaces, each must display
231 | Appropriate Legal Notices; however, if the Program has interactive
232 | interfaces that do not display Appropriate Legal Notices, your
233 | work need not make them do so.
234 |
235 | A compilation of a covered work with other separate and independent
236 | works, which are not by their nature extensions of the covered work,
237 | and which are not combined with it such as to form a larger program,
238 | in or on a volume of a storage or distribution medium, is called an
239 | "aggregate" if the compilation and its resulting copyright are not
240 | used to limit the access or legal rights of the compilation's users
241 | beyond what the individual works permit. Inclusion of a covered work
242 | in an aggregate does not cause this License to apply to the other
243 | parts of the aggregate.
244 |
245 | 6. Conveying Non-Source Forms.
246 |
247 | You may convey a covered work in object code form under the terms
248 | of sections 4 and 5, provided that you also convey the
249 | machine-readable Corresponding Source under the terms of this License,
250 | in one of these ways:
251 |
252 | a) Convey the object code in, or embodied in, a physical product
253 | (including a physical distribution medium), accompanied by the
254 | Corresponding Source fixed on a durable physical medium
255 | customarily used for software interchange.
256 |
257 | b) Convey the object code in, or embodied in, a physical product
258 | (including a physical distribution medium), accompanied by a
259 | written offer, valid for at least three years and valid for as
260 | long as you offer spare parts or customer support for that product
261 | model, to give anyone who possesses the object code either (1) a
262 | copy of the Corresponding Source for all the software in the
263 | product that is covered by this License, on a durable physical
264 | medium customarily used for software interchange, for a price no
265 | more than your reasonable cost of physically performing this
266 | conveying of source, or (2) access to copy the
267 | Corresponding Source from a network server at no charge.
268 |
269 | c) Convey individual copies of the object code with a copy of the
270 | written offer to provide the Corresponding Source. This
271 | alternative is allowed only occasionally and noncommercially, and
272 | only if you received the object code with such an offer, in accord
273 | with subsection 6b.
274 |
275 | d) Convey the object code by offering access from a designated
276 | place (gratis or for a charge), and offer equivalent access to the
277 | Corresponding Source in the same way through the same place at no
278 | further charge. You need not require recipients to copy the
279 | Corresponding Source along with the object code. If the place to
280 | copy the object code is a network server, the Corresponding Source
281 | may be on a different server (operated by you or a third party)
282 | that supports equivalent copying facilities, provided you maintain
283 | clear directions next to the object code saying where to find the
284 | Corresponding Source. Regardless of what server hosts the
285 | Corresponding Source, you remain obligated to ensure that it is
286 | available for as long as needed to satisfy these requirements.
287 |
288 | e) Convey the object code using peer-to-peer transmission, provided
289 | you inform other peers where the object code and Corresponding
290 | Source of the work are being offered to the general public at no
291 | charge under subsection 6d.
292 |
293 | A separable portion of the object code, whose source code is excluded
294 | from the Corresponding Source as a System Library, need not be
295 | included in conveying the object code work.
296 |
297 | A "User Product" is either (1) a "consumer product", which means any
298 | tangible personal property which is normally used for personal, family,
299 | or household purposes, or (2) anything designed or sold for incorporation
300 | into a dwelling. In determining whether a product is a consumer product,
301 | doubtful cases shall be resolved in favor of coverage. For a particular
302 | product received by a particular user, "normally used" refers to a
303 | typical or common use of that class of product, regardless of the status
304 | of the particular user or of the way in which the particular user
305 | actually uses, or expects or is expected to use, the product. A product
306 | is a consumer product regardless of whether the product has substantial
307 | commercial, industrial or non-consumer uses, unless such uses represent
308 | the only significant mode of use of the product.
309 |
310 | "Installation Information" for a User Product means any methods,
311 | procedures, authorization keys, or other information required to install
312 | and execute modified versions of a covered work in that User Product from
313 | a modified version of its Corresponding Source. The information must
314 | suffice to ensure that the continued functioning of the modified object
315 | code is in no case prevented or interfered with solely because
316 | modification has been made.
317 |
318 | If you convey an object code work under this section in, or with, or
319 | specifically for use in, a User Product, and the conveying occurs as
320 | part of a transaction in which the right of possession and use of the
321 | User Product is transferred to the recipient in perpetuity or for a
322 | fixed term (regardless of how the transaction is characterized), the
323 | Corresponding Source conveyed under this section must be accompanied
324 | by the Installation Information. But this requirement does not apply
325 | if neither you nor any third party retains the ability to install
326 | modified object code on the User Product (for example, the work has
327 | been installed in ROM).
328 |
329 | The requirement to provide Installation Information does not include a
330 | requirement to continue to provide support service, warranty, or updates
331 | for a work that has been modified or installed by the recipient, or for
332 | the User Product in which it has been modified or installed. Access to a
333 | network may be denied when the modification itself materially and
334 | adversely affects the operation of the network or violates the rules and
335 | protocols for communication across the network.
336 |
337 | Corresponding Source conveyed, and Installation Information provided,
338 | in accord with this section must be in a format that is publicly
339 | documented (and with an implementation available to the public in
340 | source code form), and must require no special password or key for
341 | unpacking, reading or copying.
342 |
343 | 7. Additional Terms.
344 |
345 | "Additional permissions" are terms that supplement the terms of this
346 | License by making exceptions from one or more of its conditions.
347 | Additional permissions that are applicable to the entire Program shall
348 | be treated as though they were included in this License, to the extent
349 | that they are valid under applicable law. If additional permissions
350 | apply only to part of the Program, that part may be used separately
351 | under those permissions, but the entire Program remains governed by
352 | this License without regard to the additional permissions.
353 |
354 | When you convey a copy of a covered work, you may at your option
355 | remove any additional permissions from that copy, or from any part of
356 | it. (Additional permissions may be written to require their own
357 | removal in certain cases when you modify the work.) You may place
358 | additional permissions on material, added by you to a covered work,
359 | for which you have or can give appropriate copyright permission.
360 |
361 | Notwithstanding any other provision of this License, for material you
362 | add to a covered work, you may (if authorized by the copyright holders of
363 | that material) supplement the terms of this License with terms:
364 |
365 | a) Disclaiming warranty or limiting liability differently from the
366 | terms of sections 15 and 16 of this License; or
367 |
368 | b) Requiring preservation of specified reasonable legal notices or
369 | author attributions in that material or in the Appropriate Legal
370 | Notices displayed by works containing it; or
371 |
372 | c) Prohibiting misrepresentation of the origin of that material, or
373 | requiring that modified versions of such material be marked in
374 | reasonable ways as different from the original version; or
375 |
376 | d) Limiting the use for publicity purposes of names of licensors or
377 | authors of the material; or
378 |
379 | e) Declining to grant rights under trademark law for use of some
380 | trade names, trademarks, or service marks; or
381 |
382 | f) Requiring indemnification of licensors and authors of that
383 | material by anyone who conveys the material (or modified versions of
384 | it) with contractual assumptions of liability to the recipient, for
385 | any liability that these contractual assumptions directly impose on
386 | those licensors and authors.
387 |
388 | All other non-permissive additional terms are considered "further
389 | restrictions" within the meaning of section 10. If the Program as you
390 | received it, or any part of it, contains a notice stating that it is
391 | governed by this License along with a term that is a further
392 | restriction, you may remove that term. If a license document contains
393 | a further restriction but permits relicensing or conveying under this
394 | License, you may add to a covered work material governed by the terms
395 | of that license document, provided that the further restriction does
396 | not survive such relicensing or conveying.
397 |
398 | If you add terms to a covered work in accord with this section, you
399 | must place, in the relevant source files, a statement of the
400 | additional terms that apply to those files, or a notice indicating
401 | where to find the applicable terms.
402 |
403 | Additional terms, permissive or non-permissive, may be stated in the
404 | form of a separately written license, or stated as exceptions;
405 | the above requirements apply either way.
406 |
407 | 8. Termination.
408 |
409 | You may not propagate or modify a covered work except as expressly
410 | provided under this License. Any attempt otherwise to propagate or
411 | modify it is void, and will automatically terminate your rights under
412 | this License (including any patent licenses granted under the third
413 | paragraph of section 11).
414 |
415 | However, if you cease all violation of this License, then your
416 | license from a particular copyright holder is reinstated (a)
417 | provisionally, unless and until the copyright holder explicitly and
418 | finally terminates your license, and (b) permanently, if the copyright
419 | holder fails to notify you of the violation by some reasonable means
420 | prior to 60 days after the cessation.
421 |
422 | Moreover, your license from a particular copyright holder is
423 | reinstated permanently if the copyright holder notifies you of the
424 | violation by some reasonable means, this is the first time you have
425 | received notice of violation of this License (for any work) from that
426 | copyright holder, and you cure the violation prior to 30 days after
427 | your receipt of the notice.
428 |
429 | Termination of your rights under this section does not terminate the
430 | licenses of parties who have received copies or rights from you under
431 | this License. If your rights have been terminated and not permanently
432 | reinstated, you do not qualify to receive new licenses for the same
433 | material under section 10.
434 |
435 | 9. Acceptance Not Required for Having Copies.
436 |
437 | You are not required to accept this License in order to receive or
438 | run a copy of the Program. Ancillary propagation of a covered work
439 | occurring solely as a consequence of using peer-to-peer transmission
440 | to receive a copy likewise does not require acceptance. However,
441 | nothing other than this License grants you permission to propagate or
442 | modify any covered work. These actions infringe copyright if you do
443 | not accept this License. Therefore, by modifying or propagating a
444 | covered work, you indicate your acceptance of this License to do so.
445 |
446 | 10. Automatic Licensing of Downstream Recipients.
447 |
448 | Each time you convey a covered work, the recipient automatically
449 | receives a license from the original licensors, to run, modify and
450 | propagate that work, subject to this License. You are not responsible
451 | for enforcing compliance by third parties with this License.
452 |
453 | An "entity transaction" is a transaction transferring control of an
454 | organization, or substantially all assets of one, or subdividing an
455 | organization, or merging organizations. If propagation of a covered
456 | work results from an entity transaction, each party to that
457 | transaction who receives a copy of the work also receives whatever
458 | licenses to the work the party's predecessor in interest had or could
459 | give under the previous paragraph, plus a right to possession of the
460 | Corresponding Source of the work from the predecessor in interest, if
461 | the predecessor has it or can get it with reasonable efforts.
462 |
463 | You may not impose any further restrictions on the exercise of the
464 | rights granted or affirmed under this License. For example, you may
465 | not impose a license fee, royalty, or other charge for exercise of
466 | rights granted under this License, and you may not initiate litigation
467 | (including a cross-claim or counterclaim in a lawsuit) alleging that
468 | any patent claim is infringed by making, using, selling, offering for
469 | sale, or importing the Program or any portion of it.
470 |
471 | 11. Patents.
472 |
473 | A "contributor" is a copyright holder who authorizes use under this
474 | License of the Program or a work on which the Program is based. The
475 | work thus licensed is called the contributor's "contributor version".
476 |
477 | A contributor's "essential patent claims" are all patent claims
478 | owned or controlled by the contributor, whether already acquired or
479 | hereafter acquired, that would be infringed by some manner, permitted
480 | by this License, of making, using, or selling its contributor version,
481 | but do not include claims that would be infringed only as a
482 | consequence of further modification of the contributor version. For
483 | purposes of this definition, "control" includes the right to grant
484 | patent sublicenses in a manner consistent with the requirements of
485 | this License.
486 |
487 | Each contributor grants you a non-exclusive, worldwide, royalty-free
488 | patent license under the contributor's essential patent claims, to
489 | make, use, sell, offer for sale, import and otherwise run, modify and
490 | propagate the contents of its contributor version.
491 |
492 | In the following three paragraphs, a "patent license" is any express
493 | agreement or commitment, however denominated, not to enforce a patent
494 | (such as an express permission to practice a patent or covenant not to
495 | sue for patent infringement). To "grant" such a patent license to a
496 | party means to make such an agreement or commitment not to enforce a
497 | patent against the party.
498 |
499 | If you convey a covered work, knowingly relying on a patent license,
500 | and the Corresponding Source of the work is not available for anyone
501 | to copy, free of charge and under the terms of this License, through a
502 | publicly available network server or other readily accessible means,
503 | then you must either (1) cause the Corresponding Source to be so
504 | available, or (2) arrange to deprive yourself of the benefit of the
505 | patent license for this particular work, or (3) arrange, in a manner
506 | consistent with the requirements of this License, to extend the patent
507 | license to downstream recipients. "Knowingly relying" means you have
508 | actual knowledge that, but for the patent license, your conveying the
509 | covered work in a country, or your recipient's use of the covered work
510 | in a country, would infringe one or more identifiable patents in that
511 | country that you have reason to believe are valid.
512 |
513 | If, pursuant to or in connection with a single transaction or
514 | arrangement, you convey, or propagate by procuring conveyance of, a
515 | covered work, and grant a patent license to some of the parties
516 | receiving the covered work authorizing them to use, propagate, modify
517 | or convey a specific copy of the covered work, then the patent license
518 | you grant is automatically extended to all recipients of the covered
519 | work and works based on it.
520 |
521 | A patent license is "discriminatory" if it does not include within
522 | the scope of its coverage, prohibits the exercise of, or is
523 | conditioned on the non-exercise of one or more of the rights that are
524 | specifically granted under this License. You may not convey a covered
525 | work if you are a party to an arrangement with a third party that is
526 | in the business of distributing software, under which you make payment
527 | to the third party based on the extent of your activity of conveying
528 | the work, and under which the third party grants, to any of the
529 | parties who would receive the covered work from you, a discriminatory
530 | patent license (a) in connection with copies of the covered work
531 | conveyed by you (or copies made from those copies), or (b) primarily
532 | for and in connection with specific products or compilations that
533 | contain the covered work, unless you entered into that arrangement,
534 | or that patent license was granted, prior to 28 March 2007.
535 |
536 | Nothing in this License shall be construed as excluding or limiting
537 | any implied license or other defenses to infringement that may
538 | otherwise be available to you under applicable patent law.
539 |
540 | 12. No Surrender of Others' Freedom.
541 |
542 | If conditions are imposed on you (whether by court order, agreement or
543 | otherwise) that contradict the conditions of this License, they do not
544 | excuse you from the conditions of this License. If you cannot convey a
545 | covered work so as to satisfy simultaneously your obligations under this
546 | License and any other pertinent obligations, then as a consequence you may
547 | not convey it at all. For example, if you agree to terms that obligate you
548 | to collect a royalty for further conveying from those to whom you convey
549 | the Program, the only way you could satisfy both those terms and this
550 | License would be to refrain entirely from conveying the Program.
551 |
552 | 13. Use with the GNU Affero General Public License.
553 |
554 | Notwithstanding any other provision of this License, you have
555 | permission to link or combine any covered work with a work licensed
556 | under version 3 of the GNU Affero General Public License into a single
557 | combined work, and to convey the resulting work. The terms of this
558 | License will continue to apply to the part which is the covered work,
559 | but the special requirements of the GNU Affero General Public License,
560 | section 13, concerning interaction through a network will apply to the
561 | combination as such.
562 |
563 | 14. Revised Versions of this License.
564 |
565 | The Free Software Foundation may publish revised and/or new versions of
566 | the GNU General Public License from time to time. Such new versions will
567 | be similar in spirit to the present version, but may differ in detail to
568 | address new problems or concerns.
569 |
570 | Each version is given a distinguishing version number. If the
571 | Program specifies that a certain numbered version of the GNU General
572 | Public License "or any later version" applies to it, you have the
573 | option of following the terms and conditions either of that numbered
574 | version or of any later version published by the Free Software
575 | Foundation. If the Program does not specify a version number of the
576 | GNU General Public License, you may choose any version ever published
577 | by the Free Software Foundation.
578 |
579 | If the Program specifies that a proxy can decide which future
580 | versions of the GNU General Public License can be used, that proxy's
581 | public statement of acceptance of a version permanently authorizes you
582 | to choose that version for the Program.
583 |
584 | Later license versions may give you additional or different
585 | permissions. However, no additional obligations are imposed on any
586 | author or copyright holder as a result of your choosing to follow a
587 | later version.
588 |
589 | 15. Disclaimer of Warranty.
590 |
591 | THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
592 | APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
593 | HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
594 | OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
595 | THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
596 | PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
597 | IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
598 | ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
599 |
600 | 16. Limitation of Liability.
601 |
602 | IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
603 | WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
604 | THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
605 | GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
606 | USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
607 | DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
608 | PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
609 | EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
610 | SUCH DAMAGES.
611 |
612 | 17. Interpretation of Sections 15 and 16.
613 |
614 | If the disclaimer of warranty and limitation of liability provided
615 | above cannot be given local legal effect according to their terms,
616 | reviewing courts shall apply local law that most closely approximates
617 | an absolute waiver of all civil liability in connection with the
618 | Program, unless a warranty or assumption of liability accompanies a
619 | copy of the Program in return for a fee.
620 |
621 | END OF TERMS AND CONDITIONS
622 |
623 | How to Apply These Terms to Your New Programs
624 |
625 | If you develop a new program, and you want it to be of the greatest
626 | possible use to the public, the best way to achieve this is to make it
627 | free software which everyone can redistribute and change under these terms.
628 |
629 | To do so, attach the following notices to the program. It is safest
630 | to attach them to the start of each source file to most effectively
631 | state the exclusion of warranty; and each file should have at least
632 | the "copyright" line and a pointer to where the full notice is found.
633 |
634 |
635 |     Copyright (C) <year>  <name of author>
636 |
637 | This program is free software: you can redistribute it and/or modify
638 | it under the terms of the GNU General Public License as published by
639 | the Free Software Foundation, either version 3 of the License, or
640 | (at your option) any later version.
641 |
642 | This program is distributed in the hope that it will be useful,
643 | but WITHOUT ANY WARRANTY; without even the implied warranty of
644 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
645 | GNU General Public License for more details.
646 |
647 | You should have received a copy of the GNU General Public License
648 | along with this program. If not, see <https://www.gnu.org/licenses/>.
649 |
650 | Also add information on how to contact you by electronic and paper mail.
651 |
652 | If the program does terminal interaction, make it output a short
653 | notice like this when it starts in an interactive mode:
654 |
655 |     <program>  Copyright (C) <year>  <name of author>
656 | This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
657 | This is free software, and you are welcome to redistribute it
658 | under certain conditions; type `show c' for details.
659 |
660 | The hypothetical commands `show w' and `show c' should show the appropriate
661 | parts of the General Public License. Of course, your program's commands
662 | might be different; for a GUI interface, you would use an "about box".
663 |
664 | You should also get your employer (if you work as a programmer) or school,
665 | if any, to sign a "copyright disclaimer" for the program, if necessary.
666 | For more information on this, and how to apply and follow the GNU GPL, see
667 | <https://www.gnu.org/licenses/>.
668 |
669 | The GNU General Public License does not permit incorporating your program
670 | into proprietary programs. If your program is a subroutine library, you
671 | may consider it more useful to permit linking proprietary applications with
672 | the library. If this is what you want to do, use the GNU Lesser General
673 | Public License instead of this License. But first, please read
674 | <https://www.gnu.org/licenses/why-not-lgpl.html>.
675 |
--------------------------------------------------------------------------------
/usgs_m2m/usgsDataTypes.py:
--------------------------------------------------------------------------------
1 | """
2 | Implementation date: 22.11.2024
3 |
4 | This API complies with the data types given in:
5 | https://m2m.cr.usgs.gov/api/docs/datatypes/
6 | """
7 | from typing import Literal
8 |
9 |
class AbstractDataType:
    """
    Base class for USGS API data types.

    Use `self.dict` to read the class attributes as a plain dictionary.
    The snapshot is refreshed on every attribute assignment, so it never
    goes stale after a field is mutated.
    """

    def __init__(self):
        # Subclasses assign their fields first and then build the snapshot.
        self.dict = self._as_dict()

    def _as_dict(self) -> dict:
        """
        Snapshot the public attributes into a dict and cache it on `self.dict`.

        Keys starting with '_' and the `dict` snapshot itself are excluded,
        so the snapshot never contains a stale copy of itself.
        """
        snapshot = {key: value
                    for key, value in self.__dict__.items()
                    if not key.startswith('_') and key != 'dict'}
        # object.__setattr__ avoids re-triggering __setattr__ (recursion).
        object.__setattr__(self, 'dict', snapshot)
        return snapshot

    def __setattr__(self, name, value):
        object.__setattr__(self, name, value)  # avoid recursion: self.name = value
        if name != 'dict':
            # Refresh the cached snapshot so `self.dict` always reflects
            # the current attribute values.
            self._as_dict()
27 |
28 |
29 | # ===================== USGS data types are below this line =====================
30 |
31 | # To make it easier to check for updates and new data types, all classes are listed in order of appearance on the
32 | # source page: https://m2m.cr.usgs.gov/api/docs/datatypes/, but this is against the python rule: "classes must be
33 | # defined before they can be used". So some classes are moved and comments are left where they were originally.
34 |
35 |
class AcquisitionFilter(AbstractDataType):
    """
    Temporal filter bounding a scene's acquisition period.

    :param start: (string) The date the scene began acquisition - ISO 8601 Formatted Date
    :param end: (string) The date the scene ended acquisition - ISO 8601 Formatted Date
    """

    def __init__(self,
                 start: str | None = None,
                 end: str | None = None):
        # Assignment order matters: it fixes the key order of `self.dict`.
        for field_name, field_value in (('start', start), ('end', end)):
            setattr(self, field_name, field_value)

        self.dict = self._as_dict()
49 |
50 |
class CloudCoverFilter(AbstractDataType):
    """
    Limits results by cloud cover, for datasets that support it.

    :param min: (int) Used to limit results by minimum cloud cover
    :param max: (int) Used to limit results by maximum cloud cover
    :param includeUnknown: (boolean) Whether scenes with unknown cloud cover
        values should be included in the results
    """

    def __init__(self,
                 min: int | None = None,  # parameter names fixed by the USGS API
                 max: int | None = None,
                 includeUnknown: bool | None = None):
        # Assignment order matters: it fixes the key order of `self.dict`.
        for key, value in zip(('min', 'max', 'includeUnknown'),
                              (min, max, includeUnknown)):
            setattr(self, key, value)

        self.dict = self._as_dict()
68 |
69 |
class Coordinate(AbstractDataType):
    """
    A point in the EPSG:4326 projection, expressed in decimal degrees.

    :param latitude: (double) Decimal degree coordinate in EPSG:4326 projection
    :param longitude: (double) Decimal degree coordinate in EPSG:4326 projection
    """

    def __init__(self,
                 latitude: float,
                 longitude: float):
        # Tuple assignment preserves the latitude-then-longitude key order.
        self.latitude, self.longitude = latitude, longitude

        self.dict = self._as_dict()
83 |
84 |
class DateRange(AbstractDataType):
    """Temporal filter expressed as a start/end date pair.

    :param startDate: (string) ISO 8601 date opening the temporal window
    :param endDate: (string) ISO 8601 date closing the temporal window
    """

    def __init__(self, startDate: str | None = None, endDate: str | None = None):
        self.startDate, self.endDate = startDate, endDate
        self.dict = self._as_dict()
98 |
99 |
100 | # replaced
class IngestUpdateTemplate(AbstractDataType):
    """Template describing an ingest-update request.

    :param templateId: (string) value must be 'ingestUpdate'
    :param darId: (string) The number of data acquisition request
    :param sceneIds: (string[]) An array of Scene IDs
    :param viewName: (string) The view name of the dataset
    :param idField: (string) ID field used in EE (EE_DISPLAY_ID by default)
    """

    def __init__(self,
                 templateId: Literal['ingestUpdate'] = 'ingestUpdate',
                 darId: str | None = None,
                 sceneIds: list[str] | None = None,
                 viewName: str | None = None,
                 idField: str | None = None):
        self.templateId, self.darId = templateId, darId
        self.sceneIds, self.viewName = sceneIds, viewName
        self.idField = idField
        self.dict = self._as_dict()
123 |
124 |
class TemplateConfiguration(IngestUpdateTemplate):
    """Abstract data model; use IngestUpdateTemplate instead."""

    def __init__(self, **kwargs):
        # Everything is delegated to the concrete template implementation.
        super().__init__(**kwargs)
132 |
133 |
class GeoJson(AbstractDataType):
    """GeoJSON geometry wrapper.

    :param type: (string) Geometry type supported by GeoJSON, e.g. polygon
    :param coordinates: (coordinate[]) Coordinate array
    """

    def __init__(self, type: str, coordinates: list | list[Coordinate]):
        self.type, self.coordinates = type, coordinates
        self.dict = self._as_dict()
147 |
148 |
149 | # IngestUpdateTemplate original place
150 |
class IngestFilter(AbstractDataType):
    """Filters scenes by the date of their last metadata ingest.

    :param start: (string) Opening bound of the ingest window
    :param end: (string) Closing bound of the ingest window
    """

    def __init__(self, start: str | None = None, end: str | None = None):
        self.start, self.end = start, end
        self.dict = self._as_dict()
164 |
165 |
class MetadataFilter(AbstractDataType):  # MetadataFilter/Metadata
    """
    This is an abstract data model, use MetadataAnd, MetadataBetween, MetadataOr, or MetadataValue

    :param kwargs: Must contain 'filterType' ('and', 'between', 'or' or 'value');
        the remaining keys are forwarded to the matching concrete filter initializer.
    :raises ValueError: If 'filterType' is missing or not one of the supported values.
    """

    def __init__(self, **kwargs):
        if 'filterType' not in kwargs:
            raise ValueError('filterType parameter is required')

        # Concrete classes are looked up at call time because they are defined
        # later in this module.
        dispatch = {
            'and': MetadataAnd,
            'between': MetadataBetween,
            'or': MetadataOr,
            'value': MetadataValue,
        }
        concrete = dispatch.get(kwargs['filterType'])
        if concrete is None:
            # Bug fix: the previous message referenced SpatialFilterMbr/SpatialFilterGeoJson
            # (copy-pasted from SpatialFilter); point callers at the metadata filter types.
            raise ValueError(
                f'Invalid filterType value: {kwargs["filterType"]}. '
                f'Check for MetadataAnd, MetadataBetween, MetadataOr or MetadataValue data types')
        # noinspection PyTypeChecker
        concrete.__init__(self, **kwargs)
194 |
195 |
class MetadataAnd(AbstractDataType):
    """Conjunction of metadata filters.

    :param filterType: (string) Value must be "and"
    :param childFilters: (metadataFilter[]) Filters joined with the "and" logical operator
    """

    def __init__(self, childFilters: list[MetadataFilter],
                 filterType: Literal['and'] = 'and'):
        self.childFilters, self.filterType = childFilters, filterType
        self.dict = self._as_dict()
209 |
210 |
class MetadataBetween(AbstractDataType):
    """Metadata range criterion ("between" clause).

    :param filterType: (string) Value must be "between"
    :param filterId: (string) Unique identifier for the dataset criteria field,
        retrievable via https://m2m.cr.usgs.gov/api/docs/reference/#dataset-filters
    :param firstValue: (int) First value in between clause
    :param secondValue: (int) Second value in between clause
    """

    def __init__(self,
                 filterType: Literal['between'],
                 filterId: str | None = None,
                 firstValue: int | None = None,
                 secondValue: int | None = None):
        self.filterType, self.filterId = filterType, filterId
        self.firstValue, self.secondValue = firstValue, secondValue
        self.dict = self._as_dict()
231 |
232 |
class MetadataOr(AbstractDataType):
    """Disjunction of metadata filters.

    :param filterType: (string) Value must be "or"
    :param childFilters: (metadataFilter[]) Filters joined with the "or" logical operator
    """

    def __init__(self, childFilters: list[MetadataFilter],
                 filterType: Literal['or'] = 'or'):
        self.filterType, self.childFilters = filterType, childFilters
        self.dict = self._as_dict()
246 |
247 |
class MetadataValue(AbstractDataType):
    """Single metadata value criterion.

    :param filterType: (string) Value must be "value"
    :param filterId: (string) Unique identifier for the dataset criteria field,
        retrievable via dataset-filters
    :param value: (string) Value to use
    :param operand: (string) Search operand - accepted values are "=" and "like"
    """

    def __init__(self,
                 filterType: Literal['value'] = 'value',
                 filterId: str | None = None,
                 value: str | None = None,
                 operand: str | None = None):
        self.filterType, self.filterId = filterType, filterId
        self.value, self.operand = value, operand
        self.dict = self._as_dict()
267 |
268 |
269 | # replaced
class SpatialFilter(AbstractDataType):
    """
    This is an abstract data model, use SpatialFilterMbr or SpatialFilterGeoJson

    :param kwargs: Must contain 'filterType' ('mbr' or 'geojson'); the remaining
        keys are forwarded to the matching concrete filter initializer.
    :raises ValueError: If 'filterType' is missing or unsupported.
    """

    def __init__(self, **kwargs):
        if 'filterType' not in kwargs:
            # Plain string (was an f-string with no placeholders).
            raise ValueError('filterType parameter is required')

        if kwargs['filterType'] == "mbr":
            # noinspection PyTypeChecker
            SpatialFilterMbr.__init__(self, **kwargs)

        elif kwargs['filterType'] == "geojson":
            # noinspection PyTypeChecker
            SpatialFilterGeoJson.__init__(self, **kwargs)

        else:
            raise ValueError(
                f'Invalid filterType value: {kwargs["filterType"]}. Check for SpatialFilterMbr or SpatialFilterGeoJson data types')
290 |
291 |
292 | # replaced
class SceneFilter(AbstractDataType):
    """Composite filter applied when searching for scenes.

    :param acquisitionFilter: (AcquisitionFilter) Acquisition date constraint
    :param cloudCoverFilter: (CloudCoverFilter) Cloud cover constraint
    :param datasetName: (string) Dataset name
    :param ingestFilter: (IngestFilter) Metadata ingest date constraint
    :param metadataFilter: (MetadataFilter) Metadata criteria constraint
    :param seasonalFilter: (int[]) Month numbers from 1 to 12
    :param spatialFilter: (SpatialFilter) Spatial constraint
    """

    def __init__(self,
                 acquisitionFilter: AcquisitionFilter | None = None,
                 cloudCoverFilter: CloudCoverFilter | None = None,
                 datasetName: str | None = None,
                 ingestFilter: IngestFilter | None = None,
                 metadataFilter: MetadataFilter | None = None,
                 seasonalFilter: list[int] | None = None,
                 spatialFilter: SpatialFilter | None = None):
        self.acquisitionFilter = acquisitionFilter
        self.cloudCoverFilter = cloudCoverFilter
        self.datasetName, self.ingestFilter = datasetName, ingestFilter
        self.metadataFilter, self.seasonalFilter = metadataFilter, seasonalFilter
        self.spatialFilter = spatialFilter
        self.dict = self._as_dict()
321 |
322 |
class SceneDatasetFilter(AbstractDataType):
    """Pairs a dataset name with a scene filter.

    :param datasetName: (string) Dataset name
    :param sceneFilter: (sceneFilter) Scene filter applied to the data
    """

    def __init__(self, datasetName: str | None = None,
                 sceneFilter: SceneFilter | None = None):
        self.datasetName, self.sceneFilter = datasetName, sceneFilter
        self.dict = self._as_dict()
336 |
337 |
338 | # SceneFilter original place
339 |
340 |
class SceneMetadataConfig(AbstractDataType):
    """Controls how scene metadata is rendered in responses.

    :param includeNulls: (boolean) Whether null values are included
    :param type: (string) Value can be 'full', 'summary' or null
    :param template: (string) Metadata template
    """

    # todo: check if parameter `type` allows None or only 'null' values (as string)?
    def __init__(self, includeNulls: bool | None = None,
                 type: Literal['full', 'summary'] | None = None,
                 template: str | None = None):
        self.includeNulls, self.type = includeNulls, type
        self.template = template
        self.dict = self._as_dict()
358 |
359 |
class SpatialBounds(AbstractDataType):
    """
    This is an abstract data model, use spatialBoundsMbr or geoJson

    :param kwargs: Either MBR keys (must include 'north') or GeoJson keys
        (must include 'coordinates'); forwarded to the matching initializer.
    :raises ValueError: If neither 'north' nor 'coordinates' is present.
    """

    def __init__(self, **kwargs):
        if 'north' in kwargs:
            # noinspection PyTypeChecker
            SpatialBoundsMbr.__init__(self, **kwargs)
        elif 'coordinates' in kwargs:
            # noinspection PyTypeChecker
            GeoJson.__init__(self, **kwargs)
        else:
            # Bug fix: the raise was previously unconditional (no `else`), so
            # even a valid call raised after initialising the fields.
            raise ValueError(
                "'north' or 'coordinates' parameter is required. Check for SpatialBoundsMbr or GeoJson data types")
374 |
375 |
class SpatialBoundsMbr(AbstractDataType):
    """Minimum bounding rectangle in EPSG:4326 decimal degrees.

    :param north: (string) Northern-most coordinate of the MBR
    :param east: (string) Eastern-most coordinate of the MBR
    :param south: (string) Southern-most coordinate of the MBR
    :param west: (string) Western-most coordinate of the MBR
    """

    def __init__(self,
                 north: str | None = None,
                 east: str | None = None,
                 south: str | None = None,
                 west: str | None = None):
        self.north, self.east = north, east
        self.south, self.west = south, west
        self.dict = self._as_dict()
395 |
396 |
397 | # SpatialFilter original place
398 |
399 |
class SpatialFilterMbr(AbstractDataType):
    """Spatial filter defined by a minimum bounding rectangle.

    :param filterType: (string) value must be "mbr"
    :param lowerLeft: (Coordinate) Southwest point of the rectangle
    :param upperRight: (Coordinate) Northeast point of the rectangle
    """

    def __init__(self, lowerLeft: Coordinate, upperRight: Coordinate,
                 filterType: Literal['mbr'] = 'mbr'):
        self.lowerLeft, self.upperRight = lowerLeft, upperRight
        self.filterType = filterType
        self.dict = self._as_dict()
416 |
417 |
class SpatialFilterGeoJson(AbstractDataType):
    """Spatial filter defined by a GeoJSON region.

    :param filterType: (string) value must be "geojson"
    :param geoJson: (geoJson) GeoJson object representing a region of space
    """

    def __init__(self, geoJson: GeoJson,
                 filterType: Literal['geojson'] = 'geojson'):
        self.filterType, self.geoJson = filterType, geoJson
        self.dict = self._as_dict()
431 |
432 |
class UserContext(AbstractDataType):
    """
    :param contactId: (string) Internal user Identifier
    :param ipAddress: (string) Ip address used to send the request
    """

    # Consistency fix: every other optional (`| None`) parameter in this module
    # defaults to None; these two previously had no default, forcing callers
    # to pass explicit None values.
    def __init__(self,
                 contactId: str | None = None,
                 ipAddress: str | None = None):
        self.contactId = contactId
        self.ipAddress = ipAddress

        self.dict = self._as_dict()
446 |
447 |
class TemporalCoverage(AbstractDataType):
    """Temporal extent of a dataset.

    :param StartDate: (date) Starting temporal extent, ISO 8601 formatted date
    :param endDate: (date) Ending temporal extent, ISO 8601 formatted date
    Even though the API specifies the `date` data type, it is most likely `str` (not tested)
    """

    def __init__(self, StartDate: str | None = None, endDate: str | None = None):
        self.StartDate, self.endDate = StartDate, endDate
        self.dict = self._as_dict()
462 |
463 |
class TemporalFilter(AbstractDataType):
    """Generic temporal window filter.

    :param start: (date) ISO 8601 formatted date
    :param end: (date) ISO 8601 formatted date
    Even though the API specifies the `date` data type, it is most likely `str` (not tested)
    """

    def __init__(self, start: str | None = None, end: str | None = None):
        self.start, self.end = start, end
        self.dict = self._as_dict()
478 |
479 |
class DownloadResponse(AbstractDataType):
    """One download option returned for a scene.

    :param id: (int) Scene Identifier
    :param displayId: (string) Scene Identifier used for display
    :param entityId: (string) Entity Identifier
    :param datasetId: (string) Dataset Identifier
    :param available: (string) "Y"/"N" - whether the download option is available
    :param filesize: (long) Size of the download in bytes
    :param productName: (string) User friendly name for this download option
    :param productCode: (string) Internal product code for the download option
    :param bulkAvailable: (string) "Y"/"N" - whether the option is available for bulk
    :param downloadSystem: (string) System that is running the download
    :param secondaryDownloads: (DownloadResponse) Array of related downloads
    """

    def __init__(self,
                 id: int | None = None,
                 displayId: str | None = None,
                 entityId: str | None = None,
                 datasetId: str | None = None,
                 available: Literal['Y', 'N'] | None = None,
                 filesize: int | None = None,
                 productName: str | None = None,
                 productCode: str | None = None,
                 bulkAvailable: Literal['Y', 'N'] | None = None,
                 downloadSystem: str | None = None,
                 secondaryDownloads: list | None = None):
        self.id, self.displayId, self.entityId = id, displayId, entityId
        self.datasetId, self.available, self.filesize = datasetId, available, filesize
        self.productName, self.productCode = productName, productCode
        self.bulkAvailable, self.downloadSystem = bulkAvailable, downloadSystem
        self.secondaryDownloads = secondaryDownloads
        self.dict = self._as_dict()
520 |
521 |
class DownloadInput(AbstractDataType):
    """Identifies one product to request for download.

    :param entityId: (string) Entity Identifier
    :param productId: (string) Product identifiers
    :param dataUse: (string) The type of use of this data
    :param label: (string) Label name used when requesting the download
    """

    def __init__(self,
                 entityId: str | None = None,
                 productId: str | None = None,
                 dataUse: str | None = None,
                 label: str | None = None):
        self.entityId, self.productId = entityId, productId
        self.dataUse, self.label = dataUse, label
        self.dict = self._as_dict()
541 |
542 |
class DownloadQueueDownload(AbstractDataType):
    """One entry in the user's download queue.

    :param downloadId: (int) Download Identifier
    :param collectionName: (string) User friendly name of the collection
    :param datasetId: (string) Dataset Identifier
    :param displayId: (string) Scene Identifier used for display
    :param entityId: (string) Entity Identifier
    :param eulaCode: (string) EULA code - only populated when loading download orders
    :param filesize: (long) Size of the download in bytes
    :param label: (string) Label name used when requesting the download
    :param productCode: (string) Internal product code for the download option
    :param productName: (string) User friendly name for this product
    :param statusCode: (string) Internal status code
    :param statusText: (string) User friendly status
    """

    def __init__(self,
                 downloadId: int | None = None,
                 collectionName: str | None = None,
                 datasetId: str | None = None,
                 displayId: str | None = None,
                 entityId: str | None = None,
                 eulaCode: str | None = None,
                 filesize: int | None = None,
                 label: str | None = None,
                 productCode: str | None = None,
                 productName: str | None = None,
                 statusCode: str | None = None,
                 statusText: str | None = None):
        self.downloadId, self.collectionName = downloadId, collectionName
        self.datasetId, self.displayId, self.entityId = datasetId, displayId, entityId
        self.eulaCode, self.filesize, self.label = eulaCode, filesize, label
        self.productCode, self.productName = productCode, productName
        self.statusCode, self.statusText = statusCode, statusText
        self.dict = self._as_dict()
586 |
587 |
class Eula(AbstractDataType):
    """End-user license agreement attached to a download order.

    :param eulaCode: (string) EULA code - only populated when loading download orders
    :param agreementContent: (string) Agreement clauses to use the data -
        only populated when loading download orders
    """

    def __init__(self, eulaCode: str | None = None,
                 agreementContent: str | None = None):
        self.eulaCode, self.agreementContent = eulaCode, agreementContent
        self.dict = self._as_dict()
601 |
602 |
class FilegroupDownload(AbstractDataType):
    """Download request scoped to file groups of a scene list.

    :param datasetName: (string) Dataset name
    :param fileGroups: (string[]) Internal codes representing the file groups
    :param listId: (string) Name of scene list to request from
    :param dataUse: (string) The type of use of this data
    :param label: (string) Label name used when requesting the download
    """

    def __init__(self,
                 datasetName: str | None = None,
                 fileGroups: list[str] | None = None,
                 listId: str | None = None,
                 dataUse: str | None = None,
                 label: str | None = None):
        self.datasetName, self.fileGroups = datasetName, fileGroups
        self.listId, self.dataUse, self.label = listId, dataUse, label
        self.dict = self._as_dict()
625 |
626 |
class FilepathDownload(AbstractDataType):
    """Download request for a specific product data path.

    :param datasetName: (string) Dataset name
    :param productCode: (string) Internal code representing this product during ordering
    :param dataPath: (string) Data location to stream the download from
    :param dataUse: (string) The type of use of this data
    :param label: (string) Label name used when requesting the download
    """

    def __init__(self,
                 datasetName: str | None = None,
                 productCode: str | None = None,
                 dataPath: str | None = None,
                 dataUse: str | None = None,
                 label: str | None = None):
        self.datasetName, self.productCode = datasetName, productCode
        self.dataPath, self.dataUse, self.label = dataPath, dataUse, label
        self.dict = self._as_dict()
649 |
650 |
class Options(AbstractDataType):
    """Availability flags for a scene.

    :param bulk: (boolean) Scene is available for bulk
    :param order: (boolean) Scene is available for order
    :param download: (boolean) Scene is available for download
    :param secondary: (boolean) Scene is available for secondary download
    """

    def __init__(self,
                 bulk: bool | None = None,
                 order: bool | None = None,
                 download: bool | None = None,
                 secondary: bool | None = None):
        self.bulk, self.order = bulk, order
        self.download, self.secondary = download, secondary
        self.dict = self._as_dict()
670 |
671 |
class ProductDownload(AbstractDataType):
    """Download request for a set of products within a dataset.

    :param datasetName: (string) Dataset name
    :param productIds: (string[]) Product identifiers
    :param sceneFilter: (SceneFilter) Scene filter applied to the data
    """

    def __init__(self,
                 datasetName: str | None = None,
                 productIds: list[str] | None = None,
                 sceneFilter: SceneFilter | None = None):
        self.datasetName, self.productIds = datasetName, productIds
        self.sceneFilter = sceneFilter
        self.dict = self._as_dict()
688 |
689 |
class ProxiedDownload(AbstractDataType):
    """Progress record for a proxied download.

    :param downloadId: (int) Download Identifier
    :param downloadedSize: (bigint) Total downloaded size of the file
    """

    def __init__(self, downloadId: int | None = None,
                 downloadedSize: int | None = None):
        self.downloadId, self.downloadedSize = downloadId, downloadedSize
        self.dict = self._as_dict()
703 |
704 |
class Selected(AbstractDataType):
    """Selection flags for a scene.

    :param bulk: (boolean) Scene is selected for bulk
    :param order: (boolean) Scene is selected for order
    :param compare: (boolean) Scene is selected for compare
    """

    def __init__(self,
                 bulk: bool | None = None,
                 order: bool | None = None,
                 compare: bool | None = None):
        self.bulk, self.order, self.compare = bulk, order, compare
        self.dict = self._as_dict()
721 |
722 |
class MetadataExport(AbstractDataType):
    """Record describing a metadata export job.

    :param exportId: (string) Identifier of this export
    :param exportName: (string) Name of this export
    :param datasetId: (string) Dataset Identifier
    :param datasetName: (string) Dataset name
    :param sceneFilter: (sceneFilter) Scene filter applied to the data
    :param customMessage: (string) Content of the custom message
    :param exportType: (string) Type of this export
    :param status: (string) Internal Status Code
    :param statusName: (string) User Friendly Status
    :param dateEntered: (string) Date this export was entered
    :param dateUpdated: (string) Date the export was last updated
    """

    def __init__(self,
                 exportId: str | None = None,
                 exportName: str | None = None,
                 datasetId: str | None = None,
                 datasetName: str | None = None,
                 sceneFilter: SceneFilter | None = None,
                 customMessage: str | None = None,
                 exportType: str | None = None,
                 status: str | None = None,
                 statusName: str | None = None,
                 dateEntered: str | None = None,
                 dateUpdated: str | None = None):
        self.exportId, self.exportName = exportId, exportName
        self.datasetId, self.datasetName = datasetId, datasetName
        self.sceneFilter, self.customMessage = sceneFilter, customMessage
        self.exportType, self.status, self.statusName = exportType, status, statusName
        self.dateEntered, self.dateUpdated = dateEntered, dateUpdated
        self.dict = self._as_dict()
763 |
764 |
class MetadataField(AbstractDataType):
    """A single metadata field of a scene.

    :param id: (int) Metadata Identifier
    :param fieldName: (string) Name of the metadata field
    :param dictionaryLink: (string) Link to the data dictionary entry for this field
    :param value: (string) Value for this metadata field
    """

    def __init__(self,
                 id: int | None = None,
                 fieldName: str | None = None,
                 dictionaryLink: str | None = None,
                 value: str | None = None):
        self.id, self.fieldName = id, fieldName
        self.dictionaryLink, self.value = dictionaryLink, value
        self.dict = self._as_dict()
784 |
785 |
class Browse(AbstractDataType):
    """Browse/preview imagery locations for a scene.

    :param browseRotationEnabled: (boolean) Whether rotation is enabled for browse
    :param browseName: (string) Name for browse
    :param browsePath: (string) Path for browse
    :param overlayPath: (string) Path of overlay
    :param overlayType: (string) Type of overlay
    :param thumbnailPath: (string) Path of thumbnail
    """

    def __init__(self,
                 browseRotationEnabled: bool | None = None,
                 browseName: str | None = None,
                 browsePath: str | None = None,
                 overlayPath: str | None = None,
                 overlayType: str | None = None,
                 thumbnailPath: str | None = None):
        self.browseRotationEnabled = browseRotationEnabled
        self.browseName, self.browsePath = browseName, browsePath
        self.overlayPath, self.overlayType = overlayPath, overlayType
        self.thumbnailPath = thumbnailPath
        self.dict = self._as_dict()
811 |
812 |
class Dataset(AbstractDataType):
    """Descriptive record of a single USGS dataset.

    :param abstractText: (string) Abstract of the dataset
    :param acquisitionStart: (date) Start date the scene was acquired, ISO 8601
    :param acquisitionEnd: (date) End date the scene was acquired, ISO 8601
    :param catalogs: (string[]) M2M dataset catalogs including "EE", "GV", "HDDS", "LPCS"
    :param collectionName: (string) User friendly name of the collection
    :param collectionLongName: (string) Full user friendly dataset name
    :param datasetId: (string) Dataset Identifier
    :param datasetAlias: (string) Short user friendly dataset name
    :param datasetCategoryName: (string) Category this dataset belongs to
    :param dataOwner: (string) Owner of the data
    :param dateUpdated: (date) Date the dataset was last updated, ISO 8601
    :param doiNumber: (string) DOI name of the dataset
    :param ingestFrequency: (string) Ingest interval (ISO-8601 formatted string)
    :param keywords: (string) Keywords of the dataset
    :param sceneCount: (int) Number of scenes under the dataset
    :param spatialBounds: (spatialBounds) Dataset spatial extent
    :param temporalCoverage: (temporalCoverage) Temporal extent (ISO 8601)
    :param supportCloudCover: (boolean) Whether cloud-cover searching is supported
        (via cloudCover filter in the scene search parameters)
    :param supportDeletionSearch: (boolean) Whether deletion searching is supported
    """

    def __init__(self, abstractText: str | None = None,
                 acquisitionStart: str | None = None,
                 acquisitionEnd: str | None = None,
                 catalogs: list[str] | None = None,
                 collectionName: str | None = None,
                 collectionLongName: str | None = None,
                 datasetId: str | None = None,
                 datasetAlias: str | None = None,
                 datasetCategoryName: str | None = None,
                 dataOwner: str | None = None,
                 dateUpdated: str | None = None,
                 doiNumber: str | None = None,
                 ingestFrequency: str | None = None,
                 keywords: str | None = None,
                 sceneCount: int | None = None,
                 spatialBounds: SpatialBounds | None = None,
                 temporalCoverage: TemporalCoverage | None = None,
                 supportCloudCover: bool | None = None,
                 supportDeletionSearch: bool | None = None):
        self.abstractText = abstractText
        self.acquisitionStart, self.acquisitionEnd = acquisitionStart, acquisitionEnd
        self.catalogs, self.collectionName = catalogs, collectionName
        self.collectionLongName, self.datasetId = collectionLongName, datasetId
        self.datasetAlias, self.datasetCategoryName = datasetAlias, datasetCategoryName
        self.dataOwner, self.dateUpdated = dataOwner, dateUpdated
        self.doiNumber, self.ingestFrequency = doiNumber, ingestFrequency
        self.keywords, self.sceneCount = keywords, sceneCount
        self.spatialBounds, self.temporalCoverage = spatialBounds, temporalCoverage
        self.supportCloudCover = supportCloudCover
        self.supportDeletionSearch = supportDeletionSearch
        self.dict = self._as_dict()
877 |
878 |
class DatasetCategory(AbstractDataType):
    """Category grouping for datasets.

    :param id: (int) Dataset category Identifier
    :param categoryName: (string) Name of the category
    :param categoryDescription: (string) Description of the category
    :param parentCategoryId: (int) Parent category Identifier
    :param parentCategoryName: (string) Name of the parent category
    :param referenceLink: (string) Information for the category
    """

    def __init__(self,
                 id: int | None = None,
                 categoryName: str | None = None,
                 categoryDescription: str | None = None,
                 parentCategoryId: int | None = None,
                 parentCategoryName: str | None = None,
                 referenceLink: str | None = None):
        self.id, self.categoryName = id, categoryName
        self.categoryDescription = categoryDescription
        self.parentCategoryId, self.parentCategoryName = parentCategoryId, parentCategoryName
        self.referenceLink = referenceLink
        self.dict = self._as_dict()
904 |
905 |
906 | # replaced
class Metadata(AbstractDataType):
    """Customizes the layout of a dataset's metadata.

    :param metadataType: (string) Value can be 'export', 'res_sum', 'shp', or 'full'
    :param id: (string) Identifies which field is being referenced
    :param sortOrder: (integer) Changes the order in which the fields are sorted
    """

    def __init__(self,
                 metadataType: Literal['export', 'res_sum', 'shp', 'full'] | None = None,
                 id: str | None = None,
                 sortOrder: int | None = None):
        self.metadataType, self.id = metadataType, id
        self.sortOrder = sortOrder
        self.dict = self._as_dict()
923 |
924 |
925 | # replaced
class SearchSort(AbstractDataType):
    """Sort directive for dataset results.

    :param id: (string) Identifies which field to sort by
    :param direction: (string) Sort direction (ASC, DESC)
    """

    def __init__(self, id: str | None = None,
                 direction: Literal['ASC', 'DESC'] | None = None):
        self.id, self.direction = id, direction
        self.dict = self._as_dict()
939 |
940 |
941 | # replaced
class FileGroups(AbstractDataType):
    """Customizes downloads by file groups.

    :param fileGroupId: (string) Internal file group ID
    :param productIds: (string[]) Product IDs within the file group
    """

    def __init__(self, fileGroupId: str | None = None,
                 productIds: list[str] | None = None):
        self.fileGroupId, self.productIds = fileGroupId, productIds
        self.dict = self._as_dict()
955 |
956 |
class DatasetCustomization(AbstractDataType):
    """
    :param datasetName: (string) Alias of the dataset
    :param excluded: (boolean) Used to include or exclude a dataset
    :param metadata: (Metadata) Used to customize the layout of a datasets metadata
    :param searchSort: (SearchSort) Used to sort the datasets results
    :param fileGroups: (FileGroups) Used to customize the downloads by file groups
    """

    def __init__(self,
                 datasetName: str | None = None,
                 # Annotation fix: documented (and sent) as a boolean flag,
                 # but was previously annotated `str | None`.
                 excluded: bool | None = None,
                 metadata: Metadata | None = None,
                 searchSort: SearchSort | None = None,
                 fileGroups: FileGroups | None = None
                 ):
        self.datasetName = datasetName
        self.excluded = excluded
        self.metadata = metadata
        self.searchSort = searchSort
        self.fileGroups = fileGroups

        self.dict = self._as_dict()
980 |
981 |
982 | # Metadata original place
983 |
984 |
985 | # SearchSort original place
986 |
987 |
988 | # FileGroups original place
989 |
990 |
class SortCustomization(AbstractDataType):
    """Sorting rule for scene-search results.

    :param field_name: (string) Identifier of the field to sort by.
    :param direction: (string) Sort direction - 'ASC' or 'DESC'.
    """

    def __init__(self,
                 field_name: str | None = None,
                 direction: Literal['ASC', 'DESC'] | None = None):
        self.field_name, self.direction = field_name, direction
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1004 |
1005 |
1006 | # replaced
class FieldConfig(AbstractDataType):
    """Reference-only description of a dataset filter input field.

    :param type: (string) One of 'Select', 'Text', 'Range'.
    :param filters: (filter[]) Describes the input for a query.
    :param validators: ([]) Validation the input data is put through prior to being
        used in the query.
    :param displayListId: (string) Internal reference to where provided value lists are sourced from.
    """

    # NOTE(review): the API docs do not spell out the element types of
    # `filters` ("filter[]") and `validators` ("[]"); plain lists are accepted here.
    def __init__(self,
                 type: Literal['Select', 'Text', 'Range'],
                 filters: list | None = None,
                 validators: list | None = None,
                 displayListId: str | None = None):
        self.type, self.filters = type, filters
        self.validators, self.displayListId = validators, displayListId
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1028 |
1029 |
class DatasetFilter(AbstractDataType):
    """Metadata filter field definition for a dataset.

    :param id: (int) Dataset identifier.
    :param legacyFieldId: (int) Legacy field identifier.
    :param dictionaryLink: (string) Link to the data dictionary entry for this field.
    :param fieldConfig: (FieldConfig) Configuration of the field.
    :param fieldLabel: (string) Label name used when requesting the field.
    :param searchSql: (string) WHERE clause used when searching in the database.
    """

    def __init__(self,
                 id: int | None = None,
                 legacyFieldId: int | None = None,
                 dictionaryLink: str | None = None,
                 fieldConfig: FieldConfig | None = None,
                 fieldLabel: str | None = None,
                 searchSql: str | None = None):
        self.id, self.legacyFieldId = id, legacyFieldId
        self.dictionaryLink, self.fieldConfig = dictionaryLink, fieldConfig
        self.fieldLabel, self.searchSql = fieldLabel, searchSql
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1055 |
1056 |
1057 | # FieldConfig original place
1058 |
1059 |
class Notification(AbstractDataType):
    """System notification record.

    :param id: (int) Notification identifier.
    :param subject: (string) Subject of the notification.
    :param messageContent: (string) Content of the notification message.
    :param severityCode: (string) Internal severity code.
    :param severityCssClass: (string) Class of the severity.
    :param severityText: (string) User friendly name for this severity.
    :param dateUpdated: (string) Date the notification was last updated.
    """

    def __init__(self,
                 id: int | None = None,
                 subject: str | None = None,
                 messageContent: str | None = None,
                 severityCode: str | None = None,
                 severityCssClass: str | None = None,
                 severityText: str | None = None,
                 dateUpdated: str | None = None):
        # Bind every constructor argument to an attribute of the same name,
        # in declaration order.
        arguments = dict(locals())
        del arguments['self']
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1088 |
1089 |
class ProductResponse(AbstractDataType):
    """Description of a single orderable product.

    :param id: (int) Product identifier.
    :param entityId: (string) Entity identifier.
    :param datasetId: (string) Dataset identifier.
    :param available: (string) Denotes if the download option is available.
    :param price: (double) Price for ordering this product, less the $5.00 handling fee
        per order (Handling Fee - applies to orders that require payment).
    :param productName: (string) User friendly name for this product.
    :param productCode: (string) Internal code used to represent this product during ordering.
    """

    def __init__(self,
                 id: int | None = None,
                 entityId: str | None = None,
                 datasetId: str | None = None,
                 available: str | None = None,
                 price: float | None = None,
                 productName: str | None = None,
                 productCode: str | None = None):
        # Bind every constructor argument to an attribute of the same name,
        # in declaration order.
        arguments = dict(locals())
        del arguments['self']
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1119 |
1120 |
class ProductInput(AbstractDataType):
    """Identifies a single product of a scene for ordering requests.

    :param datasetName: (string) Dataset name.
    :param entityId: (string) Entity identifier.
    :param productId: (string) Product identifier.
    :param productCode: (string) Internal product code representing the download option.
    """

    def __init__(self,
                 datasetName: str | None = None,
                 entityId: str | None = None,
                 productId: str | None = None,
                 productCode: str | None = None):
        self.datasetName, self.entityId = datasetName, entityId
        self.productId, self.productCode = productId, productCode
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1140 |
1141 |
class RunOptions(AbstractDataType):
    """
    :param resultFormats: (string[]) The valid values are 'metadata', 'email', 'kml', 'shapefile', 'geojson'
    """

    # The API documents `resultFormats` as an array of strings, so the annotation
    # is a list of the allowed literals rather than a single literal value.
    def __init__(self, resultFormats: list[Literal['metadata', 'email', 'kml', 'shapefile', 'geojson']]):
        self.resultFormats = resultFormats

        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1151 |
1152 |
class Scene(AbstractDataType):
    """One scene returned by a scene search.

    :param browse: (browse) An array of browse options.
    :param cloudCover: (string) Cloud cover score for this scene (-1 if score does not exist).
    :param entityId: (string) Entity identifier.
    :param displayId: (string) Scene identifier used for display.
    :param metadata: (metadata) An array of metadata for this scene.
    :param options: (options) An array of available download options for this scene.
    :param selected: (selected) Denotes if the scene is selected for various systems.
    :param spatialBounds: (spatialBounds) Dataset spatial extent.
    :param spatialCoverage: (spatialBounds) Dataset spatial coverage.
    :param temporalCoverage: (temporalCoverage) Dataset temporal coverage.
    :param publishDate: (string) Date the scene was published.
    """

    def __init__(self,
                 browse: Browse | None = None,
                 cloudCover: str | None = None,
                 entityId: str | None = None,
                 displayId: str | None = None,
                 metadata: list[MetadataField] | None = None,
                 options: Options | None = None,
                 selected: Selected | None = None,
                 spatialBounds: SpatialBounds | None = None,
                 spatialCoverage: SpatialBounds | None = None,
                 temporalCoverage: TemporalCoverage | None = None,
                 publishDate: str | None = None):
        # Bind every constructor argument to an attribute of the same name,
        # in declaration order.
        arguments = dict(locals())
        del arguments['self']
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1193 |
1194 |
class IngestSubscription(AbstractDataType):
    """State of a recurring ingest subscription.

    :param subscriptionId: (int) Unique identifier for the subscription.
    :param subscriptionName: (string) User-supplied name for the request.
    :param username: (string) The user who created this subscription.
    :param catalogId: (string) The Machine-to-Machine dataset catalog being used.
    :param datasets: (string) Identifies datasets to search and the parameters specific to each dataset.
    :param runOptions: (runOptions) Subscription runtime configurations.
    :param runStartDate: (string) Temporal filter on the data based on ingest date.
    :param runEndDate: (string) Temporal filter on the data based on ingest date.
    :param requestApp: (string)
    :param requestAppReferenceId: (string) The application that is creating the subscription.
    :param runFrequency: (string) Run this subscription at this interval.
    :param status: (string) Status of the subscription.
    :param dateEntered: (string) Date this subscription was entered.
    :param lastRunDate: (string) Date of the last run for this subscription.
    :param lastAttemptDate: (string) Date of the last attempt for this subscription.
    """

    def __init__(self,
                 subscriptionId: int | None = None,
                 subscriptionName: str | None = None,
                 username: str | None = None,
                 catalogId: str | None = None,
                 datasets: str | None = None,
                 runOptions: RunOptions | None = None,
                 runStartDate: str | None = None,
                 runEndDate: str | None = None,
                 requestApp: str | None = None,
                 requestAppReferenceId: str | None = None,
                 runFrequency: str | None = None,
                 status: str | None = None,
                 dateEntered: str | None = None,
                 lastRunDate: str | None = None,
                 lastAttemptDate: str | None = None):
        # Bind every constructor argument to an attribute of the same name,
        # in declaration order.
        arguments = dict(locals())
        del arguments['self']
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1247 |
1248 |
class IngestSubscriptionLog(AbstractDataType):
    """Record of one execution of an ingest subscription.

    :param runId: (int) Unique identifier for this subscription run.
    :param subscriptionId: (int) Unique identifier for the subscription.
    :param runDate: (string) Date of this subscription run.
    :param executionTime: (string) Number of seconds this subscription took to run.
    :param numScenesMatched: (string) Number of scenes this subscription run matched.
    :param resultCode: (string) Result of this subscription run.
    :param runScriptOutput: (string) Output of this subscription run.
    :param runSummary: (string) Any summary text associated with this subscription run.
    :param runOptions: (runOptions) Runtime configurations of this subscription run.
    :param datasets: (string) Datasets of this subscription run.
    :param catalogId: (string) The Machine-to-Machine dataset catalog being used.
    :param lastRunDate: (string) Date of the last run for this subscription.
    :param orderIds: (string) Tram order identifier.
    :param bulkIds: (string) Bulk order identifier.
    """

    def __init__(self,
                 runId: int | None = None,
                 subscriptionId: int | None = None,
                 runDate: str | None = None,
                 executionTime: str | None = None,
                 numScenesMatched: str | None = None,
                 resultCode: str | None = None,
                 runScriptOutput: str | None = None,
                 runSummary: str | None = None,
                 runOptions: RunOptions | None = None,
                 datasets: str | None = None,
                 catalogId: str | None = None,
                 lastRunDate: str | None = None,
                 orderIds: str | None = None,
                 bulkIds: str | None = None):
        # Bind every constructor argument to an attribute of the same name,
        # in declaration order.
        arguments = dict(locals())
        del arguments['self']
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1298 |
1299 |
class SubscriptionDataset(AbstractDataType):
    """Single dataset entry of an ingest subscription.

    :param datasetName: (string) Dataset name.
    """

    def __init__(self, datasetName: str | None = None):
        self.datasetName = datasetName
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1309 |
1310 |
class TramOrder(AbstractDataType):
    """State of a TRAM order.

    :param orderId: (int) Order identifier.
    :param username: (string) The user who created this order.
    :param processingPriority: (int) Processing priority for the order.
    :param orderComment: (string) Comment contents of the order.
    :param statusCode: (string) Internal status code.
    :param statusCodeText: (string) User friendly status.
    :param dateEntered: (string) Date this order was entered.
    :param lastUpdatedDate: (string) Date the order was last updated.
    """

    def __init__(self,
                 orderId: int | None = None,
                 username: str | None = None,
                 processingPriority: int | None = None,
                 orderComment: str | None = None,
                 statusCode: str | None = None,
                 statusCodeText: str | None = None,
                 dateEntered: str | None = None,
                 lastUpdatedDate: str | None = None):
        # Bind every constructor argument to an attribute of the same name,
        # in declaration order.
        arguments = dict(locals())
        del arguments['self']
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1342 |
1343 |
class TramUnit(AbstractDataType):
    """One ordered unit (product of a scene) within a TRAM order.

    :param unitNumber: (int) The unit identifier.
    :param productCode: (string) Internal product code.
    :param productName: (string) User friendly name for the product.
    :param datasetId: (string) Dataset identifier.
    :param datasetName: (string) Dataset name.
    :param collectionName: (string) User friendly name of the collection.
    :param orderingId: (string) Scene identifier used within the ordering system.
    :param unitPrice: (string) Price for ordering this unit.
    :param unitComment: (string) Any comments that should be retained with this product.
    :param statusCode: (string) Internal status code.
    :param statusCodeText: (string) User friendly status.
    :param lastUpdatedDate: (string) Date the unit was last updated.
    """

    def __init__(self,
                 unitNumber: int | None = None,
                 productCode: str | None = None,
                 productName: str | None = None,
                 datasetId: str | None = None,
                 datasetName: str | None = None,
                 collectionName: str | None = None,
                 orderingId: str | None = None,
                 unitPrice: str | None = None,
                 unitComment: str | None = None,
                 statusCode: str | None = None,
                 statusCodeText: str | None = None,
                 lastUpdatedDate: str | None = None):
        # Bind every constructor argument to an attribute of the same name,
        # in declaration order.
        arguments = dict(locals())
        del arguments['self']
        for attribute, value in arguments.items():
            setattr(self, attribute, value)
        # Payload form consumed by the API endpoints.
        self.dict = self._as_dict()
1387 |
--------------------------------------------------------------------------------
/usgs_m2m/usgsMethods.py:
--------------------------------------------------------------------------------
1 | import requests
2 | from warnings import warn
3 | from .checkResponse import _check_response
4 |
5 |
6 | # noinspection PyPep8Naming
7 | class API:
8 | """
9 | Implementation date: 30.07.2020
10 | Revision date: 09.12.2024
11 |
12 | Official USGS/EROS Inventory Service Documentation (Machine-to-Machine API):
13 | https://m2m.cr.usgs.gov
14 |
15 | This API complies with the methods given in:
16 | https://m2m.cr.usgs.gov/api/docs/reference/
17 | """
18 |
19 | apiURL = r'https://m2m.cr.usgs.gov/api/api/json/stable/' # must ends with slash: "/"
20 | apiKey = None
21 | loud_mode = False
22 |
23 | def __send_request_and_check_it(self, url: str, json_payload: dict) -> dict:
24 | """Protected function to send request, check it and return response. Used in almost every method in this API.
25 | :param url: request URL
26 | :param json_payload: `requests` json payload.
27 | :return: response as dictionary
28 | """
29 | response = requests.post(url, json=json_payload, headers={'X-Auth-Token': self.apiKey})
30 | _check_response(response)
31 | return response.json()
32 |
33 | def dataOwner(self, dataOwner):
34 | """
35 | This method is used to provide the contact information of the data owner.
36 | :param dataOwner: (string) Used to identify the data owner - this value comes from the dataset-search response
37 | :return: (dict) Response as a dictionary
38 | """
39 | url = f'{self.apiURL}data-owner'
40 | json_payload = {"dataOwner": dataOwner}
41 | return self.__send_request_and_check_it(url, json_payload)
42 |
43 | def dataset(self, datasetId=None, datasetName=None):
44 | """
45 | This method is used to retrieve the dataset by id or name.
46 | :param datasetId: (string) The dataset identifier - must use this or datasetName
47 | :param datasetName: (string) The system-friendly dataset name - must use this or datasetId
48 | :return: (dict) Response as a dictionary
49 | """
50 | if all(v is None for v in {datasetId, datasetName}):
51 | raise ValueError('datasetId or datasetName must be used.')
52 |
53 | elif all(v is not None for v in {datasetId, datasetName}):
54 | raise ValueError('datasetId or datasetName must be used, not both!')
55 |
56 | url = f'{self.apiURL}dataset'
57 | json_payload = {"datasetId": datasetId,
58 | "datasetName": datasetName}
59 | return self.__send_request_and_check_it(url, json_payload)
60 |
61 | def datasetBrowse(self, datasetId):
62 | """
63 | This request is used to return the browse configurations for the specified dataset.
64 | :param datasetId: (string) Determines which dataset to return browse configurations for
65 | :return: (dict) Response as a dictionary
66 | """
67 | url = f'{self.apiURL}dataset-browse'
68 | json_payload = {"datasetId": datasetId}
69 | return self.__send_request_and_check_it(url, json_payload)
70 |
71 | def datasetBulkProducts(self, datasetName):
72 | """
73 | Lists all available bulk products for a dataset - this does not guarantee scene availability.
74 | :param datasetName: (str) Used to identify the which dataset to return results for
75 | :return: (dict) Response as a dictionary
76 | """
77 | url = f'{self.apiURL}dataset-bulk-products'
78 | json_payload = {"datasetName": datasetName}
79 | return self.__send_request_and_check_it(url, json_payload)
80 |
81 | def datasetCatalogs(self):
82 | """
83 | This method is used to retrieve the available dataset catalogs. The use of dataset catalogs are not required,
84 | but are used to group datasets by their use within our web applications.
85 | :return: (dict) Response as a dictionary
86 | """
87 | url = f'{self.apiURL}dataset-catalogs'
88 | json_payload = {}
89 | return self.__send_request_and_check_it(url, json_payload)
90 |
91 | def datasetCategories(self, catalog, includeMessages=False, publicOnly=False, useCustomization=False, parentId=None,
92 | datasetFilter=None):
93 | """
94 | This method is used to search datasets under the categories.
95 | :param catalog: (string) Used to identify datasets that are associated with a given application
96 | :param includeMessages: (boolean) Optional parameter to include messages regarding specific dataset components
97 | :param publicOnly: (boolean) Used as a filter out datasets that are not accessible to unauthenticated general
98 | public users
99 | :param useCustomization: (boolean) Used as a filter out datasets that are excluded by user customization
100 | :param parentId: (string) If provided, returned categories are limited to categories that are children of the
101 | provided ID
102 | :param datasetFilter: (string) If provided, filters the datasets - this automatically adds a wildcard before and
103 | after the input value
104 | :return: (dict) Response as a dictionary
105 | """
106 | url = f'{self.apiURL}dataset-categories'
107 | json_payload = {"catalog": catalog,
108 | "includeMessages": includeMessages,
109 | "publicOnly": publicOnly,
110 | "useCustomization": useCustomization,
111 | "parentId": parentId,
112 | "datasetFilter": datasetFilter}
113 | return self.__send_request_and_check_it(url, json_payload)
114 |
115 | def datasetClearCustomization(self, datasetName=None, metadataType=None, fileGroupIds=None):
116 | """
117 | This method is used the remove an entire customization or clear out a specific metadata type.
118 | :param datasetName: (string) Used to identify the dataset to clear. If null, all dataset customizations will be cleared.
119 | :param metadataType: (string[]) If populated, identifies which metadata to clear(export, full, res_sum, shp)
120 | :param fileGroupIds: (string[]) If populated, identifies which file group to clear
121 | :return: (dict) Response as a dictionary
122 | """
123 | if fileGroupIds is None:
124 | fileGroupIds = []
125 | if metadataType is None:
126 | metadataType = []
127 | url = f'{self.apiURL}dataset-clear-customization'
128 | json_payload = {"datasetName": datasetName,
129 | "metadataType": metadataType,
130 | "fileGroupIds": fileGroupIds,
131 | }
132 | return self.__send_request_and_check_it(url, json_payload)
133 |
134 | def datasetCoverage(self, datasetName):
135 | """
136 | Returns coverage for a given dataset.
137 | :param datasetName: (string) Determines which dataset to return coverage for
138 | :return: (dict) Response as a dictionary
139 | """
140 | url = f'{self.apiURL}dataset-coverage'
141 | json_payload = {"datasetName": datasetName}
142 | return self.__send_request_and_check_it(url, json_payload)
143 |
144 | def datasetDownloadOptions(self, datasetName, sceneFilter=None):
145 | """
146 | This request lists all available products for a given dataset - this does not guarantee scene availability.
147 | :param datasetName: (string) Used to identify the which dataset to return results for
148 | :param sceneFilter: (SceneFilter) Used to filter data within the dataset
149 | :return: (dict) Response as a dictionary
150 | """
151 | url = f'{self.apiURL}dataset-download-options'
152 | json_payload = {"datasetName": datasetName,
153 | "sceneFilter": sceneFilter}
154 | return self.__send_request_and_check_it(url, json_payload)
155 |
156 | def datasetFileGroups(self, datasetName):
157 | """
158 | This method is used to list all configured file groups for a dataset.
159 | :param datasetName: (string) Dataset alias
160 | :return: (dict) Response as a dictionary
161 | """
162 | url = f'{self.apiURL}dataset-file-groups'
163 | json_payload = {"datasetName": datasetName}
164 | return self.__send_request_and_check_it(url, json_payload)
165 |
166 | def datasetFilters(self, datasetName):
167 | """
168 | This request is used to return the metadata filter fields for the specified dataset. These values can be used
169 | as additional criteria when submitting search and hit queries.
170 | :param datasetName: (string) Determines which dataset to return filters for
171 | :return: (dict) Response as a dictionary
172 | """
173 | url = f'{self.apiURL}dataset-filters'
174 | json_payload = {"datasetName": datasetName}
175 | return self.__send_request_and_check_it(url, json_payload)
176 |
    def datasetGetCustomization(self, datasetName):
        """
        This method is used to retrieve metadata customization for a specific dataset.
        :param datasetName: (string) Used to identify the dataset to search
        :return: (dict) Response as a dictionary
        """
        url = f'{self.apiURL}dataset-get-customization'
        json_payload = {"datasetName": datasetName}
        return self.__send_request_and_check_it(url, json_payload)
187 |
188 | def datasetGetCustomizations(self, datasetNames=None, metadataType=None):
189 | """
190 | This method is used to retrieve metadata customizations for multiple datasets at once.
191 | :param datasetNames: (string[]) Used to identify the dataset(s) to return. If null it will return all the users
192 | customizations
193 | :param metadataType: (string[]) If populated, identifies which metadata to return(export, full, res_sum, shp)
194 | :return: (dict) Response as a dictionary
195 | """
196 | url = f'{self.apiURL}dataset-get-customizations'
197 | json_payload = {"datasetName": datasetNames,
198 | "metadataType": metadataType,
199 | }
200 | return self.__send_request_and_check_it(url, json_payload)
201 |
202 | def datasetMessages(self, catalog=None, datasetName=None, datasetNames=None):
203 | """
204 | Returns any notices regarding the given datasets features.
205 | :param catalog: (string) Used to identify datasets that are associated with a given application
206 | :param datasetName: (string) Used as a filter with wildcards inserted at the beginning and the end of the supplied value
207 | :param datasetNames: (string[]) Used as a filter with wildcards inserted at the beginning and the end of the supplied value
208 | :return: (dict) Response as a dictionary
209 | """
210 | url = f'{self.apiURL}dataset-messages'
211 | json_payload = {"catalog": catalog,
212 | "datasetName": datasetName,
213 | "datasetNames": datasetNames,
214 | }
215 | return self.__send_request_and_check_it(url, json_payload)
216 |
217 | def datasetMetadata(self, datasetName):
218 | """
219 | This method is used to retrieve all metadata fields for a given dataset.
220 | :param datasetName: (string) The system-friendly dataset name
221 | :return: (dict) Response as a dictionary
222 | """
223 | url = f'{self.apiURL}dataset-metadata'
224 | json_payload = {"datasetName": datasetName}
225 | return self.__send_request_and_check_it(url, json_payload)
226 |
227 | def datasetOrderProducts(self, datasetName):
228 | """
229 | Lists all available order products for a dataset - this does not guarantee scene availability.
230 | :param datasetName: (string) Used to identify the which dataset to return results for
231 | :return: (dict) Response as a dictionary
232 | """
233 | url = f'{self.apiURL}dataset-order-products'
234 | json_payload = {"datasetName": datasetName}
235 | return self.__send_request_and_check_it(url, json_payload)
236 |
237 | def datasetSearch(self, catalog=None, categoryId=None, datasetName=None, includeMessages=None, publicOnly=None,
238 | includeUnknownSpatial=None, temporalFilter=None, spatialFilter=None, sortDirection=None,
239 | sortField=None, useCustomization=None):
240 | """
241 | This method is used to find datasets available for searching. By passing only API Key, all available datasets
242 | are returned. Additional parameters such as temporal range and spatial bounding box can be used to find datasets
243 | that provide more specific data. The dataset name parameter can be used to limit the results based on matching
244 | the supplied value against the public dataset name with assumed wildcards at the beginning and end.
245 | :param catalog: (string) Used to identify datasets that are associated with a given application
246 | :param categoryId: (string) Used to restrict results to a specific category (does not search sub-sategories)
247 | :param datasetName: (string) Used as a filter with wildcards inserted at the beginning and the end of the supplied value
248 | :param includeMessages: (boolean) Optional parameter to include messages regarding specific dataset components
249 | :param publicOnly: (boolean) Used as a filter out datasets that are not accessible to unauthenticated general public users
250 | :param includeUnknownSpatial: (string) Optional parameter to include datasets that do not support geographic searching
251 | :param temporalFilter: (TemporalFilter) Used to filter data based on data acquisition
252 | :param spatialFilter: (SpatialFilter) Used to filter data based on data location
253 | :param sortDirection: (string) Defined the sorting as Ascending (ASC) or Descending (DESC) - default is ASC
254 | :param sortField: (string) Identifies which field should be used to sort datasets (shortName - default, longName, dastasetName, GloVis)
255 | :param useCustomization: (string) Optional parameter to indicate whether to use customization
256 |
257 | :return: (dict) Response as a dictionary
258 | """
259 | url = f'{self.apiURL}dataset-search'
260 | json_payload = {"catalog": catalog,
261 | "categoryId": categoryId,
262 | "datasetName": datasetName,
263 | "includeMessages": includeMessages,
264 | "publicOnly": publicOnly,
265 | "includeUnknownSpatial": includeUnknownSpatial,
266 | "temporalFilter": temporalFilter,
267 | "spatialFilter": spatialFilter,
268 | "sortDirection": sortDirection,
269 | "sortField": sortField,
270 | "useCustomization": useCustomization,
271 | }
272 | return self.__send_request_and_check_it(url, json_payload)
273 |
274 | def datasetSetCustomization(self, datasetName, excluded=None, metadata=None, searchSort=None, fileGroups=None):
275 | """
276 | This method is used to create or update dataset customizations for a given dataset.
277 | :param datasetName: (string) Used to identify the dataset to search
278 | :param excluded: (boolean) Used to exclude the dataset
279 | :param metadata: (Metadata) Used to customize the metadata layout.
280 | :param searchSort: (SearchSort) Used to sort the dataset results.
281 | :param fileGroups: (FileGroups) Used to customize downloads by file groups
282 | :return: (dict) Response as a dictionary
283 | """
284 | url = f'{self.apiURL}dataset-set-customization'
285 | json_payload = {"datasetName": datasetName,
286 | "excluded": excluded,
287 | "metadata": metadata,
288 | "searchSort": searchSort,
289 | "fileGroups": fileGroups,
290 | }
291 | return self.__send_request_and_check_it(url, json_payload)
292 |
293 | def datasetSetCustomizations(self, datasetCustomization):
294 | """
295 | This method is used to create or update dataset customizations for a given dataset.
296 | :param datasetCustomization: (DatasetCustomization) Used to create or update a dataset customization for
297 | multiple datasets.
298 | :return: (dict) Response as a dictionary
299 | """
300 | url = f'{self.apiURL}dataset-set-customizations'
301 | json_payload = {"datasetCustomization": datasetCustomization}
302 | return self.__send_request_and_check_it(url, json_payload)
303 |
304 | def downloadCompleteProxied(self, proxiedDownloads):
305 | """
306 | Updates status to 'C' with total downloaded file size for completed proxied downloads
307 | :param proxiedDownloads: (ProxiedDownload[]) Used to specify multiple proxied downloads
308 | :return: (dict) Response as a dictionary
309 | """
310 | url = f'{self.apiURL}download-complete-proxied'
311 | json_payload = {"proxiedDownloads": proxiedDownloads}
312 | return self.__send_request_and_check_it(url, json_payload)
313 |
314 | def downloadEula(self, eulaCode=None, eulaCodes=None):
315 | """
316 | Gets the contents of a EULA from the eulaCodes.
317 | :param eulaCode: (string) Used to specify a single eula
318 | :param eulaCodes: (string[]) Used to specify multiple eulas
319 | :return: (dict) Response as a dictionary
320 | """
321 | url = f'{self.apiURL}download-eula'
322 | json_payload = {"eulaCode": eulaCode,
323 | "eulaCodes": eulaCodes,
324 | }
325 | return self.__send_request_and_check_it(url, json_payload)
326 |
327 | def downloadLabels(self, downloadApplication=None):
328 | """
329 | Gets a list of unique download labels associated with the orders.
330 | :param downloadApplication: (string) Used to denote the application that will perform the download
331 | :return: (dict) Response as a dictionary
332 | """
333 | url = f'{self.apiURL}download-labels'
334 | json_payload = {"downloadApplication": downloadApplication}
335 | return self.__send_request_and_check_it(url, json_payload)
336 |
337 | def downloadOptions(self, datasetName, entityIds=None, listId=None, includeSecondaryFileGroups=None):
338 | """
339 | The download options request is used to discover downloadable products for each dataset. If a download is marked
340 | as not available, an order must be placed to generate that product.
341 | :param datasetName: (str) Dataset alias
342 | :param entityIds: (str) List of scenes
343 | :param listId: (str) Used to identify the list of scenes to use
344 | :param includeSecondaryFileGroups: (boolean) Optional parameter to return file group IDs with secondary products
345 | :return: (dict) Response as a dictionary
346 | """
347 | url = f'{self.apiURL}download-options'
348 | json_payload = {"datasetName": datasetName,
349 | "entityIds": entityIds,
350 | "listId": listId,
351 | "includeSecondaryFileGroups": includeSecondaryFileGroups,
352 | }
353 | return self.__send_request_and_check_it(url, json_payload)
354 |
355 | def downloadOrderLoad(self, downloadApplication=None, label=None):
356 | """
357 | This method is used to prepare a download order for processing by moving the scenes into the queue for processing
358 | :param downloadApplication: (string) Used to denote the application that will perform the download
359 | :param label: (string) Determines which order to load
360 | :return: (dict) Response as a dictionary
361 | """
362 | url = f'{self.apiURL}download-order-load'
363 | json_payload = {"downloadApplication": downloadApplication,
364 | "label": label,
365 | }
366 | return self.__send_request_and_check_it(url, json_payload)
367 |
368 | def downloadOrderRemove(self, label, downloadApplication=None):
369 | """
370 | This method is used to remove an order from the download queue.
371 | :param downloadApplication: (string) Used to denote the application that will perform the download
372 | :param label: (string) Determines which order to remove
373 | :return: (dict) Response as a dictionary
374 | """
375 |
376 | url = f'{self.apiURL}download-order-remove'
377 | json_payload = {"downloadApplication": downloadApplication,
378 | "label": label,
379 | }
380 | return self.__send_request_and_check_it(url, json_payload)
381 |
382 | def downloadRemove(self, downloadId):
383 | """
384 | Removes an item from the download queue.
385 | :param downloadId: (int) Represents the ID of the download from within the queue
386 | :return: (dict) Response as a dictionary
387 | """
388 | url = f'{self.apiURL}download-remove'
389 | json_payload = {"downloadId": downloadId,
390 | }
391 | return self.__send_request_and_check_it(url, json_payload)
392 |
393 | def downloadRequest(self, configurationCode=None, downloadApplication=None, downloads=None, dataPaths=None,
394 | label=None, systemId=None, dataGroups=None, returnAvailable=False):
395 | """
396 | This method is used to insert the requested downloads into the download queue and returns the available download
397 | URLs.
398 | Each ID supplied in the downloads parameter you provide will be returned in one of three elements:
399 | availableDownloads - URLs provided in this list are immediately available; note that these URLs take you to
400 | other distribution systems that may require authentication
401 | preparingDownloads - IDs have been accepted but the URLs are NOT YET available for use
402 | failed - IDs were rejected; see the errorMessage field for an explanation
403 |
404 | Other information is also provided in the response:
405 | newRecords - Includes a downloadId for each element of the downloads parameter that was accepted and a label
406 | that applies to the whole request
407 | duplicateProducts - Requests that duplicate previous requests by the same user; these are not re-added to
408 | the queue and are not included in newRecords
409 | numInvalidScenes - The number of products that could not be found by ID or failed to be requested for any
410 | reason
411 | remainingLimits - The number of remaining downloads to hit the rate limits by user and IP address
412 | limitType - The type of the limits are counted by, the value is either 'user' or 'ip'
413 | username - The user name associated with the request
414 | ipAddress - The IP address associated with the request
415 | recentDownloadCount - The number of downloads requested in the past 15 minutes
416 | pendingDownloadCount - The number of downloads in pending state before they are available for download
417 | unattemptedDownloadCount - The number of downloads in available status but the user has not downloaded
418 | yet
419 |
420 | This API may be online while the distribution systems are unavailable. When this occurs, you will recieve the following
421 | error when requesting products that belong to any of these systems: 'This download has been temporarily disabled.
422 | Please try again at a later time. We apologize for the inconvenience.'. Once the distribution system is back online,
423 | this error will stop occuring and download requests will succeed.
424 |
425 | :param configurationCode: (string) Used to customize the the download routine, primarily for testing.
426 | The valid values include no_data, test, order, order+email and null
427 | :param downloadApplication: (string) Used to denote the application that will perform the download
428 | (default = M2M). Internal use only.
429 | :param downloads: (DownloadInput[]) Used to identify higher level products that this data may be used to create
430 | :param dataPaths: (FilepathDownload[]) Used to identify products by data path, specifically for internal
431 | automation and DDS functionality
432 | :param label: (string) If this value is passed it will overide all individual download label values
433 | :param systemId: (string) Identifies the system submitting the download/order (default = M2M). Internal use only
434 | :param dataGroups: (FilegroupDownload[]) Identifies the products by file groups
435 | :param returnAvailable: THIS MAY BE UNDOCUMENTED PARAMETER
436 | :return: (dict) Response as a dictionary
437 | """
438 | url = f'{self.apiURL}download-request'
439 | json_payload = {"configurationCode": configurationCode,
440 | "downloadApplication": downloadApplication,
441 | "downloads": downloads,
442 | "dataPaths": dataPaths,
443 | "label": label,
444 | "systemId": systemId,
445 | "dataGroups": dataGroups,
446 | "returnAvailable": returnAvailable, # this may be undocumented parameter
447 | }
448 | return self.__send_request_and_check_it(url, json_payload)
449 |
450 | def downloadRetrieve(self, downloadApplication=None, label=None):
451 | """
452 | Returns all available and previously requests but not completed downloads.
453 |
454 | This API may be online while the distribution systems are unavailable. When this occurs, the downloads being fulfilled
455 | by those systems will not appear as available nor are they counted in the 'queueSize' response field.
456 |
457 | :param downloadApplication: (string) Used to denote the application that will perform the download
458 | :param label: (string) Determines which downloads to return
459 | :return: (dict) Response as a dictionary
460 | """
461 | url = f'{self.apiURL}download-retrieve'
462 | json_payload = {"label": label,
463 | "downloadApplication": downloadApplication,
464 | }
465 | return self.__send_request_and_check_it(url, json_payload)
466 |
467 | def downloadSearch(self, activeOnly=None, label=None, downloadApplication=None):
468 | """
469 | This method is used to searche for downloads within the queue, regardless of status, that match the given label.
470 | :param activeOnly: (boolean) Determines if completed, failed, cleared and proxied downloads are returned
471 | :param label: (string) Used to filter downloads by label
472 | :param downloadApplication: (string) Used to filter downloads by the intended downloading application
473 | :return: (dict) Response as a dictionary
474 | """
475 | url = f'{self.apiURL}download-search'
476 | json_payload = {"activeOnly": activeOnly,
477 | "label": label,
478 | "downloadApplication": downloadApplication,
479 | }
480 | return self.__send_request_and_check_it(url, json_payload)
481 |
482 | def downloadSummary(self, downloadApplication, label, sendEmail=None):
483 | """
484 | Gets a summary of all downloads, by dataset, for any matching labels.
485 | :param downloadApplication: (string) Used to denote the application that will perform the download
486 | :param label: (string) Determines which downloads to return
487 | :param sendEmail: (boolean) If set to true, a summary email will also be sent
488 | :return: (dict) Response as a dictionary
489 | """
490 | url = f'{self.apiURL}download-summary'
491 | json_payload = {"downloadApplication": downloadApplication,
492 | "label": label,
493 | "sendEmail": sendEmail,
494 | }
495 | return self.__send_request_and_check_it(url, json_payload)
496 |
497 | def grid2ll(self, gridType, responseShape=None, path=None, row=None):
498 | """
499 | Used to translate between known grids and coordinates.
500 | :param gridType: (string) Which grid system is being used? (WRS1 or WRS2)
501 | :param responseShape: (string) What type of geometry should be returned - a bounding box polygon or a center point? (polygon or point)
502 | :param path: (string) The x coordinate in the grid system
503 | :param row: (string) The y coordinate in the grid system
504 | :return: (dict) Response as a dictionary
505 | """
506 | url = f'{self.apiURL}grid2ll'
507 | json_payload = {"gridType": gridType,
508 | "responseShape": responseShape,
509 | "path": path,
510 | "row": row,
511 | }
512 | return self.__send_request_and_check_it(url, json_payload)
513 |
514 | def login(self, username, password, userContext=None):
515 | """
516 | Upon a successful login, an API key will be returned. This key will be active for two hours and should be
517 | destroyed upon final use of the service by calling the logout method. This request requires an HTTP POST
518 | request instead of a HTTP GET request as a security measure to prevent username and password information
519 | from being logged by firewalls, web servers, etc.
520 | :param username: (string) ERS Username
521 | :param password: (string) ERS Password
522 | :param userContext: (UserContext) Metadata describing the user the request is on behalf of
523 | :return: (dict) Response as a dictionary
524 | """
525 | warn("The `login` endpoint will be deprecated in February 2025. To continue using the API, use the "
526 | "`login-token` endpoint for authentication; additional details can be found here: "
527 | "https://www.usgs.gov/media/files/m2m-application-token-documentation",
528 | DeprecationWarning, stacklevel=2)
529 |
530 | url = f'{self.apiURL}login'
531 | json_payload = {"username": username,
532 | "password": password,
533 | "userContext": userContext,
534 | }
535 | response = requests.post(url, json=json_payload)
536 | _check_response(response)
537 | self.apiKey = response.json()['data']
538 | if self.loud_mode:
539 | print(f'Login successful. API key: {self.apiKey}')
540 | return response.json()
541 |
542 | def loginAppGuest(self, applicationToken, userToken):
543 | """
544 | This endpoint assumes that the calling application has generated a single-use token to complete the
545 | authentication and return an API Key specific to that guest user. All subsequent requests should use the API
546 | Key under the 'X-Auth-Token' HTTP header as the Single Sign-On cookie will not authenticate those requests.
547 | The API Key will be active for two hours, which is restarted after each subsequent request, and should be
548 | destroyed upon final use of the service by calling the logout method.
549 |
550 | The 'appToken' field will be used to verify the 'Referrer' HTTP Header to ensure the request was authentically
551 | sent from the assumed application.
552 | :param applicationToken: (string) The token for the calling application
553 | :param userToken: (string) The single-use token generated for this user
554 | :return: (dict) Response as a dictionary
555 | """
556 | url = f'{self.apiURL}login-app-guest'
557 | json_payload = {"applicationToken": applicationToken,
558 | "userToken": userToken,
559 | }
560 | response = requests.post(url, json=json_payload)
561 | _check_response(response)
562 | self.apiKey = response.json()['data']
563 | if self.loud_mode:
564 | print(f'Login successful. API key: {self.apiKey}')
565 | return response.json()
566 |
567 | def loginSSO(self, userContext):
568 | """
569 | This endpoint assumes that a user has an active ERS Single Sign-On Cookie in their browser or attached to this
570 | request. Authentication will be performed from the Single Sign-On Cookie and return an API Key upon successful
571 | authentication. All subsequent requests should use the API Key under the 'X-Auth-Token' HTTP header as the
572 | Single Sign-On cookie will not authenticate those requests. The API Key will be active for two hours, which is
573 | restarted after each subsequent request, and should be destroyed upon final use of the service by calling the
574 | logout method.
575 | :param userContext: (UserContext) Metadata describing the user the request is on behalf of
576 | :return: (dict) Response as a dictionary
577 | """
578 | url = f'{self.apiURL}login-sso'
579 | json_payload = {"userContext": userContext}
580 | response = requests.post(url, json=json_payload)
581 | _check_response(response)
582 | self.apiKey = response.json()['data']
583 | if self.loud_mode:
584 | print(f'Login successful. API key: {self.apiKey}')
585 | return response.json()
586 |
587 | def loginToken(self, username, token):
588 | """
589 | This login method uses ERS application tokens to allow for authentication that is not directly tied the users
590 | ERS password. Instructions for generating the application token can be found here:
591 | https://www.usgs.gov/media/files/m2m-application-token-documentation
592 | Upon a successful login, an API key will be returned. This key will be active for two hours and should be
593 | destroyed upon final use of the service by calling the logout method.
594 |
595 | This request requires an HTTP POST request instead of a HTTP GET request as a security measure to prevent
596 | username and password information from being logged by firewalls, web servers, etc.
597 | :param username: (string) ERS Username
598 | :param token: (string) Application Token
599 | :return: (dict) Response as a dictionary
600 | """
601 | url = f'{self.apiURL}login-token'
602 | json_payload = {"username": username,
603 | "token": token,
604 | }
605 | response = requests.post(url, json=json_payload)
606 | _check_response(response)
607 | self.apiKey = response.json()['data']
608 | if self.loud_mode:
609 | print(f'Login successful. API key: {self.apiKey}')
610 | return response.json()
611 |
612 | def logout(self):
613 | """
614 | This method is used to remove the users API key from being used in the future.
615 | :return: (dict) Response as a dictionary
616 | """
617 | url = f'{self.apiURL}logout'
618 | response = requests.post(url, headers={'X-Auth-Token': self.apiKey})
619 | _check_response(response)
620 | if self.loud_mode:
621 | print(f'Logout successful. API key destroyed: {self.apiKey}')
622 | return response.json()
623 |
624 | def notifications(self, systemId):
625 | """
626 | Gets a notification list. Note: Few valid systems ids are BDA, DDS, EE, ERS, GVN, HDDS, M2M, etc.
627 | :param systemId: (string) Determines the system you wish to return notifications for
628 | :return: (dict) Response as a dictionary
629 | """
630 | url = f'{self.apiURL}notifications'
631 | json_payload = {"systemId": systemId}
632 | return self.__send_request_and_check_it(url, json_payload)
633 |
634 | def orderProducts(self, datasetName, entityIds=None, listId=None):
635 | """
636 | Gets a list of currently selected products - paginated.
637 | Note: "listId" is the id of the customized list which is built by scene-list-add:
638 | https://m2m.cr.usgs.gov/api/docs/reference/#scene-list-add
639 | :param datasetName: (string) Dataset alias
640 | :param entityIds: (string) List of scenes
641 | :param listId: (string) Used to identify the list of scenes to use
642 | :return: (dict) Response as a dictionary
643 | """
644 | url = f'{self.apiURL}order-products'
645 | json_payload = {"datasetName": datasetName,
646 | "entityIds": entityIds,
647 | "listId": listId,
648 | }
649 | return self.__send_request_and_check_it(url, json_payload)
650 |
651 | def orderSubmit(self, products, autoBulkOrder=None, processingParameters=None, priority=None,
652 | orderComment=None, systemId=None):
653 | """
654 | Submits the current product list as a TRAM order - internally calling tram-order-create.
655 | :param products: (Product[]) Used to identify higher level products that this data may be used to create
656 | :param autoBulkOrder: (boolean) If any products can be bulk ordered as a resulk of completed processing this
657 | option allows users to have orders automatically submitted.
658 | :param processingParameters: (string) Optional processing parameters to send to the processing system
659 | :param priority: (int) Processing Priority
660 | :param orderComment: (string) Optional textual identifier for the order
661 | :param systemId: (string) Identifies the system submitting the order
662 | :return: (dict) Response as a dictionary
663 | """
664 | url = f'{self.apiURL}order-submit'
665 | json_payload = {"products": products,
666 | "autoBulkOrder": autoBulkOrder,
667 | "processingParameters": processingParameters,
668 | "priority": priority,
669 | "orderComment": orderComment,
670 | "systemId": systemId,
671 | }
672 | return self.__send_request_and_check_it(url, json_payload)
673 |
674 | def permissions(self):
675 | """
676 | Returns a list of user permissions for the authenticated user. This method does not accept any input.
677 | :return: (dict) Response as a dictionary
678 | """
679 | url = f'{self.apiURL}permissions'
680 | json_payload = {}
681 | return self.__send_request_and_check_it(url, json_payload)
682 |
683 | # Perhaps the feature description is not yet complete? https://m2m.cr.usgs.gov/api/docs/reference/#placename
684 | def placename(self, featureType=None, name=None):
685 | """
686 | (Description Unavailable)
687 | :param featureType: (string) Type or feature - either US or world
688 | :param name: (string) Name of the feature
689 | :return: (dict) Response as a dictionary
690 | """
691 | url = f'{self.apiURL}placename'
692 | json_payload = {"featureType": featureType,
693 | "name": name,
694 | }
695 | return self.__send_request_and_check_it(url, json_payload)
696 |
697 | def rateLimitSummary(self, ipAddress=None):
698 | """
699 | Returns download rate limits and how many downloads are in each status as well as how close the user is to
700 | reaching the rate limits
701 |
702 | Three elements are provided in the response:
703 |
704 | initialLimits - Includes the initial downloads rate limits
705 | recentDownloadCount - The maximum number of downloads requested in the past 15 minutes
706 | pendingDownloadCount - The maximum number of downloads in pending state before they are available for download
707 | unattemptedDownloadCount - The maximum number of downloads in available status but the user has not downloaded yet
708 | remainingLimits - - Includes downloads that are currently remaining and count towards the rate limits. Users should
709 | be watching out for any of those numbers approaching 0 which means it is close to hitting the rate limits
710 | limitType - The type of the limits are counted by, the value is either 'user' or 'ip'
711 | username - The user name associated with the request
712 | ipAddress - The IP address associated with the request
713 | recentDownloadCount - The number of downloads requested in the past 15 minutes
714 | pendingDownloadCount - The number of downloads in pending state before they are available for download
715 | unattemptedDownloadCount - The number of downloads in available status but the user has not downloaded yet
716 | recentDownloadCounts - Includes the downloads count in each status for the past 15 minutes
717 | countType - The type of the download counts are calculated by, the value is either 'user' or 'ip'
718 | username - The user name associated with the request
719 | ipAddress - The IP address associated with the request
720 | downloadCount - The number of downloads per status in the past 15 minutes
721 |
722 | This API may be online while the distribution systems are unavailable. When this occurs, you will recieve the following
723 | error when requesting products that belong to any of these systems: 'This download has been temporarily disabled.
724 | Please try again at a later time. We apologize for the inconvenience.'. Once the distribution system is back online,
725 | this error will stop occuring and download requests will succeed.
726 | :param ipAddress: (string[]) Used to specify multiple IP address
727 | :return: (dict) Response as a dictionary
728 | """
729 | url = f'{self.apiURL}rate-limit-summary'
730 | json_payload = {"ipAddress": ipAddress}
731 | return self.__send_request_and_check_it(url, json_payload)
732 |
733 | def sceneListAdd(self, listId, datasetName, idField=None, entityId=None, entityIds=None, timeToLive=None,
734 | checkDownloadRestriction=None):
735 | """
736 | Adds items in the given scene list.
737 | :param listId: (string) User defined name for the list
738 | :param datasetName: (string) Dataset alias
739 | :param idField: (string) Used to determine which ID is being used - entityId (default) or displayId
740 | :param entityId: (string) Scene Indentifier
741 | :param entityIds: (string[]) A list of Scene Indentifiers
742 | :param timeToLive: (string) User defined lifetime using ISO-8601 formatted duration (such as "P1M") for the list
743 | :param checkDownloadRestriction: (boolean) Optional parameter to check download restricted access and availability
744 | :return: (dict) Response as a dictionary
745 | """
746 |
747 | url = f'{self.apiURL}scene-list-add'
748 | json_payload = {"listId": listId,
749 | "datasetName": datasetName,
750 | "idField": idField,
751 | "entityId": entityId,
752 | "entityIds": entityIds,
753 | "timeToLive": timeToLive,
754 | "checkDownloadRestriction": checkDownloadRestriction,
755 | }
756 | return self.__send_request_and_check_it(url, json_payload)
757 |
758 | def sceneListGet(self, listId, datasetName=None, startingNumber=None, maxResults=None):
759 | """
760 | Returns items in the given scene list.
761 | :param listId: (string) User defined name for the list
762 | :param datasetName: (string) Dataset alias
763 | :param startingNumber: (int) Used to identify the start number to search from
764 | :param maxResults: (int) How many results should be returned?
765 | :return: (dict) Response as a dictionary
766 | """
767 | url = f'{self.apiURL}scene-list-get'
768 | json_payload = {"listId": listId,
769 | "datasetName": datasetName,
770 | "startingNumber": startingNumber,
771 | "maxResults": maxResults,
772 | }
773 | return self.__send_request_and_check_it(url, json_payload)
774 |
775 | def sceneListRemove(self, listId, datasetName=None, entityId=None, entityIds=None):
776 | """
777 | Removes items from the given list. If no datasetName is provided, the call removes the whole list. If a
778 | datasetName is provided but no entityId, this call removes that dataset with all its IDs. If a datasetName and
779 | entityId(s) are provided, the call removes the ID(s) from the dataset.
780 | :param listId: (string) User defined name for the list
781 | :param datasetName: (string) Dataset alias
782 | :param entityId: (string) Scene Indentifier
783 | :param entityIds: (string[]) A list of Scene Indentifiers
784 | :return: (dict) Response as a dictionary
785 | """
786 | url = f'{self.apiURL}scene-list-remove'
787 | json_payload = {"listId": listId,
788 | "datasetName": datasetName,
789 | "entityId": entityId,
790 | "entityIds": entityIds,
791 | }
792 | return self.__send_request_and_check_it(url, json_payload)
793 |
794 | def sceneListSummary(self, listId, datasetName=None):
795 | """
796 | Returns summary information for a given list.
797 | :param listId: (string) User defined name for the list
798 | :param datasetName: (string) Dataset alias
799 | :return: (dict) Response as a dictionary
800 | """
801 | url = f'{self.apiURL}scene-list-summary'
802 | json_payload = {"listId": listId,
803 | "datasetName": datasetName,
804 | }
805 | return self.__send_request_and_check_it(url, json_payload)
806 |
807 | def sceneListTypes(self, listFilter=None):
808 | """
809 | Returns scene list types (exclude, search, order, bulk, etc).
810 | :param listFilter: (string) If provided, only returns listIds that have the provided filter value within the ID
811 | :return: (dict) Response as a dictionary
812 | """
813 | url = f'{self.apiURL}scene-list-types'
814 | json_payload = {"listFilter": listFilter}
815 | return self.__send_request_and_check_it(url, json_payload)
816 |
817 | def sceneMetadata(self, datasetName, entityId, idType=None, metadataType=None, includeNullMetadataValues=None,
818 | useCustomization=None):
819 | """
820 | This request is used to return metadata for a given scene.
821 | :param datasetName: (string) Used to identify the dataset to search
822 | :param entityId: (string) Used to identify the scene to return results for
823 | :param idType: (string) If populated, identifies which ID field (entityId, displayId or orderingId) to use when
824 | searching for the provided entityId (default = entityId)
825 | :param metadataType: (string) If populated, identifies which metadata to return (summary, full, fgdc, iso)
826 | :param includeNullMetadataValues: (boolean) Optional parameter to include null metadata values
827 | :param useCustomization: (boolean) Optional parameter to display metadata view as per user customization
828 | :return: (dict) Response as a dictionary
829 | """
830 | url = f'{self.apiURL}scene-metadata'
831 | json_payload = {"datasetName": datasetName,
832 | "entityId": entityId,
833 | "idType": idType,
834 | "metadataType": metadataType,
835 | "includeNullMetadataValues": includeNullMetadataValues,
836 | "useCustomization": useCustomization,
837 | }
838 | return self.__send_request_and_check_it(url, json_payload)
839 |
840 | def sceneMetadataList(self, listId, datasetName=None, metadataType=None, includeNullMetadataValues=None,
841 | useCustomization=None):
842 | """
843 | Scene Metadata where the input is a pre-set list.
844 | :param datasetName: (string) Used to identify the dataset to search
845 | :param listId: (string) Used to identify the list of scenes to use
846 | :param metadataType: (string) If populated, identifies which metadata to return (summary or full)
847 | :param includeNullMetadataValues: (boolean) Optional parameter to include null metadata values
848 | :param useCustomization: (boolean) Optional parameter to display metadata view as per user customization
849 | :return: (dict) Response as a dictionary
850 | """
851 | url = f'{self.apiURL}scene-metadata-list'
852 | json_payload = {"datasetName": datasetName,
853 | "listId": listId,
854 | "metadataType": metadataType,
855 | "includeNullMetadataValues": includeNullMetadataValues,
856 | "useCustomization": useCustomization,
857 | }
858 | return self.__send_request_and_check_it(url, json_payload)
859 |
860 | def sceneMetadataXML(self, datasetName, entityId, metadataType=None):
861 | """
862 | Returns metadata formatted in XML, ahering to FGDC, ISO and EE scene metadata formatting standards.
863 | :param datasetName: (string) Used to identify the dataset to search
864 | :param entityId: (string) Used to identify the scene to return results for
865 | :param metadataType: (string) If populated, identifies which metadata to return (full, fgdc, iso)
866 | :return: (dict) Response as a dictionary
867 | """
868 | url = f'{self.apiURL}scene-metadata-xml'
869 | json_payload = {"datasetName": datasetName,
870 | "entityId": entityId,
871 | "metadataType": metadataType,
872 | }
873 | return self.__send_request_and_check_it(url, json_payload)
874 |
875 | def sceneSearch(self, datasetName, maxResults=None, startingNumber=None, metadataType=None, sortField=None,
876 | sortDirection=None, sortCustomization=None, useCustomization=None, sceneFilter=None,
877 | compareListName=None, bulkListName=None, orderListName=None, excludeListName=None,
878 | includeNullMetadataValues=None):
879 | """
880 | Searching is done with limited search criteria. All coordinates are assumed decimal-degree format. If lowerLeft
881 | or upperRight are supplied, then both must exist in the request to complete the bounding box. Starting and
882 | ending dates, if supplied, are used as a range to search data based on acquisition dates. The current
883 | implementation will only search at the date level, discarding any time information. If data in a given dataset
884 | is composite data, or data acquired over multiple days, a search will be done to match any intersection of the
885 | acquisition range. There currently is a 50,000 scene limit for the number of results that are returned, however,
886 | some client applications may encounter timeouts for large result sets for some datasets. To use the sceneFilter
887 | field, pass one of the four search filter objects (SearchFilterAnd, SearchFilterBetween, SearchFilterOr,
888 | SearchFilterValue) in JSON format with sceneFilter being the root element of the object.
889 |
890 | The response of this request includes a 'totalHits' response parameter that indicates the total number of scenes
891 | that match the search query to allow for pagination. Due to this, searches without a 'sceneFilter' parameter can
892 | take much longer to execute. To minimize this impact we use a cached scene count for 'totalHits' instead of
893 | computing the actual row count. An additional field, 'totalHitsAccuracy', is also included in the response to
894 | indicate if the 'totalHits' value was computed based off the query or using an approximated value. This does not
895 | impact the users ability to access these results via pagination. This cached value is updated daily for all
896 | datasets with active data ingests. Ingest frequency for each dataset can be found using the 'ingestFrequency'
897 | field in the dataset, dataset-categories and dataset-search endpoint responses.
898 | :param datasetName: (string) Used to identify the dataset to search
899 | :param maxResults: (int) Used to identify the dataset to search
900 | :param startingNumber: (int) Used to identify the dataset to search
901 | :param metadataType: (string) If populated, identifies which metadata to return (summary or full)
902 | :param sortField: (string) Determines which field to sort the results on
903 | :param sortDirection: (string) Determines how the results should be sorted - ASC or DESC
904 | :param sortCustomization: (SortCustomization) Used to pass in custom sorts
905 | :param useCustomization: (boolean) Optional parameter to indicate whether to use customization
906 | :param sceneFilter: (SceneFilter) Used to filter data within the dataset
        :param compareListName: (string) If provided, defines a scene-list listId to use to track scenes selected for comparison
        :param bulkListName: (string) If provided, defines a scene-list listId to use to track scenes selected for bulk ordering
        :param orderListName: (string) If provided, defines a scene-list listId to use to track scenes selected for on-demand ordering
        :param excludeListName: (string) If provided, defines a scene-list listId to use to exclude scenes from the results
911 | :param includeNullMetadataValues: (boolean) Optional parameter to include null metadata values
912 | :return: (dict) Response as a dictionary
913 | """
914 | url = f'{self.apiURL}scene-search'
915 | json_payload = {"datasetName": datasetName,
916 | "maxResults": maxResults,
917 | "startingNumber": startingNumber,
918 | "metadataType": metadataType,
919 | "sortField": sortField,
920 | "sortDirection": sortDirection,
921 | "sortCustomization": sortCustomization,
922 | "useCustomization": useCustomization,
923 | "sceneFilter": sceneFilter,
924 | "compareListName": compareListName,
925 | "bulkListName": bulkListName,
926 | "orderListName": orderListName,
927 | "excludeListName": excludeListName,
928 | "includeNullMetadataValues": includeNullMetadataValues,
929 | }
930 | return self.__send_request_and_check_it(url, json_payload)
931 |
932 | def sceneSearchDelete(self, datasetName, maxResults=None, startingNumber=None, sortField=None, sortDirection=None,
933 | temporalFilter=None):
934 | """
935 | This method is used to detect deleted scenes from datasets that support it. Supported datasets are determined by
936 | the 'supportDeletionSearch' parameter in the 'datasets' response. There currently is a 50,000 scene limit for
937 | the number of results that are returned, however, some client applications may encounter timeouts for large
938 | result sets for some datasets.
939 | :param datasetName: (string) Used to identify the dataset to search
940 | :param maxResults: (int) Used to identify the dataset to search
941 | :param startingNumber: (int) Used to identify the dataset to search
942 | :param sortField: (string) Determines which field to sort the results on
943 | :param sortDirection: (string) Determines how the results should be sorted - ASC or DESC
944 | :param temporalFilter: (TemporalFilter) Used to filter data based on data acquisition
945 | :return: (dict) Response as a dictionary
946 | """
947 | url = f'{self.apiURL}scene-search-delete'
948 | json_payload = {"datasetName": datasetName,
949 | "maxResults": maxResults,
950 | "startingNumber": startingNumber,
951 | "sortField": sortField,
952 | "sortDirection": sortDirection,
953 | "temporalFilter": temporalFilter
954 | }
955 | return self.__send_request_and_check_it(url, json_payload)
956 |
957 | def sceneSearchSecondary(self, entityId, datasetName, maxResults=None, startingNumber=None, metadataType=None,
958 | sortField=None, sortDirection=None, compareListName=None, bulkListName=None,
959 | orderListName=None, excludeListName=None):
960 | """
961 | This method is used to find the related scenes for a given scene.
962 | :param entityId: (string) Used to identify the scene to find related scenes for
963 | :param datasetName: (string) Used to identify the dataset to search
964 | :param maxResults: (int) Used to identify the dataset to search
965 | :param startingNumber: (int) Used to identify the dataset to search
966 | :param metadataType: (string) If populated, identifies which metadata to return (summary or full)
967 | :param sortField: (string) Determines which field to sort the results on
968 | :param sortDirection: (string) Determines how the results should be sorted - ASC or DESC
969 | :param compareListName: (string) If provided, defined a scene-list listId to use to track scenes selected for comparison
970 | :param bulkListName: (string) If provided, defined a scene-list listId to use to track scenes selected for bulk ordering
971 | :param orderListName: (string) If provided, defined a scene-list listId to use to track scenes selected for on-demand ordering
972 | :param excludeListName: (string) If provided, defined a scene-list listId to use to exclude scenes from the results
973 | :return: (dict) Response as a dictionary
974 | """
975 | url = f'{self.apiURL}scene-search-secondary'
976 | json_payload = {"entityId": entityId,
977 | "datasetName": datasetName,
978 | "maxResults": maxResults,
979 | "startingNumber": startingNumber,
980 | "metadataType": metadataType,
981 | "sortField": sortField,
982 | "sortDirection": sortDirection,
983 | "compareListName": compareListName,
984 | "bulkListName": bulkListName,
985 | "orderListName": orderListName,
986 | "excludeListName": excludeListName,
987 |
988 | }
989 | return self.__send_request_and_check_it(url, json_payload)
990 |
991 | def tramOrderDetailUpdate(self, orderNumber, detailKey, detailValue):
992 | """
993 | This method is used to set metadata for an order.
994 | :param orderNumber: (string) The order ID for the order to update
995 | :param detailKey: (string) The system detail key
996 | :param detailValue: (string) The value to store under the detailKey
997 | :return: (dict) Response as a dictionary
998 | """
999 | url = f'{self.apiURL}tram-order-detail-update'
1000 | json_payload = {"orderNumber": orderNumber,
1001 | "detailKey": detailKey,
1002 | "detailValue": detailValue,
1003 | }
1004 | return self.__send_request_and_check_it(url, json_payload)
1005 |
1006 | def tramOrderDetails(self, orderNumber):
1007 | """
1008 | This method is used to set metadata for an order.
1009 | :param orderNumber: (string) The order ID to get details for
1010 | :return: (dict) Response as a dictionary
1011 | """
1012 | url = f'{self.apiURL}tram-order-details'
1013 | json_payload = {"orderNumber": orderNumber}
1014 | return self.__send_request_and_check_it(url, json_payload)
1015 |
1016 | def tramOrderDetailsClear(self, orderNumber):
1017 | """
1018 | This method is used to clear all metadata within an order.
1019 | :param orderNumber: (string) The order ID to clear details for
1020 | :return: (dict) Response as a dictionary
1021 | """
1022 | url = f'{self.apiURL}tram-order-details-clear'
1023 | json_payload = {"orderNumber": orderNumber}
1024 | return self.__send_request_and_check_it(url, json_payload)
1025 |
1026 | def tramOrderDetailsRemove(self, orderNumber, detailKey):
1027 | """
1028 | This method is used to clear all metadata within an order.
1029 | :param orderNumber: (string) The order ID to clear details for
1030 | :param detailKey: (string) The system detail key
1031 | :return: (dict) Response as a dictionary
1032 | """
1033 | url = f'{self.apiURL}tram-order-details-remove'
1034 | json_payload = {"orderNumber": orderNumber,
1035 | "detailKey": detailKey,
1036 | }
1037 | return self.__send_request_and_check_it(url, json_payload)
1038 |
1039 | def tramOrderSearch(self, orderId=None, maxResults=None, systemId=None, sortAsc=None, sortField=None,
1040 | statusFilter=None):
1041 | """
1042 | Search TRAM orders.
1043 | :param orderId: (string) The order ID to get status for (accepts '%' wildcard)
1044 | :param maxResults: (int) How many results should be returned on each page? (default = 25)
1045 | :param systemId: (string) Limit results based on the application that order was submitted from
1046 | :param sortAsc: (boolean) True for ascending results, false for descending results
1047 | :param sortField: (string) Which field should sorting be done on? (order_id, date_entered or date_updated)
1048 | :param statusFilter: (string[]) An array of status codes to
1049 | :return: (dict) Response as a dictionary
1050 | """
1051 | url = f'{self.apiURL}tram-order-search'
1052 | json_payload = {"orderId": orderId,
1053 | "maxResults": maxResults,
1054 | "systemId": systemId,
1055 | "sortAsc": sortAsc,
1056 | "sortField": sortField,
1057 | "statusFilter": statusFilter,
1058 | }
1059 | return self.__send_request_and_check_it(url, json_payload)
1060 |
1061 | def tramOrderStatus(self, orderNumber):
1062 | """
1063 | Gets the status of a TRAM order.
1064 | :param orderNumber: (string) The order ID to get status for
1065 | :return: (dict) Response as a dictionary
1066 | """
1067 | url = f'{self.apiURL}tram-order-status'
1068 | json_payload = {"orderNumber": orderNumber}
1069 | return self.__send_request_and_check_it(url, json_payload)
1070 |
1071 | def tramOrderUnits(self, orderNumber):
1072 | """
1073 | Lists units for a specified order.
1074 | :param orderNumber: (string) The order ID to get units for
1075 | :return: (dict) Response as a dictionary
1076 | """
1077 | url = f'{self.apiURL}tram-order-units'
1078 | json_payload = {"orderNumber": orderNumber}
1079 | return self.__send_request_and_check_it(url, json_payload)
1080 |
1081 | def userPreferenceGet(self, systemId=None, setting=None):
1082 | """
1083 | This method is used to retrieve user's preference settings.
1084 | :param systemId: (string) Used to identify which system to return preferences for. If null it will return all
1085 | the users preferences
1086 | :param setting: (string[]) If populated, identifies which setting(s) to return
1087 | :return: (dict) Response as a dictionary
1088 | """
1089 | url = f'{self.apiURL}user-preference-get'
1090 | json_payload = {"systemId": systemId,
1091 | "setting": setting,
1092 | }
1093 | return self.__send_request_and_check_it(url, json_payload)
1094 |
1095 | def userPreferenceSet(self, systemId, userPreferences):
1096 | """
1097 | This method is used to create or update user's preferences.
1098 | :param systemId: (string) Used to identify which system the preferences are for.
1099 | :param userPreferences: (string[]) Used to set user preferences for various systems.
1100 | :return: (dict) Response as a dictionary
1101 | """
1102 | url = f'{self.apiURL}user-preference-set'
1103 | json_payload = {"systemId": systemId,
1104 | "userPreferences": userPreferences,
1105 | }
1106 | return self.__send_request_and_check_it(url, json_payload)
1107 |
--------------------------------------------------------------------------------