├── .github
├── FUNDING.yml
└── workflows
│ ├── install-test-conda-forge.yml
│ ├── install-test-pypi.yml
│ ├── install-test-testpypi.yml
│ ├── publish-pypi.yml
│ ├── publish-testpypi.yml
│ ├── run-integration-tests.yml
│ └── run-unit-tests.yml
├── .readthedocs.yaml
├── .style.yapf
├── LICENSE
├── README.rst
├── docs
├── Makefile
├── api.rst
├── cli.rst
├── cloud_coll_rtd.rst
├── conf.py
├── contributing.rst
├── description.rst
├── examples
│ ├── api_getting_started.py
│ ├── l7_composite.ipynb
│ └── s2_composite.ipynb
├── index.rst
├── installation.rst
├── make.bat
└── requirements.txt
├── geedim
├── __init__.py
├── cli.py
├── collection.py
├── data
│ └── ee_stac_urls.json
├── download.py
├── enums.py
├── errors.py
├── mask.py
├── medoid.py
├── schema.py
├── stac.py
├── tile.py
├── utils.py
└── version.py
├── meta.yaml
├── pyproject.toml
└── tests
├── __init__.py
├── conftest.py
├── data
├── const_image_25ha.tif
├── region.geojson
├── region_10000ha.geojson
├── region_100ha.geojson
└── region_25ha.geojson
├── integration.py
├── test_cli.py
├── test_collection.py
├── test_download.py
├── test_mask.py
├── test_stac.py
├── test_tile.py
└── test_utils.py
/.github/FUNDING.yml:
--------------------------------------------------------------------------------
1 | # These are supported funding model platforms
2 |
3 | github: [leftfield-geospatial] # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2]
4 | patreon: # Replace with a single Patreon username
5 | open_collective: # Replace with a single Open Collective username
6 | ko_fi: # Replace with a single Ko-fi username
7 | tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
8 | community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
9 | liberapay: # Replace with a single Liberapay username
10 | issuehunt: # Replace with a single IssueHunt username
11 | otechie: # Replace with a single Otechie username
12 | lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
13 | custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
14 |
--------------------------------------------------------------------------------
/.github/workflows/install-test-conda-forge.yml:
--------------------------------------------------------------------------------
1 | # Test installing from conda-forge
2 |
3 | name: Install & test conda-forge package
4 | on:
5 | workflow_dispatch:
6 |
7 | jobs:
8 | test:
9 | runs-on: ${{ matrix.os }}
10 | defaults:
11 | run:
12 | shell: bash -el {0}
13 | strategy:
14 | fail-fast: false
15 | matrix:
16 | os: [ macos-latest, ubuntu-latest, windows-latest ]
17 | python-version: [ '3.8', '3.11', '3.12' ]
18 | steps:
19 |
20 | - name: Set up conda
21 | uses: conda-incubator/setup-miniconda@v3
22 | with:
23 | python-version: ${{ matrix.python-version }}
24 | channels: conda-forge
25 | channel-priority: strict
26 | activate-environment: geedim-test
27 | conda-solver: libmamba
28 |
29 | - name: Install package
30 | run: |
31 | conda info
32 |         conda install 'geedim>=1.9.1'
33 | conda list
34 |
35 | - name: Run tests
36 | timeout-minutes: 5
37 | env:
38 | EE_SERVICE_ACC_PRIVATE_KEY: ${{ secrets.EE_SERVICE_ACC_PRIVATE_KEY }}
39 | run: |
40 | geedim --version
41 | geedim --help
42 | geedim search --help
43 | geedim search -c l8-c2-l2 -s 2019-02-01 -e 2019-03-01 --bbox 23 -33 23.2 -33.2 composite -cm q-mosaic --mask download --scale 30 --crs EPSG:3857 -o
44 |         test -f ./LANDSAT-LC08-C02-T1_L2-2019_02_04-2019_02_20-Q-MOSAIC-COMP.tif && echo "Test OK"
45 | pwd
46 | ls -R
47 |
--------------------------------------------------------------------------------
/.github/workflows/install-test-pypi.yml:
--------------------------------------------------------------------------------
1 | # Test installing and running the released package from PyPI
2 |
3 | name: Install & test PyPI package
4 | on:
5 | workflow_dispatch:
6 |
7 | jobs:
8 |   # Install geedim from PyPI and test CLI
9 | test:
10 | runs-on: ${{ matrix.os }}
11 | strategy:
12 | fail-fast: false
13 | matrix:
14 | os: [ macos-latest, ubuntu-latest, windows-latest ]
15 | python-version: [ '3.8', '3.10', '3.11', '3.12' ]
16 | steps:
17 | - name: Set up Python
18 | uses: actions/setup-python@v5
19 | with:
20 | python-version: ${{ matrix.python-version }}
21 | - name: Install geedim from PyPI
22 | run: |
23 | python -m pip install --upgrade pip
24 | python -m pip install --upgrade geedim
25 | - name: Test geedim CLI
26 | timeout-minutes: 5
27 | env:
28 | EE_SERVICE_ACC_PRIVATE_KEY: ${{ secrets.EE_SERVICE_ACC_PRIVATE_KEY }}
29 | shell: bash
30 | run: |
31 | geedim --version
32 | geedim --help
33 | geedim search --help
34 | geedim search -c l8-c2-l2 -s 2019-02-01 -e 2019-03-01 --bbox 23 -33 23.2 -33.2 composite -cm q-mosaic --mask download --scale 30 --crs EPSG:3857 -o
35 |           test -f LANDSAT-LC08-C02-T1_L2-2019_02_04-2019_02_20-Q-MOSAIC-COMP.tif && echo "Test OK"
36 | pwd
37 | ls -R
38 |
--------------------------------------------------------------------------------
/.github/workflows/install-test-testpypi.yml:
--------------------------------------------------------------------------------
1 | # Test installing and running the package from Test PyPI
2 |
3 | name: Install & test TestPyPI package
4 | on:
5 | workflow_dispatch:
6 |
7 | jobs:
8 | # Install geedim from Test PyPI and test CLI
9 | test:
10 | runs-on: ${{ matrix.os }}
11 | strategy:
12 | fail-fast: false
13 | matrix:
14 | os: [ macos-latest, ubuntu-latest, windows-latest ]
15 | python-version: [ '3.8', '3.10', '3.11', '3.12' ]
16 | steps:
17 | - name: Set up Python
18 | uses: actions/setup-python@v5
19 | with:
20 | python-version: ${{ matrix.python-version }}
21 | - name: Install geedim from Test PyPI
22 | run: |
23 | python -m pip install --upgrade pip
24 | python -m pip install --extra-index-url https://test.pypi.org/simple/ --upgrade geedim
25 | - name: Test geedim CLI
26 | timeout-minutes: 5
27 | env:
28 | EE_SERVICE_ACC_PRIVATE_KEY: ${{ secrets.EE_SERVICE_ACC_PRIVATE_KEY }}
29 | run: |
30 | geedim --version
31 | geedim --help
32 | geedim search --help
33 | geedim search -c l8-c2-l2 -s 2019-02-01 -e 2019-03-01 --bbox 23 -33 23.2 -33.2 composite -cm q-mosaic --mask download --scale 30 --crs EPSG:3857 -o
34 |           python -c "import os, sys; sys.exit(0 if os.path.isfile('LANDSAT-LC08-C02-T1_L2-2019_02_04-2019_02_20-Q-MOSAIC-COMP.tif') else 1)" && echo "Test OK"
35 | pwd
36 | ls -R
37 |
--------------------------------------------------------------------------------
/.github/workflows/publish-pypi.yml:
--------------------------------------------------------------------------------
1 | # Build geedim package and publish to PyPI
2 |
3 | name: Publish
4 | on:
5 | release:
6 | types: [published]
7 | workflow_dispatch:
8 |
9 | jobs:
10 | publish:
11 | runs-on: ubuntu-latest
12 | steps:
13 | - uses: actions/checkout@v4
14 | - name: Set up Python
15 | uses: actions/setup-python@v5
16 | with:
17 | python-version: '3.x'
18 | - name: Install dependencies
19 | run: |
20 | python -m pip install --upgrade pip
21 | pip install build twine
22 | - name: Build package
23 | run: python -m build
24 | - name: Publish package to PyPI
25 | uses: pypa/gh-action-pypi-publish@release/v1
26 | with:
27 | user: __token__
28 | password: ${{ secrets.PYPI_TOKEN }}
29 |
--------------------------------------------------------------------------------
/.github/workflows/publish-testpypi.yml:
--------------------------------------------------------------------------------
1 | # Test publishing to, and installing and running with Test PyPI
2 |
3 | name: Build & publish to Test PyPI
4 | on:
5 | workflow_dispatch:
6 |
7 | jobs:
8 | # Build geedim package and upload to Test PyPI
9 | publish:
10 | runs-on: ubuntu-latest
11 | steps:
12 | - uses: actions/checkout@v4
13 | - name: Set up Python
14 | uses: actions/setup-python@v5
15 | with:
16 | python-version: '3.x'
17 | - name: Install dependencies
18 | run: |
19 | python -m pip install --upgrade pip
20 | pip install build twine
21 | - name: Build package
22 | run: python -m build
23 | - name: Publish package to TestPyPI
24 | uses: pypa/gh-action-pypi-publish@release/v1
25 | continue-on-error: true
26 | with:
27 | user: __token__
28 | password: ${{ secrets.TEST_PYPI_TOKEN }}
29 | repository-url: https://test.pypi.org/legacy/
30 | - name: Clean up
31 | run: rm -Rf *
32 |
33 | # Install geedim from Test PyPI and test CLI
34 | test:
35 | needs: publish
36 | runs-on: ${{ matrix.os }}
37 | strategy:
38 | fail-fast: false
39 | matrix:
40 | os: [ macos-latest, ubuntu-latest, windows-latest ]
41 | python-version: ['3.12']
42 | steps:
43 | - name: Set up Python
44 | uses: actions/setup-python@v5
45 | with:
46 | python-version: ${{ matrix.python-version }}
47 | - name: Install geedim from Test PyPI
48 | run: |
49 | python -m pip install --upgrade pip
50 | python -m pip install --extra-index-url https://test.pypi.org/simple/ --upgrade geedim
51 | - name: Test geedim CLI
52 | timeout-minutes: 5
53 | env:
54 | EE_SERVICE_ACC_PRIVATE_KEY: ${{ secrets.EE_SERVICE_ACC_PRIVATE_KEY }}
55 | run: |
56 | geedim search -c l8-c2-l2 -s 2019-02-01 -e 2019-03-01 --bbox 23 -33 23.2 -33.2 composite -cm q-mosaic --mask download --scale 30 --crs EPSG:3857 -o
57 | pwd
58 | ls -R
59 |
--------------------------------------------------------------------------------
/.github/workflows/run-integration-tests.yml:
--------------------------------------------------------------------------------
1 |
2 | name: Test integration
3 |
4 | on:
5 | workflow_dispatch:
6 |
7 | jobs:
8 | test-pypi:
9 | runs-on: ${{ matrix.os }}
10 | strategy:
11 | fail-fast: false
12 | max-parallel: 2
13 | matrix:
14 | os: [ macos-latest, ubuntu-latest, windows-latest ]
15 | python-version: [ '3.8', '3.12' ]
16 |
17 | steps:
18 | - name: Check out repository
19 | uses: actions/checkout@v4
20 |
21 | - name: Set up Python
22 | uses: actions/setup-python@v5
23 | with:
24 | python-version: ${{ matrix.python-version }}
25 | cache: 'pip'
26 |
27 | - name: Install dependencies
28 | run: |
29 | python -m pip install --upgrade pip
30 | python -m pip install pytest pytest-xdist
31 | python -m pip install earthengine-api click rasterio tqdm tabulate
32 |
33 | - name: Run integration tests
34 | timeout-minutes: 10
35 | env:
36 | EE_SERVICE_ACC_PRIVATE_KEY: ${{ secrets.EE_SERVICE_ACC_PRIVATE_KEY }}
37 | run: |
38 | python -m pytest -n auto ./tests/integration.py
39 |
40 | test-conda:
41 |     needs: test-pypi  # run after test-pypi to limit concurrent ee requests
42 | runs-on: ${{ matrix.os }}
43 | defaults:
44 | run:
45 | shell: bash -el {0}
46 | strategy:
47 | fail-fast: false
48 | max-parallel: 2
49 | matrix:
50 | os: [ macos-latest, ubuntu-latest, windows-latest ]
51 | python-version: [ '3.8', '3.12' ]
52 |
53 | steps:
54 | - name: Check out repository
55 | uses: actions/checkout@v4
56 |
57 | - name: Set up conda
58 | uses: conda-incubator/setup-miniconda@v3
59 | with:
60 | python-version: ${{ matrix.python-version }}
61 | channels: conda-forge
62 | channel-priority: strict
63 | activate-environment: geedim-test
64 | conda-solver: libmamba
65 |
66 | - name: Install dependencies
67 | run: |
68 | conda info
69 | conda install pytest pytest-xdist
70 | conda install earthengine-api click rasterio tqdm tabulate
71 |
72 | - name: Run integration tests
73 | timeout-minutes: 10
74 | env:
75 | EE_SERVICE_ACC_PRIVATE_KEY: ${{ secrets.EE_SERVICE_ACC_PRIVATE_KEY }}
76 | run: |
77 | python -m pytest -n auto ./tests/integration.py
78 |
--------------------------------------------------------------------------------
/.github/workflows/run-unit-tests.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions
2 |
3 | name: Tests
4 |
5 | on:
6 | push:
7 | branches: [ main ]
8 | pull_request:
9 | branches: [ main ]
10 | workflow_dispatch:
11 | schedule:
12 | - cron: '0 2 * * 1' # run weekly
13 |
14 | jobs:
15 | test:
16 | runs-on: ${{ matrix.os }}
17 | strategy:
18 | fail-fast: false
19 | matrix:
20 | python-version: ['3.8', '3.12']
21 | os: [ macos-latest, ubuntu-latest, windows-latest ]
22 |
23 | steps:
24 | - name: Check out repository
25 | uses: actions/checkout@v4
26 |
27 | - name: Set up Python ${{ matrix.python-version }}
28 | uses: actions/setup-python@v5
29 | with:
30 | python-version: ${{ matrix.python-version }}
31 | cache: 'pip'
32 |
33 | - name: Install dependencies
34 | run: |
35 | python -m pip install --upgrade pip
36 | python -m pip install flake8 pytest pytest-xdist pytest-cov
37 | python -m pip install earthengine-api click rasterio tqdm tabulate
38 |
39 | - name: Lint with flake8
40 | run: |
41 | # stop the build if there are Python syntax errors or undefined names
42 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
43 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
44 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
45 |
46 | - name: Test with pytest
47 | timeout-minutes: 10
48 | env:
49 | EE_SERVICE_ACC_PRIVATE_KEY: ${{ secrets.EE_SERVICE_ACC_PRIVATE_KEY }}
50 | run: |
51 | python -m pytest -n auto --durations=10 --cov=geedim --cov-report=term-missing --cov-report=xml:coverage.xml ./tests
52 |
53 | - name: Upload coverage
54 | uses: codecov/codecov-action@v4
55 | with:
56 | fail_ci_if_error: true
57 | files: ./coverage.xml
58 | token: ${{ secrets.CODECOV_TOKEN }}
59 | verbose: true
60 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | sphinx:
4 | configuration: docs/conf.py
5 |
6 | build:
7 | os: ubuntu-22.04
8 | tools:
9 | python: "3.11"
10 |
11 | python:
12 | install:
13 | - requirements: docs/requirements.txt
14 | - method: pip
15 | path: .
16 |
--------------------------------------------------------------------------------
/.style.yapf:
--------------------------------------------------------------------------------
1 | [style]
2 | # Align closing bracket with visual indentation.
3 | align_closing_bracket_with_visual_indent=False
4 |
5 | # Allow dictionary keys to exist on multiple lines. For example:
6 | #
7 | # x = {
8 | # ('this is the first element of a tuple',
9 | # 'this is the second element of a tuple'):
10 | # value,
11 | # }
12 | allow_multiline_dictionary_keys=False
13 |
14 | # Allow lambdas to be formatted on more than one line.
15 | allow_multiline_lambdas=False
16 |
17 | # Allow splitting before a default / named assignment in an argument list.
18 | allow_split_before_default_or_named_assigns=True
19 |
20 | # Allow splits before the dictionary value.
21 | allow_split_before_dict_value=False
22 |
23 | # Let spacing indicate operator precedence. For example:
24 | #
25 | # a = 1 * 2 + 3 / 4
26 | # b = 1 / 2 - 3 * 4
27 | # c = (1 + 2) * (3 - 4)
28 | # d = (1 - 2) / (3 + 4)
29 | # e = 1 * 2 - 3
30 | # f = 1 + 2 + 3 + 4
31 | #
32 | # will be formatted as follows to indicate precedence:
33 | #
34 | # a = 1*2 + 3/4
35 | # b = 1/2 - 3*4
36 | # c = (1+2) * (3-4)
37 | # d = (1-2) / (3+4)
38 | # e = 1*2 - 3
39 | # f = 1 + 2 + 3 + 4
40 | #
41 | arithmetic_precedence_indication=False
42 |
43 | # Number of blank lines surrounding top-level function and class
44 | # definitions.
45 | blank_lines_around_top_level_definition=2
46 |
47 | # Number of blank lines between top-level imports and variable
48 | # definitions.
49 | blank_lines_between_top_level_imports_and_variables=1
50 |
51 | # Insert a blank line before a class-level docstring.
52 | blank_line_before_class_docstring=False
53 |
54 | # Insert a blank line before a module docstring.
55 | blank_line_before_module_docstring=False
56 |
57 | # Insert a blank line before a 'def' or 'class' immediately nested
58 | # within another 'def' or 'class'. For example:
59 | #
60 | # class Foo:
61 | # # <------ this blank line
62 | # def method():
63 | # ...
64 | blank_line_before_nested_class_or_def=True
65 |
66 | # Do not split consecutive brackets. Only relevant when
67 | # dedent_closing_brackets is set. For example:
68 | #
69 | # call_func_that_takes_a_dict(
70 | # {
71 | # 'key1': 'value1',
72 | # 'key2': 'value2',
73 | # }
74 | # )
75 | #
76 | # would reformat to:
77 | #
78 | # call_func_that_takes_a_dict({
79 | # 'key1': 'value1',
80 | # 'key2': 'value2',
81 | # })
82 | coalesce_brackets=False
83 |
84 | # The column limit.
85 | column_limit=120
86 |
87 | # The style for continuation alignment. Possible values are:
88 | #
89 | # - SPACE: Use spaces for continuation alignment. This is default behavior.
90 | # - FIXED: Use fixed number (CONTINUATION_INDENT_WIDTH) of columns
91 | # (ie: CONTINUATION_INDENT_WIDTH/INDENT_WIDTH tabs or
92 | # CONTINUATION_INDENT_WIDTH spaces) for continuation alignment.
93 | # - VALIGN-RIGHT: Vertically align continuation lines to multiple of
94 | # INDENT_WIDTH columns. Slightly right (one tab or a few spaces) if
95 | # cannot vertically align continuation lines with indent characters.
96 | continuation_align_style=SPACE
97 |
98 | # Indent width used for line continuations.
99 | continuation_indent_width=4
100 |
101 | # Put closing brackets on a separate line, dedented, if the bracketed
102 | # expression can't fit in a single line. Applies to all kinds of brackets,
103 | # including function definitions and calls. For example:
104 | #
105 | # config = {
106 | # 'key1': 'value1',
107 | # 'key2': 'value2',
108 | # } # <--- this bracket is dedented and on a separate line
109 | #
110 | # time_series = self.remote_client.query_entity_counters(
111 | # entity='dev3246.region1',
112 | # key='dns.query_latency_tcp',
113 | # transform=Transformation.AVERAGE(window=timedelta(seconds=60)),
114 | # start_ts=now()-timedelta(days=3),
115 | # end_ts=now(),
116 | # ) # <--- this bracket is dedented and on a separate line
117 | dedent_closing_brackets=True
118 |
119 | # Disable the heuristic which places each list element on a separate line
120 | # if the list is comma-terminated.
121 | disable_ending_comma_heuristic=True
122 |
123 | # Place each dictionary entry onto its own line.
124 | each_dict_entry_on_separate_line=True
125 |
126 | # Require multiline dictionary even if it would normally fit on one line.
127 | # For example:
128 | #
129 | # config = {
130 | # 'key1': 'value1'
131 | # }
132 | force_multiline_dict=False
133 |
134 | # The regex for an i18n comment. The presence of this comment stops
135 | # reformatting of that line, because the comments are required to be
136 | # next to the string they translate.
137 | i18n_comment=#\..*
138 |
139 | # The i18n function call names. The presence of this function stops
140 | # reformatting on that line, because the string it has cannot be moved
141 | # away from the i18n comment.
142 | i18n_function_call=N_, _
143 |
144 | # Indent blank lines.
145 | indent_blank_lines=False
146 |
147 | # Put closing brackets on a separate line, indented, if the bracketed
148 | # expression can't fit in a single line. Applies to all kinds of brackets,
149 | # including function definitions and calls. For example:
150 | #
151 | # config = {
152 | # 'key1': 'value1',
153 | # 'key2': 'value2',
154 | # } # <--- this bracket is indented and on a separate line
155 | #
156 | # time_series = self.remote_client.query_entity_counters(
157 | # entity='dev3246.region1',
158 | # key='dns.query_latency_tcp',
159 | # transform=Transformation.AVERAGE(window=timedelta(seconds=60)),
160 | # start_ts=now()-timedelta(days=3),
161 | # end_ts=now(),
162 | # ) # <--- this bracket is indented and on a separate line
163 | indent_closing_brackets=False
164 |
165 | # Indent the dictionary value if it cannot fit on the same line as the
166 | # dictionary key. For example:
167 | #
168 | # config = {
169 | # 'key1':
170 | # 'value1',
171 | # 'key2': value1 +
172 | # value2,
173 | # }
174 | indent_dictionary_value=True
175 |
176 | # The number of columns to use for indentation.
177 | indent_width=4
178 |
179 | # Join short lines into one line. E.g., single line 'if' statements.
180 | join_multiple_lines=False
181 |
182 | # Do not include spaces around selected binary operators. For example:
183 | #
184 | # 1 + 2 * 3 - 4 / 5
185 | #
186 | # will be formatted as follows when configured with "*,/":
187 | #
188 | # 1 + 2*3 - 4/5
189 | no_spaces_around_selected_binary_operators=
190 |
191 | # Use spaces around default or named assigns.
192 | spaces_around_default_or_named_assign=False
193 |
194 | # Adds a space after the opening '{' and before the ending '}' dict
195 | # delimiters.
196 | #
197 | # {1: 2}
198 | #
199 | # will be formatted as:
200 | #
201 | # { 1: 2 }
202 | spaces_around_dict_delimiters=False
203 |
204 | # Adds a space after the opening '[' and before the ending ']' list
205 | # delimiters.
206 | #
207 | # [1, 2]
208 | #
209 | # will be formatted as:
210 | #
211 | # [ 1, 2 ]
212 | spaces_around_list_delimiters=False
213 |
214 | # Use spaces around the power operator.
215 | spaces_around_power_operator=False
216 |
217 | # Use spaces around the subscript / slice operator. For example:
218 | #
219 | # my_list[1 : 10 : 2]
220 | spaces_around_subscript_colon=False
221 |
222 | # Adds a space after the opening '(' and before the ending ')' tuple
223 | # delimiters.
224 | #
225 | # (1, 2, 3)
226 | #
227 | # will be formatted as:
228 | #
229 | # ( 1, 2, 3 )
230 | spaces_around_tuple_delimiters=False
231 |
232 | # The number of spaces required before a trailing comment.
233 | # This can be a single value (representing the number of spaces
234 | # before each trailing comment) or list of values (representing
235 | # alignment column values; trailing comments within a block will
236 | # be aligned to the first column value that is greater than the maximum
237 | # line length within the block). For example:
238 | #
239 | # With spaces_before_comment=5:
240 | #
241 | # 1 + 1 # Adding values
242 | #
243 | # will be formatted as:
244 | #
245 | # 1 + 1 # Adding values <-- 5 spaces between the end of the
246 | # # statement and comment
247 | #
248 | # With spaces_before_comment=15, 20:
249 | #
250 | # 1 + 1 # Adding values
251 | # two + two # More adding
252 | #
253 | # longer_statement # This is a longer statement
254 | # short # This is a shorter statement
255 | #
256 | # a_very_long_statement_that_extends_beyond_the_final_column # Comment
257 | # short # This is a shorter statement
258 | #
259 | # will be formatted as:
260 | #
261 | # 1 + 1 # Adding values <-- end of line comments in block
262 | # # aligned to col 15
263 | # two + two # More adding
264 | #
265 | # longer_statement # This is a longer statement <-- end of line
266 | # # comments in block aligned to col 20
267 | # short # This is a shorter statement
268 | #
269 | # a_very_long_statement_that_extends_beyond_the_final_column # Comment <-- the end of line comments are aligned based on the line length
270 | # short # This is a shorter statement
271 | #
272 | spaces_before_comment=2
273 |
274 | # Insert a space between the ending comma and closing bracket of a list,
275 | # etc.
276 | space_between_ending_comma_and_closing_bracket=True
277 |
278 | # Use spaces inside brackets, braces, and parentheses. For example:
279 | #
280 | # method_call( 1 )
281 | # my_dict[ 3 ][ 1 ][ get_index( *args, **kwargs ) ]
282 | # my_set = { 1, 2, 3 }
283 | space_inside_brackets=False
284 |
285 | # Split before arguments
286 | split_all_comma_separated_values=False
287 |
288 | # Split before arguments, but do not split all subexpressions recursively
289 | # (unless needed).
290 | split_all_top_level_comma_separated_values=False
291 |
292 | # Split before arguments if the argument list is terminated by a
293 | # comma.
294 | # [my note: this only works if disable_ending_comma_heuristic=True]
295 | split_arguments_when_comma_terminated=False
296 |
297 | # Set to True to prefer splitting before '+', '-', '*', '/', '//', or '@'
298 | # rather than after.
299 | split_before_arithmetic_operator=False
300 |
301 | # Set to True to prefer splitting before '&', '|' or '^' rather than
302 | # after.
303 | split_before_bitwise_operator=False
304 |
305 | # Split before the closing bracket if a list or dict literal doesn't fit on
306 | # a single line.
307 | split_before_closing_bracket=True
308 |
309 | # Split before a dictionary or set generator (comp_for). For example, note
310 | # the split before the 'for':
311 | #
312 | # foo = {
313 | # variable: 'Hello world, have a nice day!'
314 | # for variable in bar if variable != 42
315 | # }
316 | split_before_dict_set_generator=False
317 |
318 | # Split before the '.' if we need to split a longer expression:
319 | #
320 | # foo = ('This is a really long string: {}, {}, {}, {}'.format(a, b, c, d))
321 | #
322 | # would reformat to something like:
323 | #
324 | # foo = ('This is a really long string: {}, {}, {}, {}'
325 | # .format(a, b, c, d))
326 | split_before_dot=False
327 |
328 | # Split after the opening paren which surrounds an expression if it doesn't
329 | # fit on a single line.
330 | split_before_expression_after_opening_paren=True
331 |
332 | # If an argument / parameter list is going to be split, then split before
333 | # the first argument.
334 | split_before_first_argument=True
335 |
336 | # Set to True to prefer splitting before 'and' or 'or' rather than
337 | # after.
338 | split_before_logical_operator=False
339 |
340 | # Split named assignments onto individual lines.
341 | split_before_named_assigns=False
342 |
343 | # Set to True to split list comprehensions and generators that have
344 | # non-trivial expressions and multiple clauses before each of these
345 | # clauses. For example:
346 | #
347 | # result = [
348 | # a_long_var + 100 for a_long_var in xrange(1000)
349 | # if a_long_var % 10]
350 | #
351 | # would reformat to something like:
352 | #
353 | # result = [
354 | # a_long_var + 100
355 | # for a_long_var in xrange(1000)
356 | # if a_long_var % 10]
357 | split_complex_comprehension=False
358 |
359 | # The penalty for splitting right after the opening bracket.
360 | split_penalty_after_opening_bracket=300
361 |
362 | # The penalty for splitting the line after a unary operator.
363 | split_penalty_after_unary_operator=10000
364 |
365 | # The penalty of splitting the line around the '+', '-', '*', '/', '//',
366 | # ``%``, and '@' operators.
367 | split_penalty_arithmetic_operator=300
368 |
369 | # The penalty for splitting right before an if expression.
370 | split_penalty_before_if_expr=0
371 |
372 | # The penalty of splitting the line around the '&', '|', and '^'
373 | # operators.
374 | split_penalty_bitwise_operator=300
375 |
376 | # The penalty for splitting a list comprehension or generator
377 | # expression.
378 | split_penalty_comprehension=2100
379 |
380 | # The penalty for characters over the column limit.
381 | split_penalty_excess_character=7000
382 |
383 | # The penalty incurred by adding a line split to the logical line. The
384 | # more line splits added the higher the penalty.
385 | # [my note: prev 30]
386 | split_penalty_for_added_line_split=0
387 |
388 | # The penalty of splitting a list of "import as" names. For example:
389 | #
390 | # from a_very_long_or_indented_module_name_yada_yad import (long_argument_1,
391 | # long_argument_2,
392 | # long_argument_3)
393 | #
394 | # would reformat to something like:
395 | #
396 | # from a_very_long_or_indented_module_name_yada_yad import (
397 | # long_argument_1, long_argument_2, long_argument_3)
398 | split_penalty_import_names=0
399 |
400 | # The penalty of splitting the line around the 'and' and 'or'
401 | # operators.
402 | split_penalty_logical_operator=300
403 |
404 | # Use the Tab character for indentation.
405 | use_tabs=False
406 |
407 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | |Tests| |codecov| |PyPI version| |conda-forge version| |docs| |License|
2 |
3 | ``geedim``
4 | ==========
5 |
6 | .. short_descr_start
7 |
  8 | Search, composite, and download `Google Earth Engine <https://earthengine.google.com/>`__ imagery, without size limits.
9 |
10 | .. short_descr_end
11 |
12 | .. description_start
13 |
14 | Description
15 | -----------
16 |
17 | ``geedim`` provides a command line interface and API for searching, compositing and downloading satellite imagery
18 | from Google Earth Engine (EE). It optionally performs cloud/shadow masking, and cloud/shadow-free compositing on
19 | supported collections. Images and composites can be downloaded; or exported to Google Drive, Earth Engine asset or
20 | Google Cloud Storage. Images larger than the
21 | `EE size limit `_ are split and downloaded
22 | as separate tiles, then re-assembled into a single GeoTIFF.
23 |
24 | .. description_end
25 |
26 | See the documentation site for more detail: https://geedim.readthedocs.io/.
27 |
28 | .. supp_im_start
29 |
30 | Cloud/shadow support
31 | ~~~~~~~~~~~~~~~~~~~~
32 |
33 | Any EE imagery can be searched, composited and downloaded by ``geedim``. Cloud/shadow masking, and cloud/shadow-free
34 | compositing are supported on the following collections:
35 |
36 | .. supp_im_end
37 |
38 | +------------------------------------------+-------------------------------------------------------+
39 | | EE name | Description |
40 | +==========================================+=======================================================+
41 | | `LANDSAT/LT04/C02/T1_L2 | Landsat 4, collection 2, tier 1, level 2 surface |
42 | | `_ | |
45 | +------------------------------------------+-------------------------------------------------------+
46 | | `LANDSAT/LT05/C02/T1_L2 | Landsat 5, collection 2, tier 1, level 2 surface |
47 | | `_ | |
50 | +------------------------------------------+-------------------------------------------------------+
51 | | `LANDSAT/LE07/C02/T1_L2 | Landsat 7, collection 2, tier 1, level 2 surface |
52 | | `_ | |
55 | +------------------------------------------+-------------------------------------------------------+
56 | | `LANDSAT/LC08/C02/T1_L2 | Landsat 8, collection 2, tier 1, level 2 surface |
57 | | `_ | |
60 | +------------------------------------------+-------------------------------------------------------+
61 | | `LANDSAT/LC09/C02/T1_L2 | Landsat 9, collection 2, tier 1, level 2 surface |
62 | | `_ | |
65 | +------------------------------------------+-------------------------------------------------------+
66 | | `COPERNICUS/S2 | Sentinel-2, level 1C, top of atmosphere reflectance. |
67 | | `_ | |
69 | +------------------------------------------+-------------------------------------------------------+
70 | | `COPERNICUS/S2_SR | Sentinel-2, level 2A, surface reflectance. |
71 | | `_ | |
73 | +------------------------------------------+-------------------------------------------------------+
74 | | `COPERNICUS/S2_HARMONIZED | Harmonised Sentinel-2, level 1C, top of atmosphere |
75 | | `_ | |
78 | +------------------------------------------+-------------------------------------------------------+
79 | | `COPERNICUS/S2_SR_HARMONIZED | Harmonised Sentinel-2, level 2A, surface reflectance. |
80 | | `_ | |
83 | +------------------------------------------+-------------------------------------------------------+
84 |
85 | .. install_start
86 |
87 | Installation
88 | ------------
89 |
90 | ``geedim`` is a python 3 package, and requires users to be registered with `Google Earth
91 | Engine `__.
92 |
93 | It can be installed with `pip `_ or `conda `_.
94 |
95 | pip
96 | ~~~
97 |
98 | .. code:: shell
99 |
100 | pip install geedim
101 |
102 | conda
103 | ~~~~~
104 |
105 | .. code:: shell
106 |
107 | conda install -c conda-forge geedim
108 |
109 | Authentication
110 | ~~~~~~~~~~~~~~
111 |
112 | Following installation, Earth Engine should be authenticated:
113 |
114 | .. code:: shell
115 |
116 | earthengine authenticate
117 |
118 | .. install_end
119 |
120 | Getting started
121 | ---------------
122 |
123 | Command line interface
124 | ~~~~~~~~~~~~~~~~~~~~~~
125 |
126 | .. cli_start
127 |
128 | ``geedim`` command line functionality is accessed through the commands:
129 |
130 | - ``search``: Search for images.
131 | - ``composite``: Create a composite image.
132 | - ``download``: Download image(s).
133 | - ``export``: Export image(s).
134 | - ``config``: Configure cloud/shadow masking.
135 |
136 | Get help on ``geedim`` with:
137 |
138 | .. code:: shell
139 |
140 | geedim --help
141 |
142 | and help on a ``geedim`` command with:
143 |
144 | .. code:: shell
145 |
 146 |     geedim <command> --help
147 |
148 | Examples
149 | ^^^^^^^^
150 |
151 | Search for Landsat-8 images, reporting cloudless portions.
152 |
153 | .. code:: shell
154 |
155 | geedim search -c l8-c2-l2 -s 2021-06-01 -e 2021-07-01 --bbox 24 -33 24.1 -33.1 --cloudless-portion
156 |
157 | Download a Landsat-8 image with cloud/shadow mask applied.
158 |
159 | .. code:: shell
160 |
161 | geedim download -i LANDSAT/LC08/C02/T1_L2/LC08_172083_20210610 --bbox 24 -33 24.1 -33.1 --mask
162 |
163 | Command pipelines
164 | ~~~~~~~~~~~~~~~~~
165 |
166 | Multiple ``geedim`` commands can be chained together in a pipeline where image results from the previous command form
167 | inputs to the current command. For example, if the ``composite`` command is chained with ``download`` command, the
168 | created composite image will be downloaded, or if the ``search`` command is chained with the ``composite`` command, the
169 | search result images will be composited.
170 |
171 | Common command options are also piped between chained commands. For example, if the ``config`` command is chained with
172 | other commands, the configuration specified with ``config`` will be applied to subsequent commands in the pipeline. Many
173 | command combinations are possible.
174 |
175 | .. _examples-1:
176 |
177 | Examples
178 | ^^^^^^^^
179 |
180 | Composite two Landsat-7 images and download the result:
181 |
182 | .. code:: shell
183 |
184 | geedim composite -i LANDSAT/LE07/C02/T1_L2/LE07_173083_20100203 -i LANDSAT/LE07/C02/T1_L2/LE07_173083_20100219 download --bbox 22 -33.1 22.1 -33 --crs EPSG:3857 --scale 30
185 |
186 | Composite the results of a Landsat-8 search and download the result.
187 |
188 | .. code:: shell
189 |
190 | geedim search -c l8-c2-l2 -s 2019-02-01 -e 2019-03-01 --bbox 23 -33 23.2 -33.2 composite -cm q-mosaic download --scale 30 --crs EPSG:3857
191 |
192 | Composite the results of a Landsat-8 search, export to Earth Engine asset, and download the asset image.
193 |
194 | .. code:: shell
195 |
 196 |     geedim search -c l8-c2-l2 -s 2019-02-01 -e 2019-03-01 --bbox 23 -33 23.2 -33.2 composite -cm q-mosaic export --type asset --folder <your cloud project> --scale 30 --crs EPSG:3857 download
197 |
198 | Search for Sentinel-2 SR images with a cloudless portion of at least 60%, using the ``cloud-score`` mask-method to identify clouds:
199 |
200 | .. code:: shell
201 |
202 | geedim config --mask-method cloud-score search -c s2-sr-hm --cloudless-portion 60 -s 2022-01-01 -e 2022-01-14 --bbox 24 -34 24.5 -33.5
203 |
204 | .. cli_end
205 |
206 | API
207 | ~~~
208 |
209 | Example
210 | ^^^^^^^
211 |
212 | .. code:: python
213 |
214 | import geedim as gd
215 |
216 | gd.Initialize() # initialise earth engine
217 |
218 | # geojson polygon to search / download
219 | region = {
220 | "type": "Polygon",
221 | "coordinates": [[[24, -33.6], [24, -33.53], [23.93, -33.53], [23.93, -33.6], [24, -33.6]]]
222 | }
223 |
224 | # make collection and search, reporting cloudless portions
225 | coll = gd.MaskedCollection.from_name('COPERNICUS/S2_SR_HARMONIZED')
226 | coll = coll.search('2019-01-10', '2019-01-21', region, cloudless_portion=0)
227 | print(coll.schema_table)
228 | print(coll.properties_table)
229 |
230 | # create and download an image
231 | im = gd.MaskedImage.from_id('COPERNICUS/S2_SR_HARMONIZED/20190115T080251_20190115T082230_T35HKC')
232 | im.download('s2_image.tif', region=region)
233 |
234 | # composite search results and download
235 | comp_im = coll.composite()
236 | comp_im.download('s2_comp_image.tif', region=region, crs='EPSG:32735', scale=10)
237 |
238 | License
239 | -------
240 |
241 | This project is licensed under the terms of the `Apache-2.0 License `__.
242 |
243 | Contributing
244 | ------------
245 |
246 | See the `documentation `__ for details.
247 |
248 | Credits
249 | -------
250 |
 251 | - Tiled downloading was inspired by the work in `GEES2Downloader `__ under the
252 | terms of the `MIT license `__.
253 | - Medoid compositing was adapted from `gee_tools `__ under the terms of the
254 | `MIT license `__.
255 | - Sentinel-2 cloud/shadow masking was adapted from `ee_extra `__ under
 256 |   the terms of the `Apache-2.0 license `__.
257 |
258 |
259 | .. |Tests| image:: https://github.com/leftfield-geospatial/geedim/actions/workflows/run-unit-tests.yml/badge.svg
260 | :target: https://github.com/leftfield-geospatial/geedim/actions/workflows/run-unit-tests.yml
261 | .. |codecov| image:: https://codecov.io/gh/leftfield-geospatial/geedim/branch/main/graph/badge.svg?token=69GZNQ3TI3
262 | :target: https://codecov.io/gh/leftfield-geospatial/geedim
263 | .. |PyPI version| image:: https://img.shields.io/pypi/v/geedim.svg
264 | :target: https://pypi.org/project/geedim/
265 | .. |conda-forge version| image:: https://img.shields.io/conda/vn/conda-forge/geedim.svg
266 | :alt: conda-forge
267 | :target: https://anaconda.org/conda-forge/geedim
268 | .. |docs| image:: https://readthedocs.org/projects/geedim/badge/?version=latest
269 | :target: https://geedim.readthedocs.io/en/latest/?badge=latest
270 | .. |License| image:: https://img.shields.io/badge/License-Apache%202.0-blue.svg
271 | :target: https://opensource.org/licenses/Apache-2.0
272 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/api.rst:
--------------------------------------------------------------------------------
1 | API
2 | ===
3 |
4 | Getting started
5 | ---------------
6 |
7 | This section gives a quick overview of the API. You can also take a look at the :ref:`tutorials `.
8 |
9 | Initialisation
10 | ^^^^^^^^^^^^^^
11 |
12 | :meth:`~geedim.utils.Initialize` provides a shortcut for initialising the Earth Engine API.
13 |
14 | .. literalinclude:: examples/api_getting_started.py
15 | :language: python
16 | :start-after: [initialise-start]
17 | :end-before: [initialise-end]
18 |
19 |
20 | Searching image collections
21 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
22 |
23 | `Any Earth Engine image collection `_ can be searched with :class:`~geedim.collection.MaskedCollection`. Here, we search for `Landsat-8 surface reflectance `_ images over Stellenbosch, South Africa.
24 |
25 | .. literalinclude:: examples/api_getting_started.py
26 | :language: python
27 | :start-after: [search-start]
28 | :end-before: [search-end]
29 |
30 | The output:
31 |
32 | .. _search results:
33 |
34 | .. code:: shell
35 |
36 | ABBREV DESCRIPTION
37 | --------- ---------------------------------------
38 | ID Earth Engine image id
39 | DATE Image capture date/time (UTC)
40 | FILL Portion of region pixels that are valid (%)
41 | CLOUDLESS Portion of filled pixels that are cloud/shadow free (%)
42 | GRMSE Orthorectification RMSE (m)
43 | SAA Solar azimuth angle (deg)
44 | SEA Solar elevation angle (deg)
45 |
46 | ID DATE FILL CLOUDLESS GRMSE SAA SEA
47 | ------------------------------------------- ---------------- ----- --------- ----- ----- -----
48 | LANDSAT/LC08/C02/T1_L2/LC08_175083_20190101 2019-01-01 08:35 99.83 55.62 8.48 79.36 59.20
49 | LANDSAT/LC08/C02/T1_L2/LC08_175084_20190101 2019-01-01 08:35 99.79 60.35 9.71 77.28 58.67
50 | LANDSAT/LC08/C02/T1_L2/LC08_175083_20190117 2019-01-17 08:35 99.98 94.90 8.84 76.98 56.79
51 | LANDSAT/LC08/C02/T1_L2/LC08_175084_20190117 2019-01-17 08:35 99.97 95.07 9.75 75.13 56.21
52 | LANDSAT/LC08/C02/T1_L2/LC08_175083_20190202 2019-02-02 08:34 99.91 95.82 8.46 71.91 54.00
53 | LANDSAT/LC08/C02/T1_L2/LC08_175084_20190202 2019-02-02 08:35 99.87 95.21 9.21 70.34 53.30
54 |
55 | Image creation and download
56 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
57 |
58 | Images can be created, masked and downloaded with the :class:`~geedim.mask.MaskedImage` class. Typically, one would pass the Earth Engine image ID to :meth:`.MaskedImage.from_id` to create the image.
59 |
60 | .. literalinclude:: examples/api_getting_started.py
61 | :language: python
62 | :start-after: [image-download-start]
63 | :end-before: [image-download-end]
64 |
65 | Compositing
66 | ^^^^^^^^^^^
67 |
68 | Let's form a cloud/shadow-free composite of the search result images, using the *q-mosaic* method, then download the result. By specifying the ``region`` parameter to :meth:`.MaskedCollection.composite`, we prioritise selection of pixels from the least cloudy images when forming the composite.
69 |
70 | .. note::
71 | When downloading composite images, the ``region``, ``crs`` and ``scale`` parameters must be specified, as the image has no fixed (known) projection.
72 |
73 | .. literalinclude:: examples/api_getting_started.py
74 | :language: python
75 | :start-after: [composite-start]
76 | :end-before: [composite-end]
77 |
78 |
79 | Cloud/shadow masking
80 | ^^^^^^^^^^^^^^^^^^^^
81 |
82 | All :class:`~geedim.mask.MaskedImage` and :class:`~geedim.collection.MaskedCollection` methods that involve cloud/shadow
83 | masking (:meth:`.MaskedImage.from_id`, :meth:`.MaskedCollection.search`, and :meth:`.MaskedCollection.composite`)
84 | take optional cloud/shadow masking ``**kwargs``. See :meth:`.MaskedImage.__init__` for a description of these
85 | parameters.
86 |
87 | Here, we create and download a cloud/shadow masked
88 | `Sentinel-2 image `_, specifying a cloud score threshold of 0.7.
89 |
90 | .. literalinclude:: examples/api_getting_started.py
91 | :language: python
92 | :start-after: [mask-start]
93 | :end-before: [mask-end]
94 |
95 |
96 | Image metadata
97 | ^^^^^^^^^^^^^^
98 |
99 | ``geedim`` populates downloaded files with metadata from the source Earth Engine image, and the associated STAC entry.
100 | The next code snippet uses `rasterio `_ to read the metadata of the downloaded
101 | Sentinel-2 image.
102 |
103 | .. literalinclude:: examples/api_getting_started.py
104 | :language: python
105 | :start-after: [metadata-start]
106 | :end-before: [metadata-end]
107 |
108 | Output:
109 |
110 | .. code:: shell
111 |
112 | Image properties:
113 | {'AOT_RETRIEVAL_ACCURACY': '0', 'AREA_OR_POINT': 'Area', 'CLOUDY_PIXEL_PERCENTAGE': '4.228252', 'CLOUD_COVERAGE_ASSESSMENT': '4.228252', 'CLOUD_SHADOW_PERCENTAGE': '0.353758', 'DARK_FEATURES_PERCENTAGE': '1.390344', 'DATASTRIP_ID': 'S2A_OPER_MSI_L2A_DS_MTI__20190101T112242_S20190101T084846_N02.11', 'DATATAKE_IDENTIFIER': 'GS2A_20190101T082331_018422_N02.11', 'DATATAKE_TYPE': 'INS-NOBS', 'DEGRADED_MSI_DATA_PERCENTAGE': '0', 'FORMAT_CORRECTNESS': 'PASSED', 'GENERAL_QUALITY': 'PASSED', 'GENERATION_TIME': '1546341762000', 'GEOMETRIC_QUALITY': 'PASSED', 'GRANULE_ID': 'L2A_T34HCH_A018422_20190101T084846', 'HIGH_PROBA_CLOUDS_PERCENTAGE': '1.860266', 'LICENSE': 'https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S2_SR#terms-of-use', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B1': '197.927117994', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B10': '200.473257333', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B11': '199.510962726', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B12': '198.925728126', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B2': '204.233801024', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B3': '201.623624653', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B4': '200.124411228', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B5': '199.531415295', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B6': '199.06932777', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B7': '198.686746475', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B8': '202.762429499', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B8A': '198.389627535', 'MEAN_INCIDENCE_AZIMUTH_ANGLE_B9': '197.722038042', 'MEAN_INCIDENCE_ZENITH_ANGLE_B1': '3.18832352559', 'MEAN_INCIDENCE_ZENITH_ANGLE_B10': '2.74480253282', 'MEAN_INCIDENCE_ZENITH_ANGLE_B11': '2.9240632909', 'MEAN_INCIDENCE_ZENITH_ANGLE_B12': '3.12457836272', 'MEAN_INCIDENCE_ZENITH_ANGLE_B2': '2.56258512587', 'MEAN_INCIDENCE_ZENITH_ANGLE_B3': '2.66821142821', 'MEAN_INCIDENCE_ZENITH_ANGLE_B4': '2.78791939543', 'MEAN_INCIDENCE_ZENITH_ANGLE_B5': '2.86049380258', 'MEAN_INCIDENCE_ZENITH_ANGLE_B6': '2.93757718579', 'MEAN_INCIDENCE_ZENITH_ANGLE_B7': '3.01912758709', 'MEAN_INCIDENCE_ZENITH_ANGLE_B8': 
'2.61179829178', 'MEAN_INCIDENCE_ZENITH_ANGLE_B8A': '3.10418395274', 'MEAN_INCIDENCE_ZENITH_ANGLE_B9': '3.28253454154', 'MEAN_SOLAR_AZIMUTH_ANGLE': '74.331216318', 'MEAN_SOLAR_ZENITH_ANGLE': '27.589988524', 'MEDIUM_PROBA_CLOUDS_PERCENTAGE': '0.774948', 'MGRS_TILE': '34HCH', 'NODATA_PIXEL_PERCENTAGE': '2.7e-05', 'NOT_VEGETATED_PERCENTAGE': '72.305781', 'PROCESSING_BASELINE': '02.11', 'PRODUCT_ID': 'S2A_MSIL2A_20190101T082331_N0211_R121_T34HCH_20190101T112242', 'RADIATIVE_TRANSFER_ACCURACY': '0', 'RADIOMETRIC_QUALITY': 'PASSED', 'REFLECTANCE_CONVERSION_CORRECTION': '1.03413456106', 'SATURATED_DEFECTIVE_PIXEL_PERCENTAGE': '0', 'SENSING_ORBIT_DIRECTION': 'DESCENDING', 'SENSING_ORBIT_NUMBER': '121', 'SENSOR_QUALITY': 'PASSED', 'SNOW_ICE_PERCENTAGE': '0.0156', 'SOLAR_IRRADIANCE_B1': '1884.69', 'SOLAR_IRRADIANCE_B10': '367.15', 'SOLAR_IRRADIANCE_B11': '245.59', 'SOLAR_IRRADIANCE_B12': '85.25', 'SOLAR_IRRADIANCE_B2': '1959.72', 'SOLAR_IRRADIANCE_B3': '1823.24', 'SOLAR_IRRADIANCE_B4': '1512.06', 'SOLAR_IRRADIANCE_B5': '1424.64', 'SOLAR_IRRADIANCE_B6': '1287.61', 'SOLAR_IRRADIANCE_B7': '1162.08', 'SOLAR_IRRADIANCE_B8': '1041.63', 'SOLAR_IRRADIANCE_B8A': '955.32', 'SOLAR_IRRADIANCE_B9': '812.92', 'SPACECRAFT_NAME': 'Sentinel-2A', 'system-asset_size': '1820790758', 'system-index': '20190101T082331_20190101T084846_T34HCH', 'system-time_end': '1546332584000', 'system-time_start': '1546332584000', 'THIN_CIRRUS_PERCENTAGE': '1.593038', 'UNCLASSIFIED_PERCENTAGE': '1.202621', 'VEGETATION_PERCENTAGE': '18.241563', 'WATER_PERCENTAGE': '2.262083', 'WATER_VAPOUR_RETRIEVAL_ACCURACY': '0'}
114 | Band names:
115 | ('B1', 'B2', 'B3', 'B4', 'B5', 'B6', 'B7', 'B8', 'B8A', 'B9', 'B11', 'B12', 'AOT', 'WVP', 'SCL', 'TCI_R', 'TCI_G', 'TCI_B', 'MSK_CLDPRB', 'MSK_SNWPRB', 'QA10', 'QA20', 'QA60', 'FILL_MASK', 'CLOUD_MASK', 'CLOUDLESS_MASK', 'SHADOW_MASK', 'CLOUD_PROB', 'CLOUD_DIST')
116 | Band 1 properties:
117 | {'center_wavelength': '0.4439', 'description': 'Aerosols', 'gsd': '60', 'name': 'B1', 'scale': '0.0001', 'wavelength': '443.9nm (S2A) / 442.3nm (S2B)'}
118 |
119 |
120 | Computed images and user memory
121 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
122 |
123 | Earth engine has a size limit of 32 MB on `download requests `_. ``geedim`` avoids exceeding this by tiling downloads. However, Earth engine also has a `limit on user memory `_ for image computations. This limit can be exceeded when downloading large computed images (such as custom user images or ``geedim`` generated composites), raising a *user memory limit exceeded* error. Unfortunately, there is no way for ``geedim`` to adjust tiles to avoid exceeding this limit, as the memory requirements of a computation are not known in advance. The user has two options for working around this error:
124 |
125 | 1) max_tile_size
126 | ~~~~~~~~~~~~~~~~
127 |
128 | Decreasing the ``max_tile_size`` argument to :meth:`geedim.mask.MaskedImage.download` reduces the user memory required by computations. The user would need to experiment to find a reduced value that solves any memory limit problem. :option:`--max-tile-size ` is the equivalent option on the command line.
129 |
130 | .. literalinclude:: examples/api_getting_started.py
131 | :language: python
132 | :start-after: [max_tile_size-start]
133 | :end-before: [max_tile_size-end]
134 |
135 | 2) Exporting
136 | ~~~~~~~~~~~~~
137 |
138 | Exporting the image to an Earth Engine asset, and then downloading. Exporting images is not subject to the user memory limit, and once exported, computation on the asset image is complete. The exported asset image can then be downloaded in the standard way.
139 |
140 | .. literalinclude:: examples/api_getting_started.py
141 | :language: python
142 | :start-after: [export-asset-download-start]
143 | :end-before: [export-asset-download-end]
144 |
145 |
146 | Reference
147 | ---------
148 |
149 | MaskedImage
150 | ^^^^^^^^^^^
151 |
152 | .. currentmodule:: geedim.mask
153 |
154 | .. autoclass:: MaskedImage
155 | :special-members: __init__
156 |
157 | .. rubric:: Methods
158 |
159 | .. autosummary::
160 | :toctree: _generated
161 |
162 | ~MaskedImage.from_id
163 | ~MaskedImage.mask_clouds
164 | ~MaskedImage.download
165 | ~MaskedImage.export
166 | ~MaskedImage.monitor_export
167 |
168 |
169 | .. rubric:: Attributes
170 |
171 | .. autosummary::
172 | :toctree: _generated
173 |
174 | ~MaskedImage.ee_image
175 | ~MaskedImage.id
176 | ~MaskedImage.date
177 | ~MaskedImage.crs
178 | ~MaskedImage.scale
179 | ~MaskedImage.footprint
180 | ~MaskedImage.transform
181 | ~MaskedImage.shape
182 | ~MaskedImage.count
183 | ~MaskedImage.dtype
184 | ~MaskedImage.size
185 | ~MaskedImage.has_fixed_projection
186 | ~MaskedImage.name
187 | ~MaskedImage.properties
188 | ~MaskedImage.band_properties
189 |
190 |
191 | MaskedCollection
192 | ^^^^^^^^^^^^^^^^
193 |
194 | .. currentmodule:: geedim.collection
195 |
196 | .. autoclass:: MaskedCollection
197 | :special-members: __init__
198 |
199 |
200 | .. rubric:: Methods
201 |
202 | .. autosummary::
203 | :toctree: _generated/
204 |
205 | ~MaskedCollection.from_name
206 | ~MaskedCollection.from_list
207 | ~MaskedCollection.search
208 | ~MaskedCollection.composite
209 |
210 |
211 | .. rubric:: Attributes
212 |
213 | .. autosummary::
214 | :toctree: _generated/
215 |
216 | ~MaskedCollection.ee_collection
217 | ~MaskedCollection.name
218 | ~MaskedCollection.image_type
219 | ~MaskedCollection.properties
220 | ~MaskedCollection.properties_table
221 | ~MaskedCollection.schema
222 | ~MaskedCollection.schema_table
223 | ~MaskedCollection.refl_bands
224 |
225 |
226 | enums
227 | ^^^^^
228 |
229 | CompositeMethod
230 | ~~~~~~~~~~~~~~~
231 |
232 | .. currentmodule:: geedim.enums
233 |
234 | .. autoclass:: CompositeMethod
235 | :members:
236 |
237 | CloudMaskMethod
238 | ~~~~~~~~~~~~~~~
239 |
240 | .. currentmodule:: geedim.enums
241 |
242 | .. autoclass:: CloudMaskMethod
243 | :members:
244 |
245 | ResamplingMethod
246 | ~~~~~~~~~~~~~~~~~
247 |
248 | .. currentmodule:: geedim.enums
249 |
250 | .. autoclass:: ResamplingMethod
251 | :members:
252 |
253 | ExportType
254 | ~~~~~~~~~~
255 |
256 | .. currentmodule:: geedim.enums
257 |
258 | .. autoclass:: ExportType
259 | :members:
260 |
261 |
262 | Initialize
263 | ^^^^^^^^^^
264 |
265 | .. currentmodule:: geedim.utils
266 |
267 | .. autofunction:: Initialize
268 |
269 |
270 | .. _tutorials:
271 |
272 | Tutorials
273 | ---------
274 |
275 | .. toctree::
276 | :maxdepth: 1
277 |
278 | examples/l7_composite.ipynb
279 | examples/s2_composite.ipynb
280 |
--------------------------------------------------------------------------------
/docs/cli.rst:
--------------------------------------------------------------------------------
1 | Command line interface
2 | ----------------------
3 |
4 | Getting started
5 | ~~~~~~~~~~~~~~~
6 |
7 | .. include:: ../README.rst
8 | :start-after: cli_start
9 | :end-before: cli_end
10 |
11 | Usage
12 | ~~~~~
13 |
14 | .. click:: geedim.cli:cli
15 | :prog: geedim
16 |
17 | .. click:: geedim.cli:search
18 | :prog: geedim search
19 |
20 | .. click:: geedim.cli:download
21 | :prog: geedim download
22 |
23 | .. click:: geedim.cli:export
24 | :prog: geedim export
25 |
26 | .. click:: geedim.cli:composite
27 | :prog: geedim composite
28 |
29 | .. click:: geedim.cli:config
30 | :prog: geedim config
31 |
--------------------------------------------------------------------------------
/docs/cloud_coll_rtd.rst:
--------------------------------------------------------------------------------
1 | +------------------------------------------+-------------------------------------------------------+
2 | | EE name | Description |
3 | +==========================================+=======================================================+
4 | | `LANDSAT/LT04/C02/T1_L2 | Landsat 4, collection 2, tier 1, level 2 surface |
5 | | `_ | |
8 | +------------------------------------------+-------------------------------------------------------+
9 | | `LANDSAT/LT05/C02/T1_L2 | Landsat 5, collection 2, tier 1, level 2 surface |
10 | | `_ | |
13 | +------------------------------------------+-------------------------------------------------------+
14 | | `LANDSAT/LE07/C02/T1_L2 | Landsat 7, collection 2, tier 1, level 2 surface |
15 | | `_ | |
18 | +------------------------------------------+-------------------------------------------------------+
19 | | `LANDSAT/LC08/C02/T1_L2 | Landsat 8, collection 2, tier 1, level 2 surface |
20 | | `_ | |
23 | +------------------------------------------+-------------------------------------------------------+
24 | | `LANDSAT/LC09/C02/T1_L2 | Landsat 9, collection 2, tier 1, level 2 surface |
25 | | `_ | |
28 | +------------------------------------------+-------------------------------------------------------+
29 | | `COPERNICUS/S2 | Sentinel-2, level 1C, top of atmosphere reflectance. |
30 | | `_ | |
32 | +------------------------------------------+-------------------------------------------------------+
33 | | `COPERNICUS/S2_SR | Sentinel-2, level 2A, surface reflectance. |
34 | | `_ | |
36 | +------------------------------------------+-------------------------------------------------------+
37 | | `COPERNICUS/S2_HARMONIZED | Harmonised Sentinel-2, level 1C, top of atmosphere |
38 | | `_ | |
41 | +------------------------------------------+-------------------------------------------------------+
42 | | `COPERNICUS/S2_SR_HARMONIZED | Harmonised Sentinel-2, level 2A, surface reflectance. |
43 | | `_ | |
46 | +------------------------------------------+-------------------------------------------------------+
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
# Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html

# -- Path setup --------------------------------------------------------------

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('..'))
# geedim imports must come after the sys.path insert above, so the local repo copy of the
# package is used for the doc build.
from geedim.version import __version__
from geedim.schema import cloud_coll_table


# -- Project information -----------------------------------------------------

project = 'geedim'
copyright = '2022-2023, Leftfield Geospatial'
author = 'Leftfield Geospatial'

# The full version, including alpha/beta/rc tags
release = __version__


# -- General configuration ---------------------------------------------------

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx_click',
    'sphinx.ext.autodoc',
    'sphinx.ext.autosummary',
    'sphinx.ext.napoleon',
    'jupyter_sphinx',
    'nbsphinx'
] # yapf: disable

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']


# -- Options for HTML output -------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# -- Options for autodoc -------------------------------------------------
# see https://www.sphinx-doc.org/en/master/usage/extensions/autodoc.html
# autodoc_mock_imports = ['rasterio', 'click']
autosummary_generate = True
autoclass_content = 'class'
autodoc_class_signature = 'separated'
autodoc_member_order = 'bysource'
autodoc_typehints = 'both'
# autodoc_typehints_format = 'short'

# -- Options for nbsphinx --------------------------------------------------
# env.docname will be e.g. examples/l7_composite.ipynb. The `../` is to
# reference it from itself. preferable to link to actual version of the file
# at the time of the doc build, than a hyperlink to github.
# see https://github.com/aazuspan/wxee/blob/main/docs/conf.py for other examples
nbsphinx_prolog = """
.. note::

   This page was generated from a Jupyter notebook. To run and interact with it,
   you can download it :download:`here <../{{ env.docname }}.ipynb>`.
"""
# Empty paths stop nbsphinx injecting its bundled widget / require.js scripts
# (NOTE(review): presumably to avoid clashing with the theme's scripts - confirm).
nbsphinx_widgets_path=''
nbsphinx_requirejs_path=''

# -- Generate cloud/shadow supported collection tables for github README and RTD
# docs
with open('cloud_coll_rtd.rst', 'w') as f:
    f.write(cloud_coll_table(descr_join='\n\n'))
93 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | Contributing
2 | ============
3 |
4 | Contributions are welcome. Please report bugs and make feature requests with the github `issue tracker
5 | `_.
6 |
7 | Development environment
8 | -----------------------
9 |
10 | ``geedim`` uses the `rasterio `_ package, which has binary dependencies. Under
11 | Windows, it is easiest to resolve these dependencies by working in a ``conda`` environment. You can set this up with:
12 |
13 | .. code:: shell
14 |
   conda create -n <environment name> python=3.9 -c conda-forge
   conda activate <environment name>
17 | conda install -c conda-forge earthengine-api rasterio click requests tqdm tabulate pytest
18 |
19 | If you are using Linux, or macOS, you may want to create a clean virtual python environment. Once the environment is
20 | set up, create a fork of the ``geedim`` github repository, and clone it:
21 |
22 | .. code:: shell
23 |
   git clone https://github.com/<username>/geedim.git
25 |
26 | Finally, install the local ``geedim`` package into your python environment:
27 |
28 | .. code:: shell
29 |
30 | pip install -e geedim
31 |
32 |
33 | Development guide
34 | -----------------
35 |
36 | Cloud/shadow masking
37 | ^^^^^^^^^^^^^^^^^^^^
38 |
39 | If you want to add cloud/shadow masking support for a new Earth Engine image collection, you should subclass
40 | ``geedim.mask.CloudMaskedImage``, and implement at least the ``_aux_image()`` method. Then add a new entry to
41 | ``geedim.schema.collection_schema``.
42 |
43 | Testing
44 | ^^^^^^^
45 |
46 | Please include `pytest `__ tests with your code. The existing tests require the user
47 | to be registered with `Google Earth Engine `__. Installing the `pytest-xdist
48 | `_ plugin will help speed the testing process. For ``conda`` users:
49 |
50 | .. code:: shell
51 |
52 | conda install -c conda-forge pytest-xdist
53 |
54 | Or, using ``pip``:
55 |
56 | .. code:: shell
57 |
58 | pip install pytest-xdist
59 |
60 | You can then run the tests from the root of the ``geedim`` repository with:
61 |
62 | .. code:: shell
63 |
64 | pytest -v -n auto tests
65 |
66 | Style
67 | ^^^^^
68 |
69 | Please include `NumPy docstrings `_ with
70 | your code. Try to conform to the ``geedim`` code style. You can auto-format with
71 | `yapf `__ and the included
72 | `.style.yapf `__ configuration file:
73 |
74 | .. code::
75 |
   yapf --style .style.yapf -i <file path(s)>
77 |
78 |
--------------------------------------------------------------------------------
/docs/description.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 | :start-after: description_start
3 | :end-before: description_end
4 |
5 | .. include:: ../README.rst
6 | :start-after: supp_im_start
7 | :end-before: supp_im_end
8 |
9 | .. include:: cloud_coll_rtd.rst
10 |
--------------------------------------------------------------------------------
/docs/examples/api_getting_started.py:
--------------------------------------------------------------------------------
# Getting-started examples for the geedim API.  The `# [tag-start]` / `# [tag-end]` comment
# markers are referenced by the docs via literalinclude - do not remove or rename them.
# [initialise-start]
import geedim as gd

gd.Initialize()
# [initialise-end]

# [search-start]
# geojson search polygon
region = {
    'type': 'Polygon',
    'coordinates': [[(19, -34), (19, -33.8), (18.8, -33.8), (18.8, -34), (19.0, -34)]],
}

# create & search a landsat-8 collection, reporting cloudless portions
coll = gd.MaskedCollection.from_name('LANDSAT/LC08/C02/T1_L2')
filt_coll = coll.search('2019-01-01', '2019-02-15', region, cloudless_portion=0)

# display the search results
print(filt_coll.schema_table)
print(filt_coll.properties_table)
# [search-end]

# [image-download-start]
# create a landsat-8 image from its ID
im = gd.MaskedImage.from_id('LANDSAT/LC08/C02/T1_L2/LC08_175083_20190117', mask=False)

# download a region of the image with 'average' resampling to 60m pixels, and
# data type conversion to 16 bit unsigned int
im.download('landsat_8_image.tif', region=region, resampling='average', scale=60, dtype='uint16')
# [image-download-end]

# [composite-start]
# find a 'q-mosaic' composite image of the search result images, prioritising
# the least cloudy image by specifying `region`
comp_im = filt_coll.composite(method='q-mosaic', region=region)
# download the composite, specifying crs, region, and scale
comp_im.download('landsat_8_comp_image.tif', region=region, crs='EPSG:32634', scale=30)
# [composite-end]

# [mask-start]
# create a cloud/shadow masked Sentinel-2 image, specifying a cloud
# score threshold of 0.7
im = gd.MaskedImage.from_id(
    'COPERNICUS/S2_SR_HARMONIZED/20190101T082331_20190101T084846_T34HCH', mask=True, score=0.7
)
# download a region of the masked image, downsampling to 20m pixels
im.download('s2_sr_hm_image.tif', region=region, scale=20, resampling='average')
# [mask-end]

# [metadata-start]
import rasterio as rio

# re-open the Sentinel-2 image downloaded above to inspect its metadata
# (fix: was 's2_sr_image.tif', which no prior step creates)
with rio.open('s2_sr_hm_image.tif', 'r') as ds:
    print('Image properties:\n', ds.tags())
    print('Band names:\n', ds.descriptions)
    print('Band 1 properties:\n', ds.tags(1))
# [metadata-end]

# [max_tile_size-start]
import ee

# create a computed ee.Image
ee_im = ee.Image('COPERNICUS/S2_SR_HARMONIZED/20190101T082331_20190101T084846_T34HCH')
comp_im = ee_im.select('B3').entropy(ee.Kernel.square(5))

# encapsulate in MaskedImage, and download with max_tile_size=8
im = gd.MaskedImage(comp_im)
im.download('s2_entropy.tif', region=region, max_tile_size=8)
# [max_tile_size-end]

# [export-asset-download-start]
# create EE asset ID & export computed image to asset
# (replace <project> with your Earth Engine cloud project ID)
asset_id = f'projects/<project>/assets/s2_entropy'
_ = im.export(asset_id, type='asset', region=region, wait=True)

# create and download the asset image
im = gd.MaskedImage.from_id(asset_id)
im.download('s2_entropy.tif')
# [export-asset-download-end]
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | geedim
2 | ======
3 |
4 | .. include:: ../README.rst
5 | :start-after: short_descr_start
6 | :end-before: short_descr_end
7 |
8 | .. include:: description.rst
9 |
10 | .. include:: installation.rst
11 |
12 | Contents
13 | --------
14 |
15 | .. toctree::
16 | :maxdepth: 2
17 |
18 | description
19 | installation
20 | cli
21 | api
22 | contributing
23 |
24 | * :ref:`genindex`
25 |
26 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../README.rst
2 | :start-after: install_start
3 | :end-before: install_end
4 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

REM Default to the sphinx-build on PATH unless SPHINXBUILD is already set.
if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

REM Probe the builder; errorlevel 9009 means the command was not found.
%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.https://www.sphinx-doc.org/
	exit /b 1
)

REM No build target given - fall through to the Sphinx help text.
if "%1" == "" goto help

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
36 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | sphinx!=5.2.0.post0
2 | sphinx-click
3 | sphinx-rtd-theme
4 | nbsphinx
5 | ipykernel
6 | jupyter-sphinx
--------------------------------------------------------------------------------
/geedim/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | from geedim.collection import MaskedCollection
18 | from geedim.enums import CloudMaskMethod, CloudScoreBand, CompositeMethod, ExportType, ResamplingMethod
19 | from geedim.mask import MaskedImage
20 | from geedim.utils import Initialize
21 |
--------------------------------------------------------------------------------
/geedim/enums.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | from enum import Enum
18 |
19 | class _StrChoiceEnum(str, Enum):
20 | """String value enumeration class that can be used with a ``click.Choice()`` parameter type."""
21 |
22 | def __repr__(self):
23 | return self._value_
24 |
25 | def __str__(self):
26 | return self._value_
27 |
28 | @property
29 | def name(self):
30 | # override for click>=8.2.0 Choice options which match passed values to Enum names
31 | return self._value_
32 |
33 |
class CompositeMethod(_StrChoiceEnum):
    """
    Enumeration of the methods for combining a stack of corresponding input image pixels into a
    single composite pixel.
    """

    q_mosaic = 'q-mosaic'
    """
    Select the unmasked pixel with the largest cloud distance (distance to the nearest cloud).
    Ties are broken by taking the first such pixel in the stack.
    """

    mosaic = 'mosaic'
    """ Select the first unmasked pixel in the stack. """

    medoid = 'medoid'
    """
    Select the medoid of the unmasked pixels i.e. the pixel from the image whose summed spectral
    distance to the rest of the images is smallest.  Preserves the original relationship between
    bands.  See https://www.mdpi.com/2072-4292/5/12/6481 for detail.
    """

    median = 'median'
    """ Take the per-band median of the unmasked pixels. """

    mode = 'mode'
    """ Take the per-band mode of the unmasked pixels. """

    mean = 'mean'
    """ Take the per-band mean of the unmasked pixels. """

    def __str__(self) -> str:
        return self._name_

    def __repr__(self) -> str:
        return self._name_
70 |
71 |
class CloudMaskMethod(_StrChoiceEnum):
    """Enumeration of the available Sentinel-2 cloud masking methods."""

    cloud_prob = 'cloud-prob'
    """Apply a threshold to the Sentinel-2 Cloud Probability."""

    qa = 'qa'
    """Decode the bit mask in the `QA60` quality assessment band."""

    cloud_score = 'cloud-score'
    """Apply a threshold to the Sentinel-2 Cloud Score+."""

    def __str__(self) -> str:
        return self._name_

    def __repr__(self) -> str:
        return self._name_
89 |
90 |
class CloudScoreBand(_StrChoiceEnum):
    """Enumeration of the Sentinel-2 Cloud Score+ bands available to the
    :attr:`~CloudMaskMethod.cloud_score` cloud masking method.
    """

    cs = 'cs'
    """Pixel quality score derived from the spectral distance to a (theoretical) clear reference."""

    cs_cdf = 'cs_cdf'
    """Value of the cumulative distribution function of possible cs values for the estimated cs value."""

    def __str__(self) -> str:
        return self._name_

    def __repr__(self) -> str:
        return self._name_
107 |
108 |
class ResamplingMethod(_StrChoiceEnum):
    """Enumeration of the available resampling methods."""

    near = 'near'
    """ Nearest neighbour resampling. """

    bilinear = 'bilinear'
    """ Bilinear interpolation. """

    bicubic = 'bicubic'
    """ Bicubic interpolation. """

    average = 'average'
    """ Averaging (the recommended method for downsampling). """

    def __str__(self) -> str:
        return self._name_

    def __repr__(self) -> str:
        return self._name_
129 |
130 |
class ExportType(_StrChoiceEnum):
    """Enumeration of the available export destinations."""

    drive = 'drive'
    """ Export to Google Drive. """

    asset = 'asset'
    """ Export to an Earth Engine asset. """

    cloud = 'cloud'
    """ Export to Google Cloud Storage. """

    def __str__(self) -> str:
        return self._name_

    def __repr__(self) -> str:
        return self._name_
148 |
149 |
class SpectralDistanceMetric(_StrChoiceEnum):
    """Enumeration of the available spectral distance metrics."""

    sam = 'sam'
    """ Spectral angle mapper. """

    sid = 'sid'
    """ Spectral information divergence. """

    sed = 'sed'
    """ Squared euclidean distance. """

    emd = 'emd'
    """ Earth movers distance. """

    def __str__(self) -> str:
        return self._name_

    def __repr__(self) -> str:
        return self._name_
170 |
--------------------------------------------------------------------------------
/geedim/errors.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 |
class GeedimError(Exception):
    """Base class for all ``geedim`` exceptions."""
20 |
21 |
class UnfilteredError(GeedimError):
    """Raised when attempting to retrieve the properties of an unfiltered image collection."""
24 |
25 |
class InputImageError(GeedimError):
    """Raised when there is a problem with the input images making up a collection."""
28 |
29 |
class TileError(GeedimError):
    """Raised when there is a problem downloading an image tile."""
32 |
33 |
class GeedimWarning(UserWarning):
    """Base class for all ``geedim`` warnings."""
36 |
--------------------------------------------------------------------------------
/geedim/medoid.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2022 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 | import ee
17 | from geedim.enums import SpectralDistanceMetric
18 | from typing import Optional, List
19 | """
20 | This module contains Medoid related functionality adapted from 'Google Earth Engine tools' under MIT
21 | license. See https://github.com/gee-community/gee_tools.
22 | """
23 |
24 |
def sum_distance(
    image: ee.Image, collection: ee.ImageCollection, bands: Optional[List] = None,
    metric: SpectralDistanceMetric = SpectralDistanceMetric.sed,
) -> ee.Image:
    """Return the sum of the spectral distances between ``image`` and each image in ``collection``."""
    metric = SpectralDistanceMetric(metric)
    sel_bands = bands if bands else collection.first().bandNames()
    ref_image = ee.Image(image).select(sel_bands)

    # Masking requirements:
    # - Where ``image`` is masked, the summed distance should be masked.
    # - Where any other image in ``collection`` is masked, the summed distance should omit its
    #   contribution.
    # Both are satisfied by leaving the images masked: each pairwise distance below is masked
    # where either input is masked, and ImageCollection.sum() omits masked areas, so the sum is
    # only masked where all component distances are (i.e. where ``image`` is masked).

    def append_distance(other: ee.Image, acc: ee.List) -> ee.List:
        """Earth Engine iterator function: append the distance ``ref_image`` -> ``other`` to ``acc``."""
        other = ee.Image(other).select(sel_bands)

        # Distance between ref_image and other; neither is unmasked, so the distance is masked
        # where one or both are masked.
        pair_dist = ref_image.spectralDistance(other, metric.value)
        if metric == SpectralDistanceMetric.sed:
            # sqrt scaling is necessary for summing with other distances and equivalence to the
            # original medoid method.
            pair_dist = pair_dist.sqrt()

        return ee.List(acc).add(pair_dist)

    distances = ee.List(collection.iterate(append_distance, ee.List([])))
    # TODO: are we better off using mean here to avoid overflow?
    return ee.ImageCollection(distances).sum()
63 |
64 |
def medoid_score(
    collection: ee.ImageCollection, bands: Optional[List] = None, name: str = 'sumdist',
) -> ee.ImageCollection:
    """
    Add a medoid score band (i.e. the negated sum of the distances to all other images) to every
    image in ``collection``.

    Parameters
    ----------
    collection: ee.ImageCollection
        Collection whose images are given a medoid score band.
    bands: list of str, optional
        Bands used to calculate the medoid score (default: use all bands).
    name: str, optional
        Name for the added score band (default: 'sumdist').

    Returns
    -------
    ee.ImageCollection
        Collection with added medoid score band.
    """

    def _with_score(im: ee.Image) -> ee.Image:
        """Return ``im`` with its medoid score band appended."""
        im = ee.Image(im)

        # Sum of the spectral distances between this image and every image in the collection.
        # TODO: many (~50%) of these distance calcs are duplicates, can we make this more efficient?
        sumdist = sum_distance(im, collection, bands=bands)

        # Negate so that the highest score corresponds to the lowest summed distance.
        return im.addBands(sumdist.multiply(-1).rename(name))

    return collection.map(_with_score)
99 |
100 |
def medoid(collection: ee.ImageCollection, bands: Optional[List] = None) -> ee.Image:
    """
    Find the medoid composite of an image collection.  Adapted from
    https://www.mdpi.com/2072-4292/5/12/6481 and https://github.com/gee-community/gee_tools.

    Parameters
    ----------
    collection: ee.ImageCollection
        Image collection to composite.
    bands: list of str, optional
        Bands to calculate the medoid score from (default: use all bands).

    Returns
    -------
    ee.Image
        Medoid composite image.
    """
    score_name = 'sumdist'
    scored_coll = medoid_score(collection, bands, name=score_name)
    # qualityMosaic() selects, per pixel, the image with the maximum score i.e. the minimum
    # summed distance.
    comp_im = scored_coll.qualityMosaic(score_name)
    # Drop the score band before returning.
    return comp_im.select(comp_im.bandNames().remove(score_name))
124 |
--------------------------------------------------------------------------------
/geedim/schema.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | from textwrap import wrap
18 |
19 | # schema definitions for MaskedImage.from_id(), geedim <-> EE collection names, and search properties
20 | from tabulate import tabulate
21 |
22 | import geedim.mask
23 |
# Generic search-property schema: maps an EE property name to its abbreviated table heading
# ('abbrev') and description, for collections without cloud/shadow-specific properties.
default_prop_schema = {
    'system:id': {'abbrev': 'ID', 'description': 'Earth Engine image id'},
    'system:time_start': {'abbrev': 'DATE', 'description': 'Image capture date/time (UTC)'},
    'FILL_PORTION': {'abbrev': 'FILL', 'description': 'Portion of region pixels that are valid (%)'},
}
29 |
# Search-property schema for the Landsat collection 2 surface reflectance collections.
landsat_prop_schema = {
    'system:id': {'abbrev': 'ID', 'description': 'Earth Engine image id'},
    'system:time_start': {'abbrev': 'DATE', 'description': 'Image capture date/time (UTC)'},
    'FILL_PORTION': {'abbrev': 'FILL', 'description': 'Portion of region pixels that are valid (%)'},
    'CLOUDLESS_PORTION': {
        'abbrev': 'CLOUDLESS',
        'description': 'Portion of filled pixels that are cloud/shadow free (%)',
    },
    'GEOMETRIC_RMSE_MODEL': {'abbrev': 'GRMSE', 'description': 'Orthorectification RMSE (m)'},
    'SUN_AZIMUTH': {'abbrev': 'SAA', 'description': 'Solar azimuth angle (deg)'},
    'SUN_ELEVATION': {'abbrev': 'SEA', 'description': 'Solar elevation angle (deg)'},
}
42 |
# Search-property schema for the Sentinel-2 collections.
s2_prop_schema = {
    'system:id': {'abbrev': 'ID', 'description': 'Earth Engine image id'},
    'system:time_start': {'abbrev': 'DATE', 'description': 'Image capture date/time (UTC)'},
    'FILL_PORTION': {'abbrev': 'FILL', 'description': 'Portion of region pixels that are valid (%)'},
    'CLOUDLESS_PORTION': {
        'abbrev': 'CLOUDLESS',
        'description': 'Portion of filled pixels that are cloud/shadow free (%)',
    },
    'RADIOMETRIC_QUALITY': {'abbrev': 'RADQ', 'description': 'Radiometric quality check'},
    'GEOMETRIC_QUALITY': {'abbrev': 'GEOMQ', 'description': 'Geometric quality check'},
    'MEAN_SOLAR_AZIMUTH_ANGLE': {'abbrev': 'SAA', 'description': 'Solar azimuth angle (deg)'},
    'MEAN_SOLAR_ZENITH_ANGLE': {'abbrev': 'SZA', 'description': 'Solar zenith angle (deg)'},
    'MEAN_INCIDENCE_AZIMUTH_ANGLE_B1': {'abbrev': 'VAA', 'description': 'View (B1) azimuth angle (deg)'},
    'MEAN_INCIDENCE_ZENITH_ANGLE_B1': {'abbrev': 'VZA', 'description': 'View (B1) zenith angle (deg)'},
}
58 |
# Master schema of supported collections: maps an EE collection name to its geedim short name
# ('gd_coll_name'), search-property schema, MaskedImage subclass used for cloud/shadow masking
# ('image_type'), catalog URL and description (the latter two are used to generate the
# supported-collection tables for the README and RTD docs).
collection_schema = {
    'LANDSAT/LT04/C02/T1_L2': {
        'gd_coll_name': 'l4-c2-l2',
        'prop_schema': landsat_prop_schema,
        'image_type': geedim.mask.LandsatImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LT04_C02_T1_L2',
        'description': 'Landsat 4, collection 2, tier 1, level 2 surface reflectance.',
    },
    'LANDSAT/LT05/C02/T1_L2': {
        'gd_coll_name': 'l5-c2-l2',
        'prop_schema': landsat_prop_schema,
        'image_type': geedim.mask.LandsatImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LT05_C02_T1_L2',
        'description': 'Landsat 5, collection 2, tier 1, level 2 surface reflectance.',
    },
    'LANDSAT/LE07/C02/T1_L2': {
        'gd_coll_name': 'l7-c2-l2',
        'prop_schema': landsat_prop_schema,
        'image_type': geedim.mask.LandsatImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LE07_C02_T1_L2',
        'description': 'Landsat 7, collection 2, tier 1, level 2 surface reflectance.',
    },
    'LANDSAT/LC08/C02/T1_L2': {
        'gd_coll_name': 'l8-c2-l2',
        'prop_schema': landsat_prop_schema,
        'image_type': geedim.mask.LandsatImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LC08_C02_T1_L2',
        'description': 'Landsat 8, collection 2, tier 1, level 2 surface reflectance.',
    },
    'LANDSAT/LC09/C02/T1_L2': {
        'gd_coll_name': 'l9-c2-l2',
        'prop_schema': landsat_prop_schema,
        'image_type': geedim.mask.LandsatImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LC09_C02_T1_L2',
        'description': 'Landsat 9, collection 2, tier 1, level 2 surface reflectance.',
    },
    'COPERNICUS/S2': {
        'gd_coll_name': 's2-toa',
        'prop_schema': s2_prop_schema,
        'image_type': geedim.mask.Sentinel2ToaClImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S2',
        'description': 'Sentinel-2, level 1C, top of atmosphere reflectance.',
    },
    'COPERNICUS/S2_SR': {
        'gd_coll_name': 's2-sr',
        'prop_schema': s2_prop_schema,
        'image_type': geedim.mask.Sentinel2SrClImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S2_SR',
        'description': 'Sentinel-2, level 2A, surface reflectance.',
    },
    'COPERNICUS/S2_HARMONIZED': {
        'gd_coll_name': 's2-toa-hm',
        'prop_schema': s2_prop_schema,
        'image_type': geedim.mask.Sentinel2ToaClImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S2_HARMONIZED',
        'description': 'Harmonised Sentinel-2, level 1C, top of atmosphere reflectance.',
    },
    'COPERNICUS/S2_SR_HARMONIZED': {
        'gd_coll_name': 's2-sr-hm',
        'prop_schema': s2_prop_schema,
        'image_type': geedim.mask.Sentinel2SrClImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S2_SR_HARMONIZED',
        'description': 'Harmonised Sentinel-2, level 2A, surface reflectance.',
    },
    'MODIS/061/MCD43A4': {
        'gd_coll_name': 'modis-nbar',
        'prop_schema': default_prop_schema,
        'image_type': geedim.mask.MaskedImage,
        'ee_url': 'https://developers.google.com/earth-engine/datasets/catalog/MODIS_061_MCD43A4',
        'description': 'MODIS nadir BRDF adjusted daily reflectance.',
    },
}
131 |
132 |
# Dict to convert from Earth Engine to geedim collection names
ee_to_gd = {k: v['gd_coll_name'] for k, v in collection_schema.items()}

# Dict to convert from geedim to Earth Engine collection names
gd_to_ee = {v['gd_coll_name']: k for k, v in collection_schema.items()}

# "Two way" dict to convert Earth Engine to/from geedim collection names
coll_names = dict(**gd_to_ee, **ee_to_gd)

# A list of cloud/shadow mask supported EE collection names (i.e. those with a MaskedImage subclass image_type)
cloud_coll_names = [k for k, v in collection_schema.items() if v['image_type'] != geedim.mask.MaskedImage]
144 |
145 |
def cli_cloud_coll_table() -> str:
    """Return a table of cloud/shadow mask supported collections for use in CLI help strings."""
    headers = dict(gd_coll_name='geedim name', ee_coll_name='EE name')
    # include only collections whose image_type supports cloud/shadow masking
    data = [
        dict(gd_coll_name=entry['gd_coll_name'], ee_coll_name=ee_name)
        for ee_name, entry in collection_schema.items()
        if entry['image_type'] != geedim.mask.MaskedImage
    ]
    return tabulate(data, headers=headers, tablefmt='rst')
154 |
155 |
def cloud_coll_table(descr_join='\n') -> str:
    """
    Return a table of cloud/shadow mask supported collections.

    * Use the default ``descr_join`` (single newline) for github README friendly formatting.
    * Use a double newline for RTD/Sphinx friendly formatting.

    Instructions for adding cloud/shadow supported collections to CLI help and github README:

    * print(cli_cloud_coll_table()) and paste into cli.search() and cli.config() command docstrings.
    * print(cloud_coll_table()) and paste into the README.
    * The equivalent RTD table is auto-generated in docs/conf.py.
    """
    headers = dict(ee_coll_name='EE name', descr='Description')
    data = []
    for ee_name, entry in collection_schema.items():
        # skip collections without cloud/shadow mask support
        if entry['image_type'] == geedim.mask.MaskedImage:
            continue
        # wrap the linked name and description for RTD multiline table cells
        name_col = '\n'.join(wrap(f'`{ee_name} <{entry["ee_url"]}>`_', width=40))
        descr_col = descr_join.join(wrap(entry['description'], width=60))
        data.append(dict(ee_coll_name=name_col, descr=descr_col))

    return tabulate(data, headers=headers, tablefmt='grid')
176 |
--------------------------------------------------------------------------------
/geedim/stac.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 | import json
17 | import logging
18 | import threading
19 | from concurrent.futures import ThreadPoolExecutor, as_completed
20 | from typing import Dict, Union
21 |
22 | from geedim import utils
23 |
logger = logging.getLogger(__name__)
# root of the public EE STAC catalog - tree traversal starts from this URL
root_stac_url = 'https://earthengine-stac.storage.googleapis.com/catalog/catalog.json'
26 |
27 |
class StacItem:

    def __init__(self, name: str, item_dict: Dict):
        """
        Image/collection STAC container class. Provides access to band properties and root property descriptions.

        Parameters
        ----------
        name: str
            The image/collection ID/name
        item_dict: dict
            The raw STAC dict for the image/collection.
        """
        self._name = name
        self._item_dict = item_dict
        # parse once on construction; both parsers return None when the STAC has no matching data
        self._descriptions = self._get_descriptions(item_dict)
        self._band_props = self._get_band_props(item_dict)

    def _get_descriptions(self, item_dict: Dict) -> Union[Dict[str, str], None]:
        """ Return a dictionary with property names as keys, and descriptions as values, or None if the STAC has
        no property schema.
        """
        summaries = item_dict.get('summaries', None)
        if summaries is None:
            return None
        # prefer 'gee:schema', falling back to 'gee:collection_schema' (key presence, not truthiness, decides)
        if 'gee:schema' in summaries:
            gee_schema = summaries['gee:schema']
        elif 'gee:collection_schema' in summaries:
            gee_schema = summaries['gee:collection_schema']
        else:
            return None
        return {item['name']: item['description'] for item in gee_schema}

    def _get_band_props(self, item_dict: Dict) -> Union[Dict[str, Dict], None]:
        """
        Return a dictionary of band properties, with band names as keys, and properties as values, or None if the
        STAC has no band information.
        """
        summaries = item_dict.get('summaries', None)
        if summaries is None or 'eo:bands' not in summaries:
            logger.warning(f'There is no STAC band information for {self._name}')
            return None

        ee_band_props = summaries['eo:bands']
        # if the gsd is the same across all bands, there is a single `gsd` key in summaries, otherwise each item
        # in ee_band_props has its own `gsd` key
        global_gsd = summaries.get('gsd', None)
        # the EE band properties to copy (any 'gee:' prefix is stripped from the destination key)
        prop_keys = [
            'name', 'description', 'center_wavelength', 'gee:wavelength', 'gee:units', 'gee:scale', 'gee:offset'
        ]

        def strip_gee(key: str) -> str:
            """ Remove 'gee:' from the start of `key` if it is there. """
            return key[4:] if key.startswith('gee:') else key

        band_props = {}
        for ee_band_dict in ee_band_props:
            band_dict = {
                strip_gee(prop_key): ee_band_dict[prop_key] for prop_key in prop_keys if prop_key in ee_band_dict
            }
            # per-band gsd takes precedence over the summary-level gsd
            gsd = ee_band_dict.get('gsd', global_gsd)
            if isinstance(gsd, (list, tuple)):
                gsd = gsd[0]
            if gsd:
                band_dict['gsd'] = gsd
            band_props[ee_band_dict['name']] = band_dict
        return band_props

    @property
    def name(self) -> str:
        """ ID/name of the contained image/collection STAC. """
        return self._name

    @property
    def descriptions(self) -> Union[Dict[str, str], None]:
        """ Dictionary of property descriptions with property names as keys, and descriptions as values. """
        return self._descriptions

    @property
    def band_props(self) -> Union[Dict[str, Dict], None]:
        """ Dictionary of band properties, with band names as keys, and properties as values. """
        return self._band_props

    @property
    def license(self) -> Union[str, None]:
        """ Terms of use / license URL, or None if the STAC has no license link. """
        # return the href of the first link with rel == 'license'
        links = self._item_dict.get('links', [])
        return next(
            (link['href'] for link in links if link.get('rel', None) == 'license' and 'href' in link), None
        )
119 |
120 |
@utils.singleton
class StacCatalog:

    def __init__(self):
        """ Singleton class to interface to the EE STAC, and retrieve image/collection STAC data. """
        # bundled snapshot of the name -> STAC URL mapping (see write_url_dict / refresh_url_dict)
        self._filename = utils.root_path.joinpath('geedim/data/ee_stac_urls.json')
        # requests session configured for retries
        self._session = utils.retry_session()
        self._url_dict = None
        # cache of name -> raw STAC dict, so each item is requested at most once
        self._cache = {}
        # protects url_dict writes across traversal threads in _traverse_stac
        self._lock = threading.Lock()

    @property
    def url_dict(self) -> Dict[str, str]:
        """ Dictionary with image/collection IDs/names as keys, and STAC URLs as values. """
        if not self._url_dict:
            # delay reading the json file until it is needed.
            with open(self._filename, 'r') as f:
                self._url_dict = json.load(f)
        return self._url_dict

    def _traverse_stac(self, url: str, url_dict: Dict) -> Dict:
        """
        Recursive & threaded EE STAC tree traversal that returns the `url_dict` i.e. a dict with image/collection
        IDs/names as keys, and the corresponding json STAC URLs as values.
        """
        response = self._session.get(url)
        if not response.ok:
            # log and skip unreadable sub-trees rather than failing the whole traversal
            logger.warning(f'Error reading {url}: ' + str(response.content))
            return url_dict
        response_dict = response.json()
        if 'type' in response_dict:
            if (response_dict['type'].lower() == 'collection'):
                # we have reached a leaf node
                if (('gee:type' in response_dict) and
                        (response_dict['gee:type'].lower() in ['image_collection', 'image'])):
                    # we have reached an image / image collection leaf node
                    with self._lock:
                        url_dict[response_dict['id']] = url
                    logger.debug(f'ID: {response_dict["id"]}, Type: {response_dict["gee:type"]}, URL: {url}')
                return url_dict

        with ThreadPoolExecutor() as executor:
            # traverse the sub-tree links in a thread pool
            futures = []
            for link in response_dict['links']:
                if link['rel'].lower() == 'child':
                    futures.append(executor.submit(self._traverse_stac, link['href'], url_dict))
            for future in as_completed(futures):
                url_dict = future.result()
        return url_dict

    def refresh_url_dict(self):
        """ Update `url_dict` with the latest from EE STAC. """
        url_dict = {}
        url_dict = self._traverse_stac(root_stac_url, url_dict)
        # sort by name for a stable, diff-friendly ordering when written to file
        self._url_dict = dict(sorted(url_dict.items()))

    def write_url_dict(self, filename=None):
        """ Write the ``url_dict`` to file. """
        if filename is None:
            filename = self._filename
        with open(filename, 'w') as f:
            json.dump(self.url_dict, f)

    def get_item_dict(self, name: str) -> Union[Dict, None]:
        """
        Get the raw STAC dict for a given an image/collection name/ID.

        Parameters
        ----------
        name: str
            ID/name of the image/collection whose STAC data to retrieve.

        Returns
        -------
        dict
            Image/collection STAC data in a dict, if it exists, otherwise None.
        """
        # if `name` is an image ID, use its collection's STAC entry instead
        coll_name = utils.split_id(name)[0]
        if coll_name in self.url_dict:
            name = coll_name

        # store item dicts in a cache so we don't have to request them more than once
        if name not in self._cache:
            if name not in self.url_dict:
                logger.warning(f'There is no STAC entry for: {name}')
                self._cache[name] = None
            else:
                # NOTE(review): response.ok is not checked before .json() - a failed request raises here.
                # Also, _cache access is not guarded by _lock - confirm single-threaded use is intended.
                response = self._session.get(self.url_dict[name])
                self._cache[name] = response.json()
        return self._cache[name]

    def get_item(self, name: str) -> StacItem:
        """
        Get a STAC container instance for a given an image/collection name/ID.

        Parameters
        ----------
        name: str
            The ID/name of the image/collection whose STAC container to retrieve.

        Returns
        -------
        StacItem
            image/collection STAC container, if it exists, otherwise None.
        """
        # if `name` is an image ID, use its collection's STAC entry instead
        coll_name = utils.split_id(name)[0]
        if coll_name in self.url_dict:
            name = coll_name
        item_dict = self.get_item_dict(name)
        return StacItem(name, item_dict) if item_dict else None
232 |
--------------------------------------------------------------------------------
/geedim/tile.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | import logging
18 | import threading
19 | import time
20 | from io import BytesIO
21 |
22 | import numpy as np
23 | import rasterio as rio
24 | import requests
25 | from rasterio import Affine
26 | from rasterio.errors import RasterioIOError
27 | from rasterio.windows import Window
28 | from requests.exceptions import JSONDecodeError, RequestException
29 | from tqdm.auto import tqdm
30 |
31 | from geedim import utils
32 | from geedim.errors import TileError
33 |
34 | logger = logging.getLogger(__name__)
35 |
36 |
class Tile:
    # lock to prevent concurrent calls to ee.Image.getDownloadURL(), which can cause a seg fault in the standard
    # python networking libraries.
    _ee_lock = threading.Lock()

    def __init__(self, exp_image, window: Window):
        """
        Class for downloading an Earth Engine image tile (a rectangular region of interest in the image).

        Parameters
        ----------
        exp_image: BaseImage
            BaseImage instance to derive the tile from.
        window: Window
            rasterio window into `exp_image`, specifying the region of interest for this tile.
        """
        self._exp_image = exp_image
        self._window = window
        # offset the image geo-transform origin so that it corresponds to the UL corner of the tile.
        self._transform = exp_image.transform * Affine.translation(window.col_off, window.row_off)
        self._shape = (window.height, window.width)

    @property
    def window(self) -> Window:
        """rasterio tile window into the source image."""
        return self._window

    @staticmethod
    def _raise_for_status(response: requests.Response):
        """Raise a TileError if the tile cannot be downloaded."""
        download_size = int(response.headers.get('content-length', 0))
        # a zero content-length means there is no tile data to read
        if download_size == 0 or response.status_code != 200:
            msg = f'Error downloading tile: {response.status_code} - {response.reason}. URL: {response.url}.'
            try:
                resp_dict = response.json()
                if 'error' in resp_dict and 'message' in resp_dict['error']:
                    # raise an exception with the response error message
                    msg = resp_dict['error']['message']
                    msg = f'Error downloading tile: {msg} URL: {response.url}.'
                    if 'user memory limit exceeded' in msg.lower():
                        msg += (
                            '\nThe `max_tile_size` or `max_tile_dim` parameters can be decreased to work around this '
                            'error. Alternatively you can export to Earth Engine asset, and then download the asset '
                            'image.'
                        )
            except JSONDecodeError:
                # response body is not JSON - fall back to the status based message
                pass

            raise TileError(msg)

    def _download_to_array(self, url: str, session: requests.Session = None, bar: tqdm = None) -> np.ndarray:
        """
        Download the image tile into a numpy array.

        Parameters
        ----------
        url: str
            URL to download the tile GeoTIFF from.
        session: requests.Session, optional
            requests session to use for downloading.  Defaults to the ``requests`` module.
        bar: tqdm, optional
            tqdm progress bar instance to update with incremental (0-1) download progress.

        Returns
        -------
        numpy.ndarray
            3D numpy array of the tile pixel data with bands down the first dimension.
        """
        # get image download response
        session = session or requests
        response = session.get(url, stream=True, timeout=(30, 300))

        # raise a TileError if the tile cannot be downloaded
        self._raise_for_status(response)

        # find raw download size (raw, uncompressed pixel bytes - used to scale progress updates)
        download_size = int(response.headers.get('content-length', 0))
        dtype_size = np.dtype(self._exp_image.dtype).itemsize
        raw_download_size = self._shape[0] * self._shape[1] * self._exp_image.count * dtype_size

        # download & read the tile
        downloaded_size = 0
        buf = BytesIO()
        try:
            # download gtiff into buffer
            for data in response.iter_content(chunk_size=10240):
                if bar:
                    # update with raw download progress (0-1)
                    bar.update(raw_download_size * (len(data) / download_size))
                buf.write(data)
                downloaded_size += len(data)
            buf.flush()

            # read the tile array from the GeoTIFF buffer
            buf.seek(0)
            # NOTE(review): 'ALL_CPUs' spelling kept as-is - GDAL documents 'ALL_CPUS'; presumably the value is
            # matched case-insensitively - confirm.
            env = rio.Env(GDAL_NUM_THREADS='ALL_CPUs', GTIFF_FORCE_RGBA=False)
            with utils.suppress_rio_logs(), env, rio.open(buf, 'r') as ds:
                array = ds.read()
            return array

        except (RequestException, RasterioIOError):
            if bar:
                # reverse the progress bar by the amount that was downloaded, then re-raise for the caller to retry
                bar.update(-raw_download_size * (downloaded_size / download_size))
            raise

    def download(
        self,
        session: requests.Session = None,
        bar: tqdm = None,
        max_retries: int = 5,
        backoff_factor: float = 2.0,
    ) -> np.ndarray:
        """
        Download the image tile into a numpy array.

        Parameters
        ----------
        session: requests.Session, optional
            requests session to use for downloading
        bar: tqdm, optional
            tqdm progress bar instance to update with incremental (0-1) download progress.
        max_retries: int, optional
            Number of times to retry downloading the tile. This is independent of the ``session``, which may have its
            own retry configuration.
        backoff_factor: float, optional
            Backoff factor to apply between tile download retries. The delay between retries is: {backoff_factor} *
            (2 ** ({number of previous retries})) seconds. This is independent of the ``session``, which may have its
            own retry configuration.

        Returns
        -------
        array: numpy.ndarray
            3D numpy array of the tile pixel data with bands down the first dimension.

        Raises
        ------
        TileError
            If the tile could not be downloaded after ``max_retries`` retries.
        """
        session = session or requests

        # get download URL
        # NOTE(review): _ee_lock is not acquired around getDownloadURL() here - confirm whether serialisation is
        # handled by the caller.
        url = self._exp_image.ee_image.getDownloadURL(
            dict(
                crs=self._exp_image.crs,
                crs_transform=tuple(self._transform)[:6],
                dimensions=self._shape[::-1],
                format='GEO_TIFF',
            )
        )

        # download and read the tile, with retries
        for retry in range(max_retries + 1):
            try:
                return self._download_to_array(url, session=session, bar=bar)
            except (RequestException, RasterioIOError) as ex:
                if retry < max_retries:
                    # exponential backoff before the next attempt
                    time.sleep(backoff_factor * (2**retry))
                    logger.warning(f'Tile download failed, retry {retry + 1} of {max_retries}. URL: {url}. {str(ex)}.')
                else:
                    raise TileError(f'Tile download failed, reached the maximum retries. URL: {url}.') from ex
179 |
--------------------------------------------------------------------------------
/geedim/utils.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | from __future__ import annotations
18 |
19 | import itertools
20 | import json
21 | import logging
22 | import os
23 | import pathlib
24 | import sys
25 | import time
26 | from contextlib import contextmanager
27 | from threading import Thread
28 | from typing import Optional, Tuple
29 |
30 | import ee
31 | import numpy as np
32 | import rasterio as rio
33 | import requests
34 | from rasterio import warp
35 | from rasterio.env import GDALVersion
36 | from rasterio.windows import Window
37 | from requests.adapters import HTTPAdapter, Retry
38 | from tqdm.auto import tqdm
39 |
40 | from geedim.enums import ResamplingMethod
41 |
if "__file__" in globals():
    # normal import: the repository root is one directory up from this file
    root_path = pathlib.Path(__file__).absolute().parents[1]
else:
    # __file__ undefined (e.g. interactive / frozen use) - fall back to the working directory
    root_path = pathlib.Path(os.getcwd())

# True if the GDAL runtime is version 3.5 or newer
_GDAL_AT_LEAST_35 = GDALVersion.runtime().at_least("3.5")
48 |
49 |
def Initialize(
    opt_url: Optional[str] = "https://earthengine-highvolume.googleapis.com", **kwargs
):
    """
    Initialise Earth Engine.

    Credentials will be read from the `EE_SERVICE_ACC_PRIVATE_KEY` environment variable if it exists
    (useful for integrating with e.g. GitHub actions).

    .. note::

        Earth Engine recommends using the high volume endpoint for applications like ``geedim``.
        See `the high volume endpoint docs
        <https://developers.google.com/earth-engine/cloud/highvolume>`_ for more information.

    Parameters
    ----------
    opt_url: str
        The Earth Engine endpoint to use. If ``None``, the default is used.
    kwargs
        Optional arguments to pass to `ee.Initialize`.
    """
    # only initialise when there are no existing credentials
    # NOTE(review): ee.data._credentials is a private attribute - confirm it remains available across
    # earthengine-api versions.
    if not ee.data._credentials:
        # Adapted from https://gis.stackexchange.com/questions/380664/how-to-de-authenticate-from-earth-engine-api.
        env_key = "EE_SERVICE_ACC_PRIVATE_KEY"

        if env_key in os.environ:
            # authenticate with service account
            key_dict = json.loads(os.environ[env_key])
            credentials = ee.ServiceAccountCredentials(
                key_dict["client_email"], key_data=key_dict["private_key"]
            )
            ee.Initialize(
                credentials, opt_url=opt_url, project=key_dict["project_id"], **kwargs
            )
        else:
            ee.Initialize(opt_url=opt_url, **kwargs)
87 |
88 |
def singleton(cls):
    """Class decorator to make it a singleton."""
    # single-cell container holding the lone instance once it has been created
    instance = []

    def getinstance() -> cls:
        if not instance:
            instance.append(cls())
        return instance[0]

    return getinstance
99 |
100 |
def split_id(image_id: str) -> Tuple[str, str]:
    """
    Split Earth Engine image ID into collection and index components.

    Parameters
    ----------
    image_id: str
        Earth engine image ID.

    Returns
    -------
    tuple(str, str)
        A tuple of strings: (collection name, image index).
    """
    if not image_id:
        return None, None
    # everything before the last '/' is the collection, the remainder is the image index
    ee_coll_name, _, index = image_id.rpartition("/")
    return ee_coll_name, index
120 |
121 |
@contextmanager
def suppress_rio_logs(level: int = logging.ERROR):
    """A context manager that sets the `rasterio` logging level, then returns it to its original value."""
    # TODO: this should not be necessary if logging level changes are limited to geedim. if it has to be used,
    #  it should be made thread-safe.
    # GEE sets GeoTIFF `colorinterp` tags incorrectly. This suppresses the related `rasterio` warning:
    # 'Sum of Photometric type-related color channels and ExtraSamples doesn't match SamplesPerPixel'
    rio_logger = logging.getLogger("rasterio")
    prev_level = rio_logger.getEffectiveLevel()
    try:
        rio_logger.setLevel(level)
        yield
    finally:
        rio_logger.setLevel(prev_level)
135 |
136 |
def get_bounds(filename: pathlib.Path, expand: float = 5):
    """
    Get a geojson polygon representing the bounds of an image.

    Parameters
    ----------
    filename: str, pathlib.Path
        Path of the image file whose bounds to find.
    expand : int, optional
        Percentage (0-100) by which to expand the bounds (default: 5).

    Returns
    -------
    dict
        Geojson polygon.
    """
    with suppress_rio_logs(), rio.Env(GTIFF_FORCE_RGBA=False), rio.open(filename) as im:
        bounds = im.bounds
        # only expand when the CRS is in metres and a positive expansion was requested
        if (im.crs.linear_units == "metre") and (expand > 0):
            dx = (bounds.right - bounds.left) * expand / 100.0
            dy = (bounds.top - bounds.bottom) * expand / 100.0
            bounds = rio.coords.BoundingBox(
                bounds.left - dx,
                bounds.bottom - dy,
                bounds.right + dx,
                bounds.top + dy,
            )

        # closed polygon ring starting and ending at the bottom-right corner
        ring = [
            [bounds.right, bounds.bottom],
            [bounds.right, bounds.top],
            [bounds.left, bounds.top],
            [bounds.left, bounds.bottom],
            [bounds.right, bounds.bottom],
        ]
        poly = dict(type="Polygon", coordinates=[ring])
        # convert to WGS84 geojson
        return warp.transform_geom(im.crs, "WGS84", poly)
180 |
181 |
def get_projection(image: ee.Image, min_scale: bool = True) -> ee.Projection:
    """
    Get the min/max scale projection of image bands. Server side - no calls to getInfo().
    Adapted from from https://github.com/gee-community/gee_tools, MIT license.

    Parameters
    ----------
    image : ee.Image
        Image whose min/max projection to retrieve.
    min_scale: bool, optional
        Retrieve the projection corresponding to the band with the minimum (True) or maximum (False) scale.
        (default: True)

    Returns
    -------
    ee.Projection
        Requested projection.
    """
    if not isinstance(image, ee.Image):
        raise TypeError("image is not an instance of ee.Image")

    band_names = image.bandNames()
    # server-side lists of per-band nominal scales and projections
    band_scales = band_names.map(
        lambda bn: image.select(ee.String(bn)).projection().nominalScale()
    )
    band_projs = band_names.map(lambda bn: image.select(ee.String(bn)).projection())
    # order the projections by ascending scale, then pick the first / last
    sorted_projs = band_projs.sort(band_scales)
    idx = 0 if min_scale else -1
    return ee.Projection(sorted_projs.get(idx))
211 |
212 |
class Spinner(Thread):
    def __init__(self, label="", interval=0.2, leave=True, **kwargs):
        """
        Thread sub-class to run a non-blocking spinner.

        Parameters
        ----------
        label: str, optional
            Prepend spinner with this label.
        interval: float, optional
            Spinner update interval (s).
        leave: optional, bool, str
            What to do with the spinner display on stop():
                ``False``: clear the label + spinner.
                ``True``: leave the label + spinner as is.
                ``<message str>``: print this message in place of the spinner.
        kwargs: optional
            Additional kwargs to pass to Thread.__init__()
        """
        Thread.__init__(self, **kwargs)
        self._label = label
        self._interval = interval
        # polled by run(); cleared by stop()
        self._run = True
        self._leave = leave
        self._file = sys.stderr

    def __enter__(self):
        self.start()
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        self.stop()
        self.join()

    def run(self):
        """Run the spinner thread."""
        cursors_it = itertools.cycle(r"/-\|")

        while self._run:
            cursor = next(cursors_it)
            # overwrite the current line with label + next spinner frame
            tqdm.write("\r" + self._label + cursor, file=self._file, end="")
            self._file.flush()
            time.sleep(self._interval)

        # finalise the display per `leave` (checks the string case first, then truthiness - avoids the
        # `== True` / `== False` comparisons flagged by PEP 8, while keeping the same behaviour)
        if isinstance(self._leave, str):
            tqdm.write(
                "\r" + self._label + self._leave + " ", file=self._file, end="\n"
            )
        elif self._leave:
            tqdm.write("", file=self._file, end="\n")
        else:
            tqdm.write("\r", file=self._file, end="")
        self._file.flush()

    def start(self):
        """Start the spinner thread."""
        self._run = True
        Thread.start(self)

    def stop(self):
        """Stop the spinner thread."""
        self._run = False
275 |
276 |
def resample(ee_image: ee.Image, method: ResamplingMethod) -> ee.Image:
    """
    Resample an ee.Image. Extends ee.Image.resample by only resampling when the image has a fixed projection, and by
    providing an additional 'average' method for downsampling. This logic is performed server-side.

    Note that for the :attr:`ResamplingMethod.average` ``method``, the returned image has a minimum scale default
    projection.

    See https://developers.google.com/earth-engine/guides/resample for more info.

    Parameters
    ----------
    ee_image: ee.Image
        Image to resample.
    method: ResamplingMethod
        Resampling method to use.

    Returns
    -------
    ee_image: ee.Image
        Resampled image.
    """
    # TODO : use STAC to only resample continuous qty type bands
    method = ResamplingMethod(method)
    if method == ResamplingMethod.near:
        # nearest neighbour requires no explicit resampling
        return ee_image

    # resample the image, if it has a fixed projection
    proj = get_projection(ee_image, min_scale=True)
    # heuristic for a fixed projection: the CRS is not EPSG:4326, or the nominal scale is not 1 degree (111319m)
    has_fixed_proj = (
        proj.crs()
        .compareTo("EPSG:4326")
        .neq(0)
        .Or(proj.nominalScale().toInt64().neq(111319))
    )

    def _resample(ee_image: ee.Image) -> ee.Image:
        """Resample the given image, allowing for additional 'average' method."""
        if method == ResamplingMethod.average:
            # set the default projection to the minimum scale projection (required for e.g. S2 images that have
            # non-fixed projection bands)
            ee_image = ee_image.setDefaultProjection(proj)
            return ee_image.reduceResolution(reducer=ee.Reducer.mean(), maxPixels=1024)
        else:
            return ee_image.resample(method.value)

    # note: _resample(ee_image) builds its graph client-side unconditionally - only the branch selection in
    # ee.Algorithms.If happens server-side
    return ee.Image(ee.Algorithms.If(has_fixed_proj, _resample(ee_image), ee_image))
324 |
325 |
def retry_session(
    retries: int = 5,
    backoff_factor: float = 2.0,
    status_forcelist: Tuple = (429, 500, 502, 503, 504),
    session: requests.Session = None,
) -> requests.Session:
    """requests session configured for retries."""
    session = session or requests.Session()
    # apply the same retry budget to total / read / connect errors
    retry_cfg = Retry(
        total=retries,
        read=retries,
        connect=retries,
        backoff_factor=backoff_factor,
        status_forcelist=status_forcelist,
    )
    adapter = HTTPAdapter(max_retries=retry_cfg)
    for scheme in ("http://", "https://"):
        session.mount(scheme, adapter)
    return session
345 |
346 |
def expand_window_to_grid(
    win: Window, expand_pixels: Tuple[int, int] = (0, 0)
) -> Window:
    """
    Expand rasterio window extents to the nearest whole numbers i.e. for ``expand_pixels`` >= (0, 0), it will return a
    window that contains the original extents.

    Parameters
    ----------
    win: rasterio.windows.Window
        Window to expand.
    expand_pixels: tuple, optional
        Tuple specifying the number of (rows, columns) pixels to expand the window by.

    Returns
    -------
    rasterio.windows.Window
        Expanded window.
    """
    row_exp, col_exp = expand_pixels
    # floor the expanded offsets onto the grid, keeping the fractional remainders
    col_off, col_frac = np.divmod(win.col_off - col_exp, 1)
    row_off, row_frac = np.divmod(win.row_off - row_exp, 1)
    # grow the extents to cover the remainders, then ceil onto the grid
    width = np.ceil(win.width + 2 * col_exp + col_frac)
    height = np.ceil(win.height + 2 * row_exp + row_frac)
    return Window(int(col_off), int(row_off), int(width), int(height))
372 |
373 |
374 | def rio_crs(crs: str | rio.CRS) -> str | rio.CRS:
375 | """Convert a GEE CRS string to a rasterio compatible CRS string."""
376 | if crs == "SR-ORG:6974":
377 | # This is a workaround for https://issuetracker.google.com/issues/194561313, that replaces the alleged GEE
378 | # SR-ORG:6974 with actual WKT for SR-ORG:6842 taken from
379 | # https://github.com/OSGeo/spatialreference.org/blob/master/scripts/sr-org.json.
380 | crs = """PROJCS["Sinusoidal",
381 | GEOGCS["GCS_Undefined",
382 | DATUM["Undefined",
383 | SPHEROID["User_Defined_Spheroid",6371007.181,0.0]],
384 | PRIMEM["Greenwich",0.0],
385 | UNIT["Degree",0.0174532925199433]],
386 | PROJECTION["Sinusoidal"],
387 | PARAMETER["False_Easting",0.0],
388 | PARAMETER["False_Northing",0.0],
389 | PARAMETER["Central_Meridian",0.0],
390 | UNIT["Meter",1.0]]"""
391 | return crs
392 |
393 |
394 | def asset_id(image_id: str, folder: str = None):
395 | """
396 | Convert an EE image ID and EE asset project into an EE asset ID.
397 |
398 | If ``folder`` is not specified, ``image_id`` is returned as is. Otherwise, ``image_id`` is converted to a name by
399 | changing forward slashes to dashes, ``folder`` is split into and sections, and a string
400 | is returned with EE asset ID format:
401 |
402 | projects//assets//.
403 | """
404 | if not folder:
405 | return image_id
406 | im_name = image_id.replace("/", "-")
407 | folder = pathlib.PurePosixPath(folder)
408 | cloud_folder = pathlib.PurePosixPath(folder.parts[0])
409 | asset_path = pathlib.PurePosixPath("/".join(folder.parts[1:])).joinpath(im_name)
410 | return f"projects/{str(cloud_folder)}/assets/{str(asset_path)}"
411 |
--------------------------------------------------------------------------------
/geedim/version.py:
--------------------------------------------------------------------------------
1 | __version__ = '1.9.1'
2 |
--------------------------------------------------------------------------------
/meta.yaml:
--------------------------------------------------------------------------------
1 | {% set name = "geedim" %}
2 | {% set version = "1.2.0" %}
3 |
4 | package:
5 | name: {{ name|lower }}
6 | version: {{ version }}
7 |
8 | source:
9 | url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/geedim-{{ version }}.tar.gz
10 | sha256: 7e272200b0c0d01d74bd6e4fddf27d992c44b453b01c0d6025cf9c76150f4fae
11 |
12 | build:
13 | entry_points:
14 | - geedim=geedim.cli:cli
15 | noarch: python
16 | script: {{ PYTHON }} -m pip install . -vv
17 | number: 0
18 |
19 | requirements:
20 | host:
21 | - pip
22 | - python >=3.6
23 | run:
24 | - python >=3.6
25 | - numpy >=1.19
26 | - rasterio >=1.1
27 | - click >=8
28 | - tqdm >=4.6
29 | - earthengine-api >=0.1.2
30 | - requests >=2.2
31 | - tabulate >=0.8
32 |
33 | test:
34 | imports:
35 | - geedim
36 | commands:
37 | - geedim --help
38 |
39 | about:
40 | home: https://github.com/leftfield-geospatial/geedim
41 | summary: Search, composite, and download Google Earth Engine imagery.
42 | license: Apache-2.0
43 | license_file: LICENSE
44 | description: |
45 | `geedim` provides a command line interface and API for searching, compositing and downloading
46 | satellite imagery from Google Earth Engine (EE). It optionally performs cloud / shadow
47 | masking, and cloud / shadow-free compositing on supported collections. Images and composites
48 | can be downloaded, or exported to Google Drive. Images larger than the EE size limit are
49 | split and downloaded as separate tiles, then re-assembled into a single GeoTIFF.
50 |
51 | extra:
52 | recipe-maintainers:
53 | - dugalh
54 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [project]
2 | name = 'geedim'
3 | description = 'Search, composite and download Google Earth Engine imagery.'
4 | readme = 'README.rst'
5 | requires-python = '>=3.8'
6 | dependencies = [
7 | 'numpy>=1.19',
8 | 'rasterio>=1.3.8',
9 | 'click>=8',
10 | 'tqdm>=4.6',
11 | 'earthengine-api>=0.1.379',
12 | 'requests>=2.2',
13 | 'tabulate>=0.8'
14 | ]
15 | authors = [{name = 'Leftfield Geospatial'}]
16 | keywords = [
17 | 'earth engine', 'satellite imagery', 'search', 'download', 'composite', 'cloud', 'shadow'
18 | ]
19 | classifiers = [
20 | 'Programming Language :: Python :: 3',
21 | 'License :: OSI Approved :: Apache Software License',
22 | 'Operating System :: OS Independent',
23 | ]
24 | dynamic = ['version']
25 |
26 | [project.scripts]
27 | geedim = 'geedim.cli:cli'
28 |
29 | [project.optional-dependencies]
30 | tests = ['pytest', 'pytest-xdist']
31 | docs = ['sphinx', 'sphinx-click', 'sphinx-rtd-theme', 'nbsphinx', 'ipykernel', 'jupyter-sphinx']
32 |
33 | [project.urls]
34 | Homepage = 'https://github.com/leftfield-geospatial/geedim'
35 | Documentation = "https://geedim.readthedocs.org"
36 | Source = 'https://github.com/leftfield-geospatial/geedim'
37 | Changelog = 'https://github.com/leftfield-geospatial/geedim/releases'
38 | Issues = 'https://github.com/leftfield-geospatial/geedim/issues'
39 |
40 | [build-system]
41 | requires = ['setuptools']
42 | build-backend = 'setuptools.build_meta'
43 |
44 | [tool.setuptools]
45 | packages = ['geedim']
46 |
47 | [tool.setuptools.dynamic]
48 | version = {attr = 'geedim.version.__version__'}
49 |
50 | [tool.setuptools.package-data]
51 | geedim = ['data/ee_stac_urls.json']
52 |
53 | [tool.black]
54 | line-length = 120
55 | skip-string-normalization = true
56 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | import pathlib
18 | from typing import Dict, List
19 |
20 | import ee
21 | import pytest
22 | from click.testing import CliRunner
23 |
24 | from geedim import Initialize, MaskedImage
25 | from geedim.mask import Sentinel2SrClImage
26 | from geedim.utils import root_path
27 |
28 |
29 | @pytest.fixture(scope='session', autouse=True)
30 | def ee_init():
31 | Initialize()
32 | return
33 |
34 |
35 | @pytest.fixture(scope='session')
36 | def region_25ha() -> Dict:
37 | """A geojson polygon defining a 500x500m region."""
38 | return {
39 | "type": "Polygon",
40 | "coordinates": [
41 | [[21.6389, -33.4520], [21.6389, -33.4474], [21.6442, -33.4474], [21.6442, -33.4520], [21.6389, -33.4520]]
42 | ],
43 | }
44 |
45 |
46 | @pytest.fixture(scope='session')
47 | def region_100ha() -> Dict:
48 | """A geojson polygon defining a 1x1km region."""
49 | return {
50 | "type": "Polygon",
51 | "coordinates": [
52 | [[21.6374, -33.4547], [21.6374, -33.4455], [21.6480, -33.4455], [21.6480, -33.4547], [21.6374, -33.4547]]
53 | ],
54 | }
55 |
56 |
57 | @pytest.fixture(scope='session')
58 | def region_10000ha() -> Dict:
59 | """A geojson polygon defining a 10x10km region."""
60 | return {
61 | "type": "Polygon",
62 | "coordinates": [
63 | [[21.5893, -33.4964], [21.5893, -33.4038], [21.6960, -33.4038], [21.6960, -33.4964], [21.5893, -33.4964]]
64 | ],
65 | }
66 |
67 |
68 | @pytest.fixture
69 | def const_image_25ha_file() -> pathlib.Path:
70 | return root_path.joinpath('tests/data/const_image_25ha.tif')
71 |
72 |
73 | @pytest.fixture(scope='session')
74 | def l4_image_id() -> str:
75 | """Landsat-4 EE ID for image that covering `region_*ha`, with partial cloud/shadow for `region10000ha` only."""
76 | return 'LANDSAT/LT04/C02/T1_L2/LT04_173083_19880310'
77 |
78 |
79 | @pytest.fixture(scope='session')
80 | def l5_image_id() -> str:
81 | """Landsat-5 EE ID for image covering `region_*ha` with partial cloud/shadow."""
82 | return 'LANDSAT/LT05/C02/T1_L2/LT05_173083_20051112'
83 |
84 |
85 | @pytest.fixture(scope='session')
86 | def l7_image_id() -> str:
87 | """Landsat-7 EE ID for image covering `region_*ha` with partial cloud/shadow."""
88 | return 'LANDSAT/LE07/C02/T1_L2/LE07_173083_20220119'
89 |
90 |
91 | @pytest.fixture(scope='session')
92 | def l8_image_id() -> str:
93 | """Landsat-8 EE ID for image covering `region_*ha` with partial cloud/shadow."""
94 | return 'LANDSAT/LC08/C02/T1_L2/LC08_173083_20180217'
95 |
96 |
97 | @pytest.fixture(scope='session')
98 | def l9_image_id() -> str:
99 | """Landsat-9 EE ID for image covering `region_*ha` with partial cloud/shadow."""
100 | return 'LANDSAT/LC09/C02/T1_L2/LC09_173083_20220308'
101 |
102 |
103 | @pytest.fixture(scope='session')
104 | def landsat_image_ids(l4_image_id, l5_image_id, l7_image_id, l8_image_id, l9_image_id) -> List[str]:
105 | """Landsat4-9 EE IDs for images covering `region_*ha` with partial cloud/shadow."""
106 | return [l4_image_id, l5_image_id, l7_image_id, l8_image_id, l9_image_id]
107 |
108 |
109 | @pytest.fixture(scope='session')
110 | def s2_sr_image_id() -> str:
111 | """Sentinel-2 SR EE ID for image with QA* data, covering `region_*ha` with partial cloud/shadow."""
112 | return 'COPERNICUS/S2_SR/20200929T080731_20200929T083634_T34HEJ'
113 |
114 |
115 | @pytest.fixture(scope='session')
116 | def s2_toa_image_id() -> str:
117 | """Sentinel-2 TOA EE ID for image with QA* data, covering `region_*ha` with partial cloud/shadow."""
118 | return 'COPERNICUS/S2/20210216T081011_20210216T083703_T34HEJ'
119 |
120 |
121 | @pytest.fixture(scope='session')
122 | def s2_sr_hm_image_id(s2_sr_image_id: str) -> str:
123 | """Harmonised Sentinel-2 SR EE ID for image with QA* data, covering `region_*ha` with partial cloud/shadow."""
124 | return 'COPERNICUS/S2_SR_HARMONIZED/' + s2_sr_image_id.split('/')[-1]
125 |
126 |
127 | @pytest.fixture(scope='session')
128 | def s2_sr_hm_qa_zero_image_id() -> str:
129 | """Harmonised Sentinel-2 SR EE ID for image with zero QA* data, covering `region_*ha` with partial cloud/shadow."""
130 | return 'COPERNICUS/S2_SR_HARMONIZED/20230721T080609_20230721T083101_T34HEJ'
131 |
132 |
133 | @pytest.fixture(scope='session')
134 | def s2_toa_hm_image_id(s2_toa_image_id: str) -> str:
135 | """Harmonised Sentinel-2 TOA EE ID for image with QA* data, covering `region_*ha` with partial cloud/shadow."""
136 | return 'COPERNICUS/S2_HARMONIZED/' + s2_toa_image_id.split('/')[-1]
137 |
138 |
139 | @pytest.fixture(scope='session')
140 | def modis_nbar_image_id() -> str:
141 | """Global MODIS NBAR image ID."""
142 | return 'MODIS/061/MCD43A4/2022_01_01'
143 |
144 |
145 | @pytest.fixture(scope='session')
146 | def gch_image_id() -> str:
147 | """
148 | Global Canopy Height (10m) image derived from Sentinel-2 and GEDI. WGS84 @ 10m.
149 | https://nlang.users.earthengine.app/view/global-canopy-height-2020.
150 | """
151 | return 'users/nlang/ETH_GlobalCanopyHeight_2020_10m_v1'
152 |
153 |
154 | @pytest.fixture(scope='session')
155 | def s1_sar_image_id() -> str:
156 | """Sentinel-1 SAR GRD EE image ID. 10m."""
157 | return 'COPERNICUS/S1_GRD/S1A_IW_GRDH_1SDV_20220112T171750_20220112T171815_041430_04ED28_0A04'
158 |
159 |
160 | @pytest.fixture(scope='session')
161 | def gedi_agb_image_id() -> str:
162 | """GEDI aboveground biomass density EE image ID. 1km."""
163 | return 'LARSE/GEDI/GEDI04_B_002'
164 |
165 |
166 | @pytest.fixture(scope='session')
167 | def gedi_cth_image_id() -> str:
168 | """GEDI canopy top height EE image ID. 25m."""
169 | return 'LARSE/GEDI/GEDI02_A_002_MONTHLY/202009_018E_036S'
170 |
171 |
172 | @pytest.fixture(scope='session')
173 | def landsat_ndvi_image_id() -> str:
174 | """Landsat 8-day NDVI composite EE image iD. Composite in WGS84 with underlying 30m scale."""
175 | return 'LANDSAT/COMPOSITES/C02/T1_L2_8DAY_NDVI/20211211'
176 |
177 |
178 | @pytest.fixture(scope='session')
179 | def google_dyn_world_image_id(s2_sr_hm_image_id) -> str:
180 | """Google Dynamic World EE ID. 10m with positive y-axis transform."""
181 | return 'GOOGLE/DYNAMICWORLD/V1/' + s2_sr_hm_image_id.split('/')[-1]
182 |
183 |
184 | @pytest.fixture()
185 | def s2_sr_hm_image_ids(s2_sr_image_id: str, s2_toa_image_id: str) -> List[str]:
186 | """A list of harmonised Sentinel-2 SR image IDs, covering `region_*ha` with partial cloud/shadow.."""
187 | return [
188 | 'COPERNICUS/S2_SR_HARMONIZED/' + s2_sr_image_id.split('/')[-1],
189 | 'COPERNICUS/S2_SR_HARMONIZED/' + s2_toa_image_id.split('/')[-1],
190 | 'COPERNICUS/S2_SR_HARMONIZED/20191229T081239_20191229T083040_T34HEJ',
191 | ]
192 |
193 |
194 | @pytest.fixture(scope='session')
195 | def generic_image_ids(
196 | modis_nbar_image_id, gch_image_id, s1_sar_image_id, gedi_agb_image_id, gedi_cth_image_id, landsat_ndvi_image_id
197 | ) -> List[str]:
198 | """A list of various EE image IDs for non-cloud/shadow masked images."""
199 | return [
200 | modis_nbar_image_id,
201 | gch_image_id,
202 | s1_sar_image_id,
203 | gedi_agb_image_id,
204 | gedi_cth_image_id,
205 | landsat_ndvi_image_id,
206 | ]
207 |
208 |
209 | @pytest.fixture(scope='session')
210 | def l4_masked_image(l4_image_id) -> MaskedImage:
211 | """Landsat-4 MaskedImage covering `region_*ha`, with partial cloud for `region10000ha` only."""
212 | return MaskedImage.from_id(l4_image_id)
213 |
214 |
215 | @pytest.fixture(scope='session')
216 | def l5_masked_image(l5_image_id) -> MaskedImage:
217 | """Landsat-5 MaskedImage covering `region_*ha` with partial cloud/shadow."""
218 | return MaskedImage.from_id(l5_image_id)
219 |
220 |
221 | @pytest.fixture(scope='session')
222 | def l7_masked_image(l7_image_id) -> MaskedImage:
223 | """Landsat-7 MaskedImage covering `region_*ha` with partial cloud/shadow."""
224 | return MaskedImage.from_id(l7_image_id)
225 |
226 |
227 | @pytest.fixture(scope='session')
228 | def l8_masked_image(l8_image_id) -> MaskedImage:
229 | """Landsat-8 MaskedImage that cover `region_*ha` with partial cloud cover."""
230 | return MaskedImage.from_id(l8_image_id)
231 |
232 |
233 | @pytest.fixture(scope='session')
234 | def l9_masked_image(l9_image_id) -> MaskedImage:
235 | """Landsat-9 MaskedImage covering `region_*ha` with partial cloud/shadow."""
236 | return MaskedImage.from_id(l9_image_id)
237 |
238 |
239 | @pytest.fixture(scope='session')
240 | def s2_sr_masked_image(s2_sr_image_id) -> MaskedImage:
241 | """Sentinel-2 SR MaskedImage with QA* data, covering `region_*ha` with partial cloud/shadow."""
242 | return MaskedImage.from_id(s2_sr_image_id)
243 |
244 |
245 | @pytest.fixture(scope='session')
246 | def s2_toa_masked_image(s2_toa_image_id) -> MaskedImage:
247 | """Sentinel-2 TOA MaskedImage with QA* data, covering `region_*ha` with partial cloud/shadow."""
248 | return MaskedImage.from_id(s2_toa_image_id)
249 |
250 |
251 | @pytest.fixture(scope='session')
252 | def s2_sr_hm_masked_image(s2_sr_hm_image_id) -> MaskedImage:
253 | """Harmonised Sentinel-2 SR MaskedImage with QA* data, covering `region_*ha` with partial cloud/shadow."""
254 | return MaskedImage.from_id(s2_sr_hm_image_id)
255 |
256 |
257 | @pytest.fixture(scope='session')
258 | def s2_sr_hm_nocp_masked_image(s2_sr_hm_image_id) -> MaskedImage:
259 | """Harmonised Sentinel-2 SR MaskedImage with no corresponding cloud probability, covering `region_*ha` with partial
260 | cloud/shadow.
261 | """
262 | # create an image with unknown id to prevent linking to cloud probability
263 | ee_image = ee.Image(s2_sr_hm_image_id)
264 | ee_image = ee_image.set('system:index', 'COPERNICUS/S2_HARMONIZED/unknown')
265 | return Sentinel2SrClImage(ee_image, mask_method='cloud-prob')
266 |
267 |
268 | @pytest.fixture(scope='session')
269 | def s2_sr_hm_nocs_masked_image(s2_sr_hm_image_id) -> MaskedImage:
270 | """Harmonised Sentinel-2 SR MaskedImage with no corresponding cloud score, covering `region_*ha` with partial
271 | cloud/shadow.
272 | """
273 | # create an image with unknown id to prevent linking to cloud score
274 | ee_image = ee.Image(s2_sr_hm_image_id)
275 | ee_image = ee_image.set('system:index', 'COPERNICUS/S2_HARMONIZED/unknown')
276 | return Sentinel2SrClImage(ee_image, mask_method='cloud-score')
277 |
278 |
279 | @pytest.fixture(scope='session')
280 | def s2_sr_hm_qa_zero_masked_image(s2_sr_hm_qa_zero_image_id: str) -> MaskedImage:
281 | """Harmonised Sentinel-2 SR MaskedImage with zero QA* bands, covering `region_*ha` with partial cloud/shadow."""
282 | return MaskedImage.from_id(s2_sr_hm_qa_zero_image_id, mask_method='qa')
283 |
284 |
285 | @pytest.fixture(scope='session')
286 | def s2_toa_hm_masked_image(s2_toa_hm_image_id) -> MaskedImage:
287 | """Harmonised Sentinel-2 TOA MaskedImage with QA* data, covering `region_*ha` with partial cloud/shadow."""
288 | return MaskedImage.from_id(s2_toa_hm_image_id)
289 |
290 |
291 | @pytest.fixture(scope='session')
292 | def user_masked_image() -> MaskedImage:
293 | """A MaskedImage instance where the encapsulated image has no fixed projection or ID."""
294 | return MaskedImage(ee.Image([1, 2, 3]))
295 |
296 |
297 | @pytest.fixture(scope='session')
298 | def modis_nbar_masked_image(modis_nbar_image_id) -> MaskedImage:
299 | """MODIS NBAR MaskedImage with global coverage."""
300 | return MaskedImage.from_id(modis_nbar_image_id)
301 |
302 |
303 | @pytest.fixture(scope='session')
304 | def gch_masked_image(gch_image_id) -> MaskedImage:
305 | """Global Canopy Height (10m) MaskedImage."""
306 | return MaskedImage.from_id(gch_image_id)
307 |
308 |
309 | @pytest.fixture(scope='session')
310 | def s1_sar_masked_image(s1_sar_image_id) -> MaskedImage:
311 | """Sentinel-1 SAR GRD MaskedImage. 10m."""
312 | return MaskedImage.from_id(s1_sar_image_id)
313 |
314 |
315 | @pytest.fixture(scope='session')
316 | def gedi_agb_masked_image(gedi_agb_image_id) -> MaskedImage:
317 | """GEDI aboveground biomass density MaskedImage. 1km."""
318 | return MaskedImage.from_id(gedi_agb_image_id)
319 |
320 |
321 | @pytest.fixture(scope='session')
322 | def gedi_cth_masked_image(gedi_cth_image_id) -> MaskedImage:
323 | """GEDI canopy top height MaskedImage. 25m."""
324 | return MaskedImage.from_id(gedi_cth_image_id)
325 |
326 |
327 | @pytest.fixture(scope='session')
328 | def landsat_ndvi_masked_image(landsat_ndvi_image_id) -> MaskedImage:
329 | """Landsat 8-day NDVI composite MaskedImage. Composite in WGS84 with underlying 30m scale."""
330 | return MaskedImage.from_id(landsat_ndvi_image_id)
331 |
332 |
333 | @pytest.fixture
334 | def runner():
335 | """click runner for command line execution."""
336 | return CliRunner()
337 |
338 |
339 | @pytest.fixture
340 | def region_25ha_file() -> pathlib.Path:
341 | """Path to region_25ha geojson file."""
342 | return root_path.joinpath('tests/data/region_25ha.geojson')
343 |
344 |
345 | @pytest.fixture
346 | def region_100ha_file() -> pathlib.Path:
347 | """Path to region_100ha geojson file."""
348 | return root_path.joinpath('tests/data/region_100ha.geojson')
349 |
350 |
351 | @pytest.fixture
352 | def region_10000ha_file() -> pathlib.Path:
353 | """Path to region_10000ha geojson file."""
354 | return root_path.joinpath('tests/data/region_10000ha.geojson')
355 |
--------------------------------------------------------------------------------
/tests/data/const_image_25ha.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/leftfield-geospatial/geedim/95c654be64748088730869c24af09eb9287a9e94/tests/data/const_image_25ha.tif
--------------------------------------------------------------------------------
/tests/data/region.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "Polygon",
3 | "coordinates": [
4 | [
5 | [
6 | 24,
7 | -33.6
8 | ],
9 | [
10 | 24,
11 | -33.53
12 | ],
13 | [
14 | 23.93,
15 | -33.53
16 | ],
17 | [
18 | 23.93,
19 | -33.6
20 | ],
21 | [
22 | 24,
23 | -33.6
24 | ]
25 | ]
26 | ]
27 | }
--------------------------------------------------------------------------------
/tests/data/region_10000ha.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "Polygon",
3 | "coordinates": [
4 | [
5 | [
6 | 21.589345633930122,
7 | -33.49637826839637
8 | ],
9 | [
10 | 21.589345633930122,
11 | -33.403838620261126
12 | ],
13 | [
14 | 21.696023283863752,
15 | -33.403838620261126
16 | ],
17 | [
18 | 21.696023283863752,
19 | -33.49637826839637
20 | ],
21 | [
22 | 21.589345633930122,
23 | -33.49637826839637
24 | ]
25 | ]
26 | ]
27 | }
28 |
--------------------------------------------------------------------------------
/tests/data/region_100ha.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "Polygon",
3 | "coordinates": [
4 | [
5 | [
6 | 21.637350576400266,
7 | -33.454735426735532
8 | ],
9 | [
10 | 21.637350576400266,
11 | -33.445481461922007
12 | ],
13 | [
14 | 21.64801834139363,
15 | -33.445481461922007
16 | ],
17 | [
18 | 21.64801834139363,
19 | -33.454735426735532
20 | ],
21 | [
22 | 21.637350576400266,
23 | -33.454735426735532
24 | ]
25 | ]
26 | ]
27 | }
--------------------------------------------------------------------------------
/tests/data/region_25ha.geojson:
--------------------------------------------------------------------------------
1 | {
2 | "type": "Polygon",
3 | "coordinates": [
4 | [
5 | [
6 | 21.638902721644083,
7 | -33.452036969823055
8 | ],
9 | [
10 | 21.638902721644083,
11 | -33.447414434963527
12 | ],
13 | [
14 | 21.644231477107148,
15 | -33.447414434963527
16 | ],
17 | [
18 | 21.644231477107148,
19 | -33.452036969823055
20 | ],
21 | [
22 | 21.638902721644083,
23 | -33.452036969823055
24 | ]
25 | ]
26 | ]
27 | }
--------------------------------------------------------------------------------
/tests/integration.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | import json
18 | from pathlib import Path
19 |
20 | import ee
21 | import numpy as np
22 | import pytest
23 | import rasterio as rio
24 | from click.testing import CliRunner
25 | from httplib2 import Http
26 | from rasterio.coords import BoundingBox
27 | from rasterio.crs import CRS
28 | from rasterio.features import bounds
29 | from rasterio.warp import transform_geom
30 |
31 | import geedim as gd
32 | from geedim import cli, utils
33 | from geedim.download import BaseImage
34 |
35 |
36 | @pytest.fixture(scope='session', autouse=True)
37 | def ee_init():
38 | """Override the ee_init fixture, so that we only initialise as geemap does, below."""
39 | return
40 |
41 |
42 | def test_geemap_integration(tmp_path: Path):
43 | """Simulate the geemap download example."""
44 | gd.Initialize(opt_url=None, http_transport=Http()) # a replica of geemap Initialize
45 | ee_image = ee.ImageCollection("LANDSAT/LC08/C02/T1_TOA").first()
46 | gd_image = gd.download.BaseImage(ee_image)
47 | out_file = tmp_path.joinpath('landsat.tif')
48 | gd_image.download(out_file, scale=100)
49 | assert out_file.exists()
50 | assert out_file.stat().st_size > 100e6
51 |
52 |
53 | def test_geeml_integration(tmp_path: Path):
54 | """Test the geeml `user memory limit exceeded` example."""
55 | gd.Initialize()
56 | region = {
57 | 'geodesic': False,
58 | 'crs': {'type': 'name', 'properties': {'name': 'EPSG:4326'}},
59 | 'type': 'Polygon',
60 | 'coordinates': [
61 | [
62 | [6.030749828407996, 53.66867883985145],
63 | [6.114742307473171, 53.66867883985145],
64 | [6.114742307473171, 53.76381042843971],
65 | [6.030749828407996, 53.76381042843971],
66 | [6.030749828407996, 53.66867883985145],
67 | ]
68 | ],
69 | } # yapf: disable
70 |
71 | ee_image = (
72 | ee.ImageCollection('COPERNICUS/S2_SR_HARMONIZED')
73 | .filterDate('2019-01-01', '2020-01-01')
74 | .filterBounds(region)
75 | .select(['B4', 'B3', 'B2', 'B8'])
76 | .reduce(ee.Reducer.percentile([35]))
77 | ) # yapf: disable
78 |
79 | gd_image = gd.download.BaseImage(ee_image)
80 | out_file = tmp_path.joinpath('test.tif')
81 | # test we get user memory limit exceeded error with default max_tile_size
82 | # (EE does not always raise this - memory limit is dynamic?, or percentile implementation changed?)
83 | # with pytest.raises(IOError) as ex:
84 | # gd_image.download(
85 | # out_file, crs='EPSG:4326', region=region, scale=10, num_threads=1, dtype='float64', overwrite=True
86 | # )
87 | # assert 'user memory limit exceeded' in str(ex).lower()
88 |
89 | # test we can download the image with a max_tile_size of 16 MB
90 | gd_image.download(
91 | out_file,
92 | crs='EPSG:4326',
93 | region=region,
94 | scale=10,
95 | dtype='float64',
96 | overwrite=True,
97 | max_tile_size=16,
98 | )
99 | assert out_file.exists()
100 | with rio.open(out_file, 'r') as ds:
101 | assert ds.count == 4
102 | assert ds.dtypes[0] == 'float64'
103 | assert np.isinf(ds.nodata)
104 | region_cnrs = np.array(region['coordinates'][0])
105 | region_bounds = rio.coords.BoundingBox(*region_cnrs.min(axis=0), *region_cnrs.max(axis=0))
106 | # sometimes the top/bottom bounds of the dataset are swapped, so extract and compare UL and BR corners
107 | print(f'region_bounds: {region_bounds}')
108 | print(f'ds.bounds: {ds.bounds}')
109 | ds_ul = np.array([min(ds.bounds.left, ds.bounds.right), min(ds.bounds.top, ds.bounds.bottom)])
110 | ds_lr = np.array([max(ds.bounds.left, ds.bounds.right), max(ds.bounds.top, ds.bounds.bottom)])
111 | assert region_cnrs.min(axis=0) == pytest.approx(ds_ul, abs=1e-3)
112 | assert region_cnrs.max(axis=0) == pytest.approx(ds_lr, abs=1e-3)
113 |
114 |
115 | def test_cli_asset_export(l8_image_id, region_25ha_file: Path, runner: CliRunner, tmp_path: Path):
116 | """Export a test image to an asset using the CLI."""
117 | # create a randomly named folder to allow parallel tests without overwriting the same asset
118 | gd.Initialize()
119 | folder = f'geedim/int_test_asset_export_{np.random.randint(1 << 31)}'
120 | asset_folder = f'projects/{Path(folder).parts[0]}/assets/{Path(folder).parts[1]}'
121 | crs = 'EPSG:3857'
122 | scale = 30
123 |
124 | try:
125 | # export image to asset via CLI
126 | test_asset_id = utils.asset_id(l8_image_id, folder)
127 | ee.data.createAsset(dict(type='Folder'), asset_folder)
128 | cli_str = (
129 | f'export -i {l8_image_id} -r {region_25ha_file} -f {folder} --crs {crs} --scale {scale} '
130 | f'--dtype uint16 --mask --resampling bilinear --wait --type asset'
131 | )
132 | result = runner.invoke(cli.cli, cli_str.split())
133 | assert result.exit_code == 0
134 | assert ee.data.getAsset(test_asset_id) is not None
135 |
136 | # download the asset image
137 | asset_image = gd.download.BaseImage.from_id(test_asset_id)
138 | download_filename = tmp_path.joinpath(f'integration_test.tif')
139 | asset_image.download(download_filename)
140 | assert download_filename.exists()
141 |
142 | finally:
143 | # clean up the asset and its folder
144 | try:
145 | ee.data.deleteAsset(test_asset_id)
146 | ee.data.deleteAsset(asset_folder)
147 | except ee.ee_exception.EEException:
148 | pass
149 |
150 | # test downloaded asset image
151 | with open(region_25ha_file) as f:
152 | region = json.load(f)
153 | with rio.open(download_filename, 'r') as im:
154 | im: rio.DatasetReader
155 | exp_region = transform_geom('EPSG:4326', im.crs, region)
156 | exp_bounds = BoundingBox(*bounds(exp_region))
157 | assert im.crs == CRS.from_string(crs)
158 | assert im.transform[0] == scale
159 | assert im.count > 1
160 | assert (
161 | (im.bounds[0] <= exp_bounds[0])
162 | and (im.bounds[1] <= exp_bounds[1])
163 | and (im.bounds[2] >= exp_bounds[2])
164 | and (im.bounds[3] >= exp_bounds[3])
165 | )
166 |
167 |
168 | @pytest.mark.parametrize('dtype', ['float32', 'float64', 'uint8', 'int8', 'uint16', 'int16', 'uint32', 'int32'])
169 | def test_ee_geotiff_nodata(dtype: str, l9_image_id: str):
170 | """Test the nodata value of the Earth Engine GeoTIFF returned by ``ee.data.computePixels()`` or
171 | ``ee.Image.getDownloadUrl()`` equals the geedim expected value (see
172 | https://issuetracker.google.com/issues/350528377 for context).
173 | """
174 | # use geedim to prepare an image for downloading as dtype
175 | gd.Initialize()
176 | masked_image = gd.MaskedImage.from_id(l9_image_id)
177 | shape = (10, 10)
178 | exp_image, profile = masked_image._prepare_for_download(shape=shape, dtype=dtype)
179 |
180 | # download a small tile with ee.data.computePixels
181 | request = {
182 | 'expression': exp_image.ee_image,
183 | 'bandIds': ['SR_B3'],
184 | 'grid': {'dimensions': {'width': shape[1], 'height': shape[0]}},
185 | 'fileFormat': 'GEO_TIFF',
186 | }
187 | im_bytes = ee.data.computePixels(request)
188 |
189 | # test nodata with rasterio
190 | with rio.MemoryFile(im_bytes) as mf, mf.open() as ds:
191 | assert ds.nodata == profile['nodata']
192 | # test the EE dtype is not lower precision compared to expected dtype
193 | assert np.promote_types(profile['dtype'], ds.dtypes[0]) == ds.dtypes[0]
194 |
--------------------------------------------------------------------------------
/tests/test_mask.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | from typing import Dict, Iterable
18 |
19 | import ee
20 | import numpy as np
21 | import pytest
22 | import rasterio as rio
23 |
24 | from geedim import CloudMaskMethod
25 | from geedim.enums import CloudScoreBand
26 | from geedim.mask import class_from_id, CloudMaskedImage, MaskedImage
27 |
28 |
def test_class_from_id(landsat_image_ids, s2_sr_image_id, s2_toa_hm_image_id, generic_image_ids):
    """Test class_from_id() maps image IDs to the expected MaskedImage subclasses."""
    from geedim.mask import LandsatImage, Sentinel2SrClImage, Sentinel2ToaClImage

    # Landsat collections map to LandsatImage, unknown collections fall back to MaskedImage
    assert all(class_from_id(im_id) == LandsatImage for im_id in landsat_image_ids)
    assert all(class_from_id(im_id) == MaskedImage for im_id in generic_image_ids)
    # the Sentinel-2 SR / TOA collections each have a dedicated class
    assert class_from_id(s2_sr_image_id) == Sentinel2SrClImage
    assert class_from_id(s2_toa_hm_image_id) == Sentinel2ToaClImage
37 |
38 |
def test_from_id():
    """Test MaskedImage.from_id() sets _id."""
    image_id = 'MODIS/006/MCD43A4/2022_01_01'
    assert MaskedImage.from_id(image_id)._id == image_id
44 |
45 |
@pytest.mark.parametrize(
    'masked_image',
    [
        'user_masked_image',
        'modis_nbar_masked_image',
        'gch_masked_image',
        's1_sar_masked_image',
        'gedi_agb_masked_image',
        'gedi_cth_masked_image',
        'landsat_ndvi_masked_image',
    ],
)
def test_gen_aux_bands_exist(masked_image: str, request: pytest.FixtureRequest):
    """Test the presence of auxiliary band (i.e. FILL_MASK) in generic masked images."""
    image: MaskedImage = request.getfixturevalue(masked_image)
    # generic (non cloud/shadow maskable) images carry only the fill mask
    assert 'FILL_MASK' in image.ee_image.bandNames().getInfo()
63 |
64 |
@pytest.mark.parametrize(
    'masked_image',
    [
        's2_sr_masked_image',
        's2_toa_masked_image',
        's2_sr_hm_masked_image',
        's2_toa_hm_masked_image',
        'l9_masked_image',
        'l8_masked_image',
        'l7_masked_image',
        'l5_masked_image',
        'l4_masked_image',
    ],
)
def test_cloud_mask_aux_bands_exist(masked_image: str, request: pytest.FixtureRequest):
    """Test the presence of auxiliary bands in cloud masked images."""
    image: MaskedImage = request.getfixturevalue(masked_image)
    band_names = set(image.ee_image.bandNames().getInfo())
    # cloud/shadow maskable images should carry all of these auxiliary bands
    assert {'FILL_MASK', 'CLOUDLESS_MASK', 'CLOUD_DIST'} <= band_names
85 |
86 |
@pytest.mark.parametrize(
    'masked_image',
    [
        's2_sr_masked_image',
        's2_toa_masked_image',
        's2_sr_hm_masked_image',
        's2_toa_hm_masked_image',
        'l9_masked_image',
        'l8_masked_image',
        'l7_masked_image',
        'l5_masked_image',
        'l4_masked_image',
        'user_masked_image',
        'modis_nbar_masked_image',
        'gch_masked_image',
        's1_sar_masked_image',
        'gedi_agb_masked_image',
        'gedi_cth_masked_image',
        'landsat_ndvi_masked_image',
    ],
)
def test_set_region_stats(masked_image: str, region_100ha, request: pytest.FixtureRequest):
    """
    Test MaskedImage._set_region_stats() generates the expected properties and that these are in the valid range.
    """
    image: MaskedImage = request.getfixturevalue(masked_image)
    image._set_region_stats(region_100ha, scale=image._ee_proj.nominalScale())
    # both portions should be present and expressed as percentages
    for stat_name in ['FILL_PORTION', 'CLOUDLESS_PORTION']:
        assert stat_name in image.properties
        assert 0 <= image.properties[stat_name] <= 100
117 |
118 |
@pytest.mark.parametrize(
    'masked_image, exp_scale',
    [
        ('s2_sr_hm_masked_image', 60),
        ('l9_masked_image', 30),
        ('l4_masked_image', 30),
        ('s1_sar_masked_image', 10),
        ('gedi_agb_masked_image', 1000),
        # include fixtures with bands that have no fixed projection
        ('s2_sr_hm_qa_zero_masked_image', 60),
        ('s2_sr_hm_nocp_masked_image', 60),
        ('s2_sr_hm_nocs_masked_image', 60),
    ],
)
def test_ee_proj(masked_image: str, exp_scale: float, request: pytest.FixtureRequest):
    """Test MaskedImage._ee_proj has the correct scale and CRS."""
    image: MaskedImage = request.getfixturevalue(masked_image)
    proj_info = image._ee_proj.getInfo()

    # the scale is the first transform coefficient (may be negative, so compare its magnitude)
    assert abs(proj_info['transform'][0]) == pytest.approx(exp_scale, rel=1e-3)
    # the projection should not be the EE default EPSG:4326 ('wkt'-keyed projections pass trivially)
    assert proj_info.get('crs', 'wkt') != 'EPSG:4326'
140 |
141 |
@pytest.mark.parametrize('image_id', ['l9_image_id', 'l8_image_id', 'l7_image_id', 'l5_image_id', 'l4_image_id'])
def test_landsat_cloudless_portion(image_id: str, request: pytest.FixtureRequest):
    """Test `geedim` CLOUDLESS_PORTION for the whole image against related Landsat CLOUD_COVER property."""
    # the fixture value is the image ID string (annotation fixed - it was wrongly `MaskedImage`)
    image_id: str = request.getfixturevalue(image_id)
    masked_image = MaskedImage.from_id(image_id, mask_shadows=False, mask_cirrus=False)
    # no region -> stats are found over the whole image footprint
    masked_image._set_region_stats()

    # landsat provided cloudless portion
    landsat_cloudless_portion = 100 - float(masked_image.properties['CLOUD_COVER'])
    assert masked_image.properties['CLOUDLESS_PORTION'] == pytest.approx(landsat_cloudless_portion, abs=5)
152 |
153 |
@pytest.mark.parametrize('image_id', ['s2_toa_image_id', 's2_sr_image_id', 's2_toa_hm_image_id', 's2_sr_hm_image_id'])
def test_s2_cloudless_portion(image_id: str, request: pytest.FixtureRequest):
    """Test `geedim` CLOUDLESS_PORTION for the whole image against CLOUDY_PIXEL_PERCENTAGE Sentinel-2 property."""
    # Note that CLOUDY_PIXEL_PERCENTAGE does not use Cloud Score+ data and does not include shadows, which Cloud Score+
    # does. So CLOUDLESS_PORTION (with cloud-score method) will only roughly match CLOUDY_PIXEL_PERCENTAGE.
    ee_id: str = request.getfixturevalue(image_id)
    masked_image = MaskedImage.from_id(ee_id, mask_method='cloud-score')
    masked_image._set_region_stats()

    # S2 provided cloudless portion
    exp_portion = 100 - float(masked_image.properties['CLOUDY_PIXEL_PERCENTAGE'])
    assert masked_image.properties['CLOUDLESS_PORTION'] == pytest.approx(exp_portion, abs=10)
166 |
167 |
@pytest.mark.parametrize('image_id', ['l9_image_id'])
def test_landsat_cloudmask_params(image_id: str, request: pytest.FixtureRequest):
    """Test Landsat cloud/shadow masking `mask_shadows` and `mask_cirrus` parameters."""
    image_id: str = request.getfixturevalue(image_id)

    def cloudless_portion(**kwargs) -> float:
        """CLOUDLESS_PORTION over the whole image for the given masking parameters."""
        masked_image = MaskedImage.from_id(image_id, **kwargs)
        masked_image._set_region_stats()
        return 100 * masked_image.properties['CLOUDLESS_PORTION']

    # cloud-free portion
    cloud_only_portion = cloudless_portion(mask_shadows=False, mask_cirrus=False)
    # cloud and shadow-free portion
    cloud_shadow_portion = cloudless_portion(mask_shadows=True, mask_cirrus=False)
    # cloud, cirrus and shadow-free portion
    all_masked_portion = cloudless_portion(mask_shadows=True, mask_cirrus=True)

    # test `mask_shadows` and `mask_cirrus` affect CLOUDLESS_PORTION as expected
    assert cloud_only_portion > cloud_shadow_portion
    assert cloud_shadow_portion > all_masked_portion
188 |
189 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_mask_shadows(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `mask_shadows` parameter."""
    image_id: str = request.getfixturevalue(image_id)
    portions = []
    for mask_shadows in (False, True):
        masked_image = MaskedImage.from_id(image_id, mask_method='cloud-prob', mask_shadows=mask_shadows)
        masked_image._set_region_stats(region_10000ha)
        portions.append(100 * masked_image.properties['CLOUDLESS_PORTION'])
    # additionally masking shadows should reduce the cloudless portion
    assert portions[0] > portions[1]
203 |
204 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_prob(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `prob` parameter with the `cloud-prob` method."""
    image_id: str = request.getfixturevalue(image_id)

    def cloudless_portion(prob: float) -> float:
        """CLOUDLESS_PORTION over region_10000ha for the given cloud probability threshold."""
        masked_image = MaskedImage.from_id(image_id, mask_shadows=True, prob=prob, mask_method='cloud-prob')
        masked_image._set_region_stats(region_10000ha)
        return 100 * masked_image.properties['CLOUDLESS_PORTION']

    cl_portions = [cloudless_portion(prob) for prob in [80, 40]]
    # test there is more cloud (less CLOUDLESS_PORTION) with prob=40 as compared to prob=80
    assert cl_portions[0] > cl_portions[1] > 0
216 |
217 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_score(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `score` parameter with the `cloud-score` method."""
    image_id: str = request.getfixturevalue(image_id)
    cl_portions = []
    for score in [0.6, 0.3]:
        masked_image = MaskedImage.from_id(image_id, mask_shadows=True, score=score, mask_method='cloud-score')
        masked_image._set_region_stats(region_10000ha)
        cl_portions.append(100 * masked_image.properties['CLOUDLESS_PORTION'])
    # test there is more cloud (less CLOUDLESS_PORTION) with score=0.6 as compared to score=0.3 (the assertion
    # below requires cl_portions[0], found with the higher threshold, to be the smaller portion)
    assert cl_portions[0] < cl_portions[1] > 0
229 |
230 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_cs_band(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `cs_band` parameter with the `cloud-score` method."""
    image_id: str = request.getfixturevalue(image_id)
    cl_portions = []
    for cs_band in CloudScoreBand:
        masked_image = MaskedImage.from_id(image_id, mask_method='cloud-score', cs_band=cs_band)
        masked_image._set_region_stats(region_10000ha)
        cl_portions.append(100 * masked_image.properties['CLOUDLESS_PORTION'])

    # test `cs_band` changes CLOUDLESS_PORTION but not by much
    assert len(set(cl_portions)) == len(cl_portions)
    # fixed: portions should be approximately equal (within 10), not differ by more than 10 - `!=` contradicted
    # the comment above and the parallel "not approx 0" assertion below
    assert all([cl_portions[0] == pytest.approx(cp, abs=10) for cp in cl_portions[1:]])
    assert all([cp != pytest.approx(0, abs=1) for cp in cl_portions])
245 |
246 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_method(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `mask_method` parameter."""
    image_id: str = request.getfixturevalue(image_id)
    cl_portions = []
    for mask_method in CloudMaskMethod:
        masked_image = MaskedImage.from_id(image_id, mask_method=mask_method)
        masked_image._set_region_stats(region_10000ha)
        cl_portions.append(100 * masked_image.properties['CLOUDLESS_PORTION'])

    # test `mask_method` changes CLOUDLESS_PORTION but not by much
    assert len(set(cl_portions)) == len(cl_portions)
    # fixed: portions should be approximately equal (within 10), not differ by more than 10 - `!=` contradicted
    # the comment above and the parallel "not approx 0" assertion below
    assert all([cl_portions[0] == pytest.approx(cp, abs=10) for cp in cl_portions[1:]])
    assert all([cp != pytest.approx(0, abs=1) for cp in cl_portions])
261 |
262 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_mask_cirrus(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `mask_cirrus` parameter with the `qa` method."""
    image_id: str = request.getfixturevalue(image_id)

    def cloudless_portion(mask_cirrus: bool) -> float:
        """CLOUDLESS_PORTION over region_10000ha with / without cirrus masking."""
        masked_image = MaskedImage.from_id(image_id, mask_method='qa', mask_cirrus=mask_cirrus)
        masked_image._set_region_stats(region_10000ha)
        return 100 * masked_image.properties['CLOUDLESS_PORTION']

    # cloud and shadow free portion
    non_cirrus_portion = cloudless_portion(False)
    # cloud, cirrus and shadow free portion
    cirrus_portion = cloudless_portion(True)
    # additionally masking cirrus can only reduce the cloudless portion
    assert non_cirrus_portion >= cirrus_portion
276 |
277 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_dark(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `dark` parameter."""
    image_id: str = request.getfixturevalue(image_id)

    def cloudless_portion(dark: float) -> float:
        """CLOUDLESS_PORTION over region_10000ha for the given `dark` threshold."""
        masked_image = MaskedImage.from_id(image_id, mask_method='cloud-prob', dark=dark)
        masked_image._set_region_stats(region_10000ha)
        return 100 * masked_image.properties['CLOUDLESS_PORTION']

    dark_pt5_portion = cloudless_portion(0.5)
    dark_pt1_portion = cloudless_portion(0.1)
    # test that increasing `dark` results in an increase in detected shadow and corresponding decrease in
    # CLOUDLESS_PORTION
    assert dark_pt1_portion > dark_pt5_portion
291 |
292 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_shadow_dist(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `shadow_dist` parameter."""
    image_id: str = request.getfixturevalue(image_id)

    def cloudless_portion(shadow_dist: int) -> float:
        """CLOUDLESS_PORTION over region_10000ha for the given `shadow_dist` (m)."""
        masked_image = MaskedImage.from_id(image_id, mask_method='cloud-prob', shadow_dist=shadow_dist)
        masked_image._set_region_stats(region_10000ha)
        return 100 * masked_image.properties['CLOUDLESS_PORTION']

    sd200_portion = cloudless_portion(200)
    sd1000_portion = cloudless_portion(1000)
    # test that increasing `shadow_dist` results in an increase in detected shadow and corresponding decrease in
    # CLOUDLESS_PORTION
    assert sd200_portion > sd1000_portion
306 |
307 |
@pytest.mark.parametrize('image_id', ['s2_sr_hm_image_id', 's2_toa_hm_image_id'])
def test_s2_cloudmask_cdi_thresh(image_id: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud/shadow masking `cdi_thresh` parameter."""
    image_id: str = request.getfixturevalue(image_id)

    def cloudless_portion(cdi_thresh: float) -> float:
        """CLOUDLESS_PORTION over region_10000ha for the given CDI threshold."""
        masked_image = MaskedImage.from_id(image_id, mask_method='cloud-prob', cdi_thresh=cdi_thresh)
        masked_image._set_region_stats(region_10000ha)
        return 100 * masked_image.properties['CLOUDLESS_PORTION']

    cdi_pt5_portion = cloudless_portion(0.5)
    cdi_negpt5_portion = cloudless_portion(-0.5)
    # test that increasing `cdi_thresh` results in an increase in detected cloud and corresponding decrease in
    # CLOUDLESS_PORTION
    assert cdi_negpt5_portion > cdi_pt5_portion
321 |
322 |
@pytest.mark.parametrize('image_id, max_cloud_dist', [('s2_sr_hm_image_id', 100), ('s2_sr_hm_image_id', 400)])
def test_s2_cloud_dist_max(image_id: str, max_cloud_dist: int, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test S2 cloud distance `max_cloud_dist` parameter."""

    def get_max_cloud_dist(cloud_dist: ee.Image):
        """Get the maximum of `cloud_dist` over region_10000ha."""
        mcd = cloud_dist.reduceRegion(reducer='max', geometry=region_10000ha, bestEffort=True, maxPixels=1e4)
        # *10 scales the reduced value to match `max_cloud_dist` in meters - presumably CLOUD_DIST is stored in
        # units of 10m (TODO: confirm against geedim.mask)
        return mcd.get('CLOUD_DIST').getInfo() * 10

    image_id: str = request.getfixturevalue(image_id)
    masked_image = MaskedImage.from_id(image_id, max_cloud_dist=max_cloud_dist, mask_method='cloud-score')
    cloud_dist = masked_image.ee_image.select('CLOUD_DIST')
    # the measured maximum distance should be clipped at ~`max_cloud_dist`
    meas_max_cloud_dist = get_max_cloud_dist(cloud_dist)
    assert meas_max_cloud_dist == pytest.approx(max_cloud_dist, rel=0.1)
337 |
338 |
@pytest.mark.parametrize(
    'masked_image',
    ['s2_sr_hm_qa_zero_masked_image', 's2_sr_hm_nocp_masked_image', 's2_sr_hm_nocs_masked_image'],
)
def test_s2_region_stats_missing_data(masked_image: str, region_10000ha: dict, request: pytest.FixtureRequest):
    """Test S2 region stats for unmasked images missing required cloud data."""
    image: MaskedImage = request.getfixturevalue(masked_image)
    image._set_region_stats(region_10000ha, scale=60)

    props = image.properties
    assert props is not None
    # without cloud data nothing can be cloudless, but the image is still filled
    assert props['CLOUDLESS_PORTION'] == pytest.approx(0, abs=1)
    assert props['FILL_PORTION'] == pytest.approx(100, abs=1)
351 |
352 |
def _test_aux_stats(masked_image: MaskedImage, region: Dict):
    """
    Sanity tests on cloud/shadow etc. aux bands for a given MaskedImage and region.

    :param masked_image: cloud/shadow masked image whose aux bands (CLOUD_MASK, SHADOW_MASK, CLOUDLESS_MASK,
        CLOUD_DIST) are tested.
    :param region: GeoJSON region over which statistics are reduced.
    """
    # create a pan image for testing brightnesses
    ee_image = masked_image.ee_image
    pan = ee_image.select([0, 1, 2]).reduce(ee.Reducer.mean())

    # mask the pan image with cloud, shadow & cloudless masks
    cloud_mask = ee_image.select('CLOUD_MASK')
    shadow_mask = ee_image.select('SHADOW_MASK')
    cloudless_mask = ee_image.select('CLOUDLESS_MASK')
    pan_cloud = pan.updateMask(cloud_mask).rename('PAN_CLOUD')
    pan_shadow = pan.updateMask(shadow_mask).rename('PAN_SHADOW')
    pan_cloudless = pan.updateMask(cloudless_mask).rename('PAN_CLOUDLESS')

    # mask the cloud distance image with cloud & cloudless masks
    cdist = ee_image.select('CLOUD_DIST')
    cdist_cloud = cdist.updateMask(cloud_mask).rename('CDIST_CLOUD')
    cdist_cloudless = cdist.updateMask(cloudless_mask).rename('CDIST_CLOUDLESS')

    # find mean stats of all masked images, and min of cloud distance where it is >0
    # (updateMask(cdist) masks zero-valued CLOUD_DIST pixels before the min reduce)
    stats_image = ee.Image([pan_cloud, pan_shadow, pan_cloudless, cdist_cloud, cdist_cloudless])
    proj = masked_image._ee_proj
    stats = stats_image.reduceRegion(
        reducer='mean', geometry=region, crs=proj, scale=proj.nominalScale(), bestEffort=True, maxPixels=1e8
    )
    cdist_min = cdist.updateMask(cdist).reduceRegion(
        reducer='min', geometry=region, crs=proj, scale=proj.nominalScale(), bestEffort=True, maxPixels=1e8
    )
    # combine into a single server-side dict, then fetch all stats in one getInfo() call
    stats = stats.set('CDIST_MIN', cdist_min.get('CLOUD_DIST'))
    stats = stats.getInfo()

    # test cloud is brighter than cloudless
    assert stats['PAN_CLOUD'] > stats['PAN_CLOUDLESS']
    # test cloudless is brighter than shadow
    assert stats['PAN_CLOUDLESS'] > stats['PAN_SHADOW']
    # test cloudless areas have greater distance to cloud than cloudy areas
    assert stats['CDIST_CLOUDLESS'] > stats['CDIST_CLOUD']
    # test min distance to cloud is pixel size (*10 presumably converts CLOUD_DIST units of 10m to m - TODO confirm)
    assert stats['CDIST_MIN'] * 10 == masked_image._ee_proj.nominalScale().getInfo()
392 |
393 |
@pytest.mark.parametrize(
    'masked_image', ['l9_masked_image', 'l8_masked_image', 'l7_masked_image', 'l5_masked_image', 'l4_masked_image']
)
def test_landsat_aux_bands(masked_image: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test Landsat auxiliary band values for sanity."""
    # delegate the sanity checks to the shared helper
    _test_aux_stats(request.getfixturevalue(masked_image), region_10000ha)
401 |
402 |
@pytest.mark.parametrize(
    'image_id, mask_methods',
    [
        ('s2_sr_image_id', ['cloud-prob', 'qa']),
        ('s2_toa_image_id', ['cloud-prob', 'qa']),
        ('s2_sr_hm_image_id', ['cloud-prob', 'qa']),
        ('s2_toa_hm_image_id', ['cloud-prob', 'qa']),
        # missing QA60 so do cloud-prob method only
        ('s2_sr_hm_qa_zero_image_id', ['cloud-prob']),
    ],
)
def test_s2_aux_bands(image_id: str, mask_methods: Iterable, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test Sentinel-2 auxiliary band values for sanity with all masking methods."""
    ee_id: str = request.getfixturevalue(image_id)
    # run the shared sanity checks for each supported masking method
    for mask_method in mask_methods:
        _test_aux_stats(MaskedImage.from_id(ee_id, mask_method=mask_method), region_10000ha)
420 |
421 |
@pytest.mark.parametrize(
    'masked_image',
    ['s2_sr_hm_nocp_masked_image', 's2_sr_hm_qa_zero_masked_image', 's2_sr_hm_nocs_masked_image'],
)
def test_s2_aux_bands_missing_data(masked_image: str, region_10000ha: Dict, request: pytest.FixtureRequest):
    """Test Sentinel-2 auxiliary band masking / transparency for unmasked images missing required cloud data."""
    masked_image: MaskedImage = request.getfixturevalue(masked_image)

    # get region sums of the auxiliary masks
    # (summing the masks of the aux bands counts their unmasked i.e. valid pixels)
    proj = masked_image._ee_proj
    aux_bands = masked_image.ee_image.select('.*MASK|CLOUD_DIST')
    aux_mask = aux_bands.mask()
    stats = aux_mask.reduceRegion(
        reducer='sum', geometry=region_10000ha, crs=proj, scale=proj.nominalScale(), bestEffort=True, maxPixels=1e8
    )
    stats = stats.getInfo()

    # test auxiliary masks are transparent
    # FILL_MASK should still be valid: the image itself contains data
    assert stats['FILL_MASK'] > 0
    # s2_sr_hm_nocs_masked_image is missing CLOUD_MASK and SHADOW_MASK bands, so only include these when they exist
    band_names = ['CLOUDLESS_MASK', 'CLOUD_DIST'] + list({'CLOUD_MASK', 'SHADOW_MASK'}.intersection(stats.keys()))
    for band_name in band_names:
        # cloud-related aux bands should be fully masked (zero valid pixels) without cloud data
        assert stats[band_name] == 0, band_name
445 |
446 |
@pytest.mark.parametrize(
    'masked_image',
    [
        'gedi_cth_masked_image',
        # use s2_sr_masked_image rather than s2_sr_hm_masked_image which complicates testing due to fully masked
        # MSK_CLASSI* bands
        's2_sr_masked_image',
        'l9_masked_image',
    ],
)
def test_mask_clouds(masked_image: str, region_100ha: Dict, tmp_path, request: pytest.FixtureRequest):
    """Test MaskedImage.mask_clouds() masks the fill or cloudless portion by downloading and examining dataset masks."""
    image: MaskedImage = request.getfixturevalue(masked_image)
    filename = tmp_path.joinpath('test_image.tif')
    image.mask_clouds()
    proj_scale = image._ee_proj.nominalScale().getInfo()
    image.download(filename, region=region_100ha, dtype='float32', scale=proj_scale)
    assert filename.exists()

    with rio.open(filename, 'r') as ds:
        # cloud maskable images are masked to CLOUDLESS_MASK, others to FILL_MASK
        mask_name = 'CLOUDLESS_MASK' if isinstance(image, CloudMaskedImage) else 'FILL_MASK'
        mask = ds.read(ds.descriptions.index(mask_name) + 1, masked=True)

        # test areas outside CLOUDLESS_MASK / FILL_MASK are masked
        assert np.all(mask == 1)

        # test CLOUDLESS_MASK / FILL_MASK matches the nodata mask for each band
        bool_mask = mask.filled(0).astype('bool')
        ds_masks = ds.read_masks().astype('bool')
        assert np.all(bool_mask == ds_masks)
477 |
478 |
@pytest.mark.parametrize(
    'masked_image',
    ['s2_sr_hm_nocp_masked_image', 's2_sr_hm_qa_zero_masked_image', 's2_sr_hm_nocs_masked_image'],
)
def test_s2_mask_clouds_missing_data(masked_image: str, region_100ha: Dict, tmp_path, request: pytest.FixtureRequest):
    """Test Sentinel2SrClImage.mask_clouds() masks the entire image when it is missing required cloud data. Downloads
    and examines dataset masks.
    """
    image: MaskedImage = request.getfixturevalue(masked_image)
    filename = tmp_path.joinpath('test_image.tif')
    image.mask_clouds()
    proj_scale = image._ee_proj.nominalScale().getInfo()
    image.download(filename, region=region_100ha, dtype='float32', scale=proj_scale)
    assert filename.exists()

    # test all data is masked / nodata
    with rio.open(filename, 'r') as ds:
        assert not np.any(ds.read_masks().astype('bool'))
498 |
499 |
def test_skysat_region_stats():
    """Test _set_region_stats() works on SKYSAT image with no region."""
    masked_image = MaskedImage(ee.Image('SKYSAT/GEN-A/PUBLIC/ORTHO/RGB/s02_20141004T074858Z'))
    # no region -> stats are found over the image footprint
    masked_image._set_region_stats()
    props = masked_image.properties
    assert 'FILL_PORTION' in props
    assert props['FILL_PORTION'] > 0.8
507 |
--------------------------------------------------------------------------------
/tests/test_stac.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | import re
18 |
19 | import pytest
20 |
21 | from geedim.stac import StacCatalog
22 | from geedim.utils import split_id
23 |
24 |
@pytest.fixture(scope='session')
def stac_catalog() -> StacCatalog:
    """The StacCatalog instance."""
    catalog = StacCatalog()
    return catalog
29 |
30 |
def test_singleton(landsat_ndvi_image_id: str):
    """Test StacCatalog is a singleton."""
    coll_name, _ = split_id(landsat_ndvi_image_id)
    first = StacCatalog()
    second = StacCatalog()
    # state accessed through one instance should be visible through the other
    _ = first.url_dict
    assert len(second._url_dict) > 0
    _ = first.get_item(coll_name)
    assert coll_name in second._cache
40 |
41 |
def test_traverse_stac(stac_catalog: StacCatalog):
    """Test _traverse_stac() on the root of the COPERNICUS subtree."""
    copernicus_url = 'https://storage.googleapis.com/earthengine-stac/catalog/COPERNICUS/catalog.json'
    url_dict = stac_catalog._traverse_stac(copernicus_url, {})
    assert len(url_dict) > 0
    # the traversal should have discovered the S2 SR collection
    assert 'COPERNICUS/S2_SR_HARMONIZED' in url_dict
50 |
51 |
@pytest.mark.parametrize(
    'image_id',
    [
        'l4_image_id',
        'l5_image_id',
        'l7_image_id',
        'l8_image_id',
        'l9_image_id',
        'landsat_ndvi_image_id',
        's2_sr_hm_image_id',
        's2_toa_hm_image_id',
        's1_sar_image_id',
        'modis_nbar_image_id',
        'gedi_cth_image_id',
    ],
)
def test_known_get_item(image_id: str, stac_catalog: StacCatalog, request: pytest.FixtureRequest):
    """Test that stac_catalog contains expected 'items'."""
    coll_name, _ = split_id(request.getfixturevalue(image_id))
    assert coll_name in stac_catalog.url_dict
    assert stac_catalog.get_item(coll_name) is not None
75 |
76 |
def test_unknown_get_item(stac_catalog: StacCatalog):
    """Test that stac_catalog returns None for unknown entries."""
    for getter in (stac_catalog.get_item_dict, stac_catalog.get_item):
        assert getter('unknown') is None
81 |
82 |
@pytest.mark.parametrize(
    'image_id',
    [
        'l4_image_id',
        'l5_image_id',
        'l7_image_id',
        'l8_image_id',
        'l9_image_id',
        # fixed: the two *_hm IDs were listed twice, running identical tests twice
        's2_sr_hm_image_id',
        's2_toa_hm_image_id',
        'modis_nbar_image_id',
    ],
)
def test_refl_stac_item(image_id: str, stac_catalog: StacCatalog, request: pytest.FixtureRequest):
    """Test reflectance collection StacItem properties are as expected."""
    image_id = request.getfixturevalue(image_id)
    coll_name, _ = split_id(image_id)
    stac_item = stac_catalog.get_item(coll_name)
    assert stac_item is not None
    if coll_name:
        assert stac_item.name == coll_name
        assert len(stac_item.license) > 0
    assert stac_item.band_props is not None
    # every band should have ground sample distance and description properties
    for key in ['gsd', 'description']:
        has_key = [key in bd for bd in stac_item.band_props.values()]
        assert all(has_key)
    # at least 7 (spectral) bands should have a center wavelength
    has_center_wavelength = ['center_wavelength' in bd for bd in stac_item.band_props.values()]
    assert sum(has_center_wavelength) >= 7
    # reflectance bands should have wavelength and scale properties
    for band_dict in stac_item.band_props.values():
        if re.search(r'^B\d|^SR_B\d|^Nadir_Reflectance_Band\d', band_dict['name']):
            assert 'center_wavelength' in band_dict
            assert 'scale' in band_dict
117 |
118 |
@pytest.mark.parametrize('image_id', ['l4_image_id', 's2_sr_hm_image_id', 's1_sar_image_id'])
def test_stac_item_descriptions(image_id: str, stac_catalog: StacCatalog, request: pytest.FixtureRequest):
    """Test StacItem.descriptions."""
    coll_name, _ = split_id(request.getfixturevalue(image_id))
    stac_item = stac_catalog.get_item(coll_name)
    assert stac_item is not None
    descriptions = stac_item.descriptions
    assert descriptions is not None
    assert len(descriptions) > 0
    # the first description should be non-empty
    assert len(next(iter(descriptions.values()))) > 0
129 |
--------------------------------------------------------------------------------
/tests/test_tile.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | import io
18 | import json
19 | import logging
20 | from collections import namedtuple
21 |
22 | import ee
23 | import numpy as np
24 | import pytest
25 | import rasterio as rio
26 | import requests
27 | from rasterio import Affine
28 | from rasterio.windows import Window
29 | from tqdm.auto import tqdm
30 |
31 | from geedim.errors import TileError
32 | from geedim.tile import Tile
33 | from geedim.utils import retry_session
34 |
# Lightweight record used to mock geedim.download.BaseImage in these tests.
BaseImageLike = namedtuple('BaseImageLike', 'ee_image crs transform shape count dtype')
36 |
37 |
@pytest.fixture(scope='module')
def mock_base_image(region_25ha: dict) -> BaseImageLike:
    """A BaseImage mock containing a synthetic ee.Image."""
    # constant 3 band image, reprojected & clipped so it has a fixed projection and footprint
    ee_image = ee.Image([1, 2, 3]).reproject(crs='EPSG:4326', scale=30).clip(region_25ha)
    ee_info = ee_image.getInfo()
    band_info = ee_info['bands'][0]
    # combine the band transform with the clip origin offset to form the full geo-transform
    transform = Affine(*band_info['crs_transform']) * Affine.translation(*band_info['origin'])
    # NOTE(review): crs is 'EPSG:3857' here while the image is reprojected to 'EPSG:4326' above - confirm the
    # mismatch is intentional (e.g. to exercise a re-projecting download)
    return BaseImageLike(ee_image, 'EPSG:3857', transform, tuple(band_info['dimensions'][::-1]), 3, 'uint8')
46 |
47 |
@pytest.fixture(scope='module')
def synth_tile(mock_base_image: BaseImageLike) -> Tile:
    """A tile representing the whole of ``mock_base_image``."""
    height, width = mock_base_image.shape
    return Tile(mock_base_image, Window(0, 0, width, height))
53 |
54 |
@pytest.fixture(scope='function')
def mock_ee_image(monkeypatch: pytest.MonkeyPatch) -> None:
    """Patch ee.Image.getDownloadURL() to return None."""
    # replace with a lambda that accepts and ignores any arguments
    monkeypatch.setattr(ee.Image, 'getDownloadURL', lambda *args, **kwargs: None)
63 |
64 |
@pytest.fixture(scope='module')
def gtiff_bytes(mock_base_image: BaseImageLike) -> bytes:
    """GeoTIFF bytes for ``mock_base_image``."""
    # band i has the constant value i+1
    band_values = np.array([1, 2, 3]).reshape(-1, 1, 1)
    array = np.ones((mock_base_image.count, *mock_base_image.shape)) * band_values

    buf = io.BytesIO()
    with rio.open(
        buf,
        'w',
        **rio.default_gtiff_profile,
        width=mock_base_image.shape[1],
        height=mock_base_image.shape[0],
        count=mock_base_image.count,
    ) as ds:
        ds.write(array)

    return buf.getvalue()
83 |
84 |
def test_create(mock_base_image: BaseImageLike):
    """Test creation of a Tile object."""
    height, width = mock_base_image.shape
    window = Window(0, 0, width, height)
    tile = Tile(mock_base_image, window)

    assert tile.window == window
    assert tile._transform == mock_base_image.transform
    assert tile._shape == mock_base_image.shape
92 |
93 |
@pytest.mark.parametrize('session', [None, retry_session()])
def test_download(synth_tile: Tile, session):
    """Test downloading the synthetic image tile."""
    exp_image = synth_tile._exp_image
    item_size = np.dtype(exp_image.dtype).itemsize
    raw_size = synth_tile._shape[0] * synth_tile._shape[1] * exp_image.count * item_size
    bar = tqdm(total=float(raw_size))

    array = synth_tile.download(session=session, bar=bar)

    assert array is not None
    assert array.shape == (exp_image.count, *exp_image.shape)
    assert array.dtype == np.dtype(exp_image.dtype)
    # each band holds its constant value (band index + 1)
    for band_i, band in enumerate(array):
        assert np.all(band == band_i + 1)
    assert bar.n == pytest.approx(raw_size, rel=0.01)
108 |
109 |
def test_mem_limit_error(synth_tile: Tile, mock_ee_image: None):
    """Test downloading raises the 'user memory limit exceeded' error with a mock response."""
    msg = 'User memory limit exceeded.'

    # patch session.get() to return a mock response carrying the EE memory limit error
    def mock_get(url, **kwargs):
        resp = requests.Response()
        resp.status_code = 400
        resp.headers = {'content-length': '1'}
        resp._content = json.dumps({'error': {'message': msg}}).encode()
        return resp

    session = retry_session()
    session.get = mock_get

    # test memory limit error is raised on download
    with pytest.raises(TileError) as ex:
        synth_tile.download(session=session)
    assert msg in str(ex.value)
129 |
130 |
def test_retry(synth_tile: Tile, mock_ee_image: None, gtiff_bytes: bytes, caplog: pytest.LogCaptureFixture):
    """Test downloading retries invalid tiles until it succeeds."""
    # progress bar sized to the raw download
    exp_image = synth_tile._exp_image
    item_size = np.dtype(exp_image.dtype).itemsize
    raw_size = synth_tile._shape[0] * synth_tile._shape[1] * exp_image.count * item_size
    bar = tqdm(total=float(raw_size))

    def make_response(content: bytes) -> requests.Response:
        # mock an OK response whose body is ``content``
        resp = requests.Response()
        resp.status_code = 200
        resp.headers = {'content-length': str(len(gtiff_bytes))}
        resp.raw = io.BytesIO(content)
        return resp

    # mock responses: four with invalid content, then a valid GeoTIFF
    responses = [make_response(b'error') for _ in range(4)]
    responses.append(make_response(gtiff_bytes))

    # patch session.get() to pop and return a mocked response from the list
    session = retry_session()
    session.get = lambda url, **kwargs: responses.pop(0)

    # test the tile is downloaded correctly, after retries
    with caplog.at_level(logging.WARNING):
        array = synth_tile.download(session=session, bar=bar, backoff_factor=0)

    assert array.shape == (exp_image.count, *exp_image.shape)
    assert array.dtype == np.dtype(exp_image.dtype)
    for band_i, band in enumerate(array):
        assert np.all(band == band_i + 1)

    # test progress bar is adjusted for retries
    assert bar.n == pytest.approx(raw_size, rel=0.01)

    # test retry logs
    assert 'retry' in caplog.text and 'not recognized' in caplog.text
172 |
173 |
def test_retry_error(synth_tile: Tile, mock_ee_image: None, gtiff_bytes: bytes):
    """Test downloading raises an error when the maximum retries are reached."""
    # progress bar sized to the raw download
    exp_image = synth_tile._exp_image
    item_size = np.dtype(exp_image.dtype).itemsize
    raw_size = synth_tile._shape[0] * synth_tile._shape[1] * exp_image.count * item_size
    bar = tqdm(total=float(raw_size))

    # patch session.get() to always return a response whose content is not a valid GeoTIFF
    def mock_get(url, **kwargs):
        resp = requests.Response()
        resp.status_code = 200
        resp.headers = {'content-length': '10'}
        resp.raw = io.BytesIO(b'error')
        return resp

    session = retry_session()
    session.get = mock_get

    # test max retries error is raised on download
    with pytest.raises(TileError) as ex:
        synth_tile.download(session=session, bar=bar, backoff_factor=0)
    assert 'maximum retries' in str(ex.value)

    # test progress bar is adjusted for retries
    assert bar.n == pytest.approx(0)
200 |
--------------------------------------------------------------------------------
/tests/test_utils.py:
--------------------------------------------------------------------------------
1 | """
2 | Copyright 2021 Dugal Harris - dugalh@gmail.com
3 |
4 | Licensed under the Apache License, Version 2.0 (the "License");
5 | you may not use this file except in compliance with the License.
6 | You may obtain a copy of the License at
7 |
8 | http://www.apache.org/licenses/LICENSE-2.0
9 |
10 | Unless required by applicable law or agreed to in writing, software
11 | distributed under the License is distributed on an "AS IS" BASIS,
12 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13 | See the License for the specific language governing permissions and
14 | limitations under the License.
15 | """
16 |
17 | import time
18 | from typing import Dict
19 |
20 | import ee
21 | import pytest
22 | from rasterio.features import bounds
23 |
24 | from geedim import MaskedImage
25 | from geedim.enums import ResamplingMethod
26 | from geedim.utils import asset_id, get_bounds, get_projection, resample, Spinner, split_id
27 |
28 |
@pytest.mark.parametrize(
    'image_id, exp_split', [('A/B/C', ('A/B', 'C')), ('ABC', ('', 'ABC')), (None, (None, None))]
)
def test_split_id(image_id, exp_split):
    """Test split_id()."""
    # parameter renamed from 'id' to avoid shadowing the builtin
    assert split_id(image_id) == exp_split
33 |
34 |
def test_get_bounds(const_image_25ha_file, region_25ha):
    """Test get_bounds()."""
    # compare the file's bounds with those of the known test region
    file_bounds = bounds(get_bounds(const_image_25ha_file, expand=0))
    region_bounds = bounds(region_25ha)
    assert file_bounds == pytest.approx(region_bounds, abs=0.001)
40 |
41 |
def test_get_projection(s2_sr_masked_image):
    """Test get_projection()."""
    # fetch CRS and scale for the min and max scale projections
    crss, scales = [], []
    for min_scale in [True, False]:
        proj = get_projection(s2_sr_masked_image.ee_image, min_scale=min_scale)
        crss.append(proj.crs().getInfo())
        scales.append(proj.nominalScale().getInfo())

    min_crs, max_crs = crss
    min_scale, max_scale = scales
    assert min_crs.startswith('EPSG:')
    assert min_crs == max_crs
    assert max_scale == 60
    assert min_scale == 10
55 |
56 |
def test_spinner():
    """Test Spinner class."""
    spinner = Spinner(label='test', interval=0.1)
    assert not spinner.is_alive()
    # the spinner thread should run only inside the context manager
    with spinner:
        assert spinner._run
        assert spinner.is_alive()
        time.sleep(0.5)
    assert not spinner._run
    assert not spinner.is_alive()
67 |
68 |
@pytest.mark.parametrize(
    'image_id, method, scale',
    [
        ('l9_image_id', ResamplingMethod.bilinear, 15),
        ('s2_sr_hm_image_id', ResamplingMethod.average, 25),
        ('modis_nbar_image_id', ResamplingMethod.bicubic, 100),
    ],
)
def test_resample_fixed(
    image_id: str, method: ResamplingMethod, scale: float, region_100ha: Dict, request: pytest.FixtureRequest
):
    """Test that resample() smooths images with a fixed projection."""
    source_im = ee.Image(request.getfixturevalue(image_id))
    resampled_im = resample(source_im, method)
    crs = source_im.select(0).projection().crs()

    def mean_std_dev(im: ee.Image):
        # find mean of band std. devs. over the test region
        im = im.reproject(crs=crs, scale=scale)  # required to resample at scale
        return im.reduceRegion('stdDev', geometry=region_100ha).values().reduce('mean')

    stds = ee.List([mean_std_dev(source_im), mean_std_dev(resampled_im)]).getInfo()

    # test resampled_im is smoother than source_im
    assert stds[1] < stds[0]
96 |
97 |
@pytest.mark.parametrize(
    'masked_image, method, scale',
    [
        ('user_masked_image', ResamplingMethod.bilinear, 50),
        ('landsat_ndvi_masked_image', ResamplingMethod.average, 50),
    ],
)
def test_resample_comp(
    masked_image: str, method: ResamplingMethod, scale: float, region_100ha: Dict, request: pytest.FixtureRequest
):
    """Test that resample() leaves composite images unaltered."""
    masked_image: MaskedImage = request.getfixturevalue(masked_image)
    source_im = masked_image.ee_image
    resampled_im = resample(source_im, method)
    crs = source_im.select(0).projection().crs()

    def mean_std_dev(im: ee.Image):
        # find mean of band std. devs. over the test region
        im = im.reproject(crs=crs, scale=scale)  # required to resample at scale
        return im.reduceRegion('stdDev', geometry=region_100ha).values().reduce('mean')

    stds = ee.List([mean_std_dev(source_im), mean_std_dev(resampled_im)]).getInfo()

    # test no change between resampled_im and source_im
    assert stds[1] == stds[0]
124 |
125 |
@pytest.mark.parametrize(
    'filename, folder, exp_id',
    [
        ('file', 'folder', 'projects/folder/assets/file'),
        ('fp1/fp2/fp3', 'folder', 'projects/folder/assets/fp1-fp2-fp3'),
        ('file', 'folder/sub-folder', 'projects/folder/assets/sub-folder/file'),
        ('file', None, 'file'),
        ('projects/folder/assets/file', None, 'projects/folder/assets/file'),
    ],
)
def test_asset_id(filename: str, folder: str, exp_id: str):
    """Test asset_id() works as expected."""
    # local renamed from 'id' to avoid shadowing the builtin
    asset_id_ = asset_id(filename, folder)
    assert asset_id_ == exp_id
140 |
--------------------------------------------------------------------------------