AROSICS is a Python package to perform automatic subpixel co-registration of two satellite image datasets based on an image matching approach working in the frequency domain, combined with a multistage workflow for effective detection of false-positives.
\n\n
It detects and corrects local as well as global misregistrations between two input images at the subpixel scale, as they are often present in satellite imagery. The algorithm is robust against the typical difficulties of multi-sensoral / multi-temporal images. Clouds are automatically handled by the implemented outlier detection algorithms. Custom masks may be provided to exclude certain image areas from tie point creation. The image overlap area is automatically detected. AROSICS supports a wide range of input data formats and can be used from the command line (without any Python experience) or as a normal Python package.
",
4 | "license": "Apache-2.0",
5 | "upload_type": "software",
6 | "keywords": [
7 | "AROSICS",
8 | "image co-registration",
9 | "geometric pre-processing",
10 | "remote sensing",
11 | "sensor fusion"
12 | ],
13 | "creators": [
14 | {
15 | "name": "Scheffler, Daniel",
16 | "affiliation": "Helmholtz Centre Potsdam German Research Centre for Geosciences GFZ, Section 1.4 - Remote Sensing and Geoinformatics",
17 | "orcid": "0000-0003-4106-8928"
18 | }
19 | ],
20 | "references": [
21 | "Scheffler, D.; Hollstein, A.; Diedrich, H.; Segl, K.; Hostert, P. AROSICS: An Automated and Robust Open-Source Image Co-Registration Software for Multi-Sensor Satellite Data. Remote Sens. 2017, 9, 676. doi:https://doi.org/10.3390/rs9070676"
22 | ],
23 | "related_identifiers": [
24 | {
25 | "scheme": "url",
26 | "identifier": "https://git.gfz-potsdam.de/danschef/arosics",
27 | "relation": "isSupplementTo",
28 | "resource_type": "software"
29 | },
30 | {
31 | "scheme": "url",
32 | "identifier": "https://danschef.git-pages.gfz-potsdam.de/arosics/doc",
33 | "relation": "isDocumentedBy",
34 | "resource_type": "publication-softwaredocumentation"
35 | },
36 | {
37 | "scheme": "url",
38 | "identifier": "https://www.mdpi.com/2072-4292/9/7/676",
39 | "relation": "isCitedBy",
40 | "resource_type": "publication-article"
41 | }
42 | ],
43 | "grants": [
44 | {
45 | "code": "01 IS 14 010 A-C",
46 | "funder": "German Federal Ministry of Education and Research",
47 | "title": "GeoMultiSens"
48 | }
49 | ]
50 | }
51 |
--------------------------------------------------------------------------------
/AUTHORS.rst:
--------------------------------------------------------------------------------
1 | =======
2 | Credits
3 | =======
4 |
5 | Development Lead
6 | ----------------
7 |
8 | * Daniel Scheffler
9 |
10 | Contributors
11 | ------------
12 |
13 | None yet. Why not be the first?
14 |
--------------------------------------------------------------------------------
/CITATION:
--------------------------------------------------------------------------------
1 | You are welcome to use and modify the `arosics` project.
2 | See the README.rst and LICENSE files for details.
3 |
4 | If you use this software for research, we would appreciate an appropriate citation.
5 | This may be prepared using the bibliographic metadata contained in our DOI, accessible through the DOI system and at
6 | https://doi.org/10.5281/zenodo.3742909
7 |
8 | To cite the `arosics` Python package in your publication, please use (modify the version number if needed):
9 |
10 | Daniel Scheffler. (2017, July 3). AROSICS: An Automated and Robust Open-Source Image Co-Registration Software for Multi-Sensor Satellite Data (Version 0.2.1). Zenodo. https://doi.org/10.5281/zenodo.3742909
11 |
12 | This may need modification for the citation style of your publication.
13 | You are encouraged to include the version number of the software.
14 |
15 | A BibTeX entry for LaTeX users should look like this
16 | (find the one for the latest version here: https://zenodo.org/record/3743085/export/hx):
17 |
18 | @software{daniel_scheffler_2017_3743085,
19 | author = {Daniel Scheffler},
20 | title = {{AROSICS: An Automated and Robust Open-Source Image
21 | Co-Registration Software for Multi-Sensor
22 | Satellite Data}},
23 | month = jul,
24 | year = 2017,
25 | note = {{This is the version as used in Scheffler et al.
26 | (2017): https://www.mdpi.com/2072-4292/9/7/676.}},
27 | publisher = {Zenodo},
28 | version = {0.2.1},
29 | doi = {10.5281/zenodo.3743085},
30 | url = {https://doi.org/10.5281/zenodo.3743085}
31 | }
32 |
33 | For information on citing software products generally, see the
34 | FORCE11 document [*].
35 |
36 |
37 | [*] FORCE11 Software Citation Working Group (2016), "Software Citation Principles", (Editors: Arfon M. Smith, Daniel S. Katz, Kyle E. Niemeyer).
38 | Accessed 2017-08-08 at https://www.force11.org/sites/default/files/shared-documents/software-citation-principles.pdf
39 |
--------------------------------------------------------------------------------
/CONTRIBUTING.rst:
--------------------------------------------------------------------------------
1 | .. highlight:: shell
2 |
3 | ============
4 | Contributing
5 | ============
6 |
7 | Contributions are welcome, and they are greatly appreciated! Every
8 | little bit helps, and credit will always be given.
9 |
10 | You can contribute in many ways:
11 |
12 | Types of Contributions
13 | ----------------------
14 |
15 | Report Bugs
16 | ~~~~~~~~~~~
17 |
18 | Report bugs at https://git.gfz-potsdam.de/danschef/arosics/issues
19 |
20 | If you are reporting a bug, please include:
21 |
22 | * Your operating system name and version.
23 | * Any details about your local setup that might be helpful in troubleshooting.
24 | * Detailed steps to reproduce the bug.
25 |
26 | Fix Bugs
27 | ~~~~~~~~
28 |
29 | Look through the GitLab issues for bugs. Anything tagged with "bug"
30 | and "help wanted" is open to whoever wants to implement it.
31 |
32 | Implement Features
33 | ~~~~~~~~~~~~~~~~~~
34 |
35 | Look through the GitLab issues for features. Anything tagged with "enhancement"
36 | and "help wanted" is open to whoever wants to implement it.
37 |
38 | Write Documentation
39 | ~~~~~~~~~~~~~~~~~~~
40 |
41 | arosics could always use more documentation, whether as part of the
42 | official arosics docs, in docstrings, or even on the web in blog posts,
43 | articles, and such.
44 |
45 | Submit Feedback
46 | ~~~~~~~~~~~~~~~
47 |
48 | The best way to send feedback is to file an issue at https://git.gfz-potsdam.de/danschef/arosics/issues.
49 |
50 | If you are proposing a feature:
51 |
52 | * Explain in detail how it would work.
53 | * Keep the scope as narrow as possible, to make it easier to implement.
54 | * Remember that this is a volunteer-driven project, and that contributions
55 | are welcome :)
56 |
57 | You may also join our chat here: |Gitter|
58 |
59 | .. |Gitter| image:: https://badges.gitter.im/Join%20Chat.svg
60 | :target: https://gitter.im/arosics/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link
61 | :alt: https://gitter.im/arosics/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link
62 |
63 | Get Started!
64 | ------------
65 |
66 | Ready to contribute? Here's how to set up `arosics` for local development.
67 |
68 | #. Fork the `arosics` repo on GitLab.
69 |
70 | #. Clone your fork locally:
71 |
72 | .. code-block:: bash
73 |
74 | $ git clone https://git.gfz-potsdam.de/your_username_here/arosics.git
75 |
76 | #. Install your local copy into a virtualenv. Assuming you have virtualenvwrapper installed,
77 | this is how you set up your fork for local development:
78 |
79 | .. code-block:: bash
80 |
81 | $ mkvirtualenv arosics
82 | $ cd arosics/
83 | $ python setup.py develop
84 |
85 | #. Create a branch for local development:
86 |
87 | .. code-block:: bash
88 |
89 | $ git checkout -b name-of-your-bugfix-or-feature
90 |
91 | Now you can make your changes locally.
92 |
93 | #. When you're done making changes, check that your changes pass flake8 and the tests,
94 | including testing other Python versions with tox:
95 |
96 | .. code-block:: bash
97 |
98 | $ flake8 arosics tests
99 | $ python -m unittest discover
100 | $ tox
101 |
102 | To get flake8 and tox, just pip install them into your virtualenv.
103 |
104 | #. Commit your changes and push your branch to GitHub:
105 |
106 | .. code-block:: bash
107 |
108 | $ git add .
109 | $ git commit -m "Your detailed description of your changes."
110 | $ git push origin name-of-your-bugfix-or-feature
111 |
112 | #. Submit a pull request through the GitLab website.
113 |
114 | Pull Request Guidelines
115 | -----------------------
116 |
117 | Before you submit a pull request, check that it meets these guidelines:
118 |
119 | 1. The pull request should include tests.
120 | 2. If the pull request adds functionality, the docs should be updated. Put
121 | your new functionality into a function with a docstring, and add the
122 | feature to the list in README.rst.
123 | 3. The pull request should work for Python 3.6+, and for PyPy. Check the CI results at
124 | https://git.gfz-potsdam.de/danschef/arosics/commits/main
125 | and make sure that the tests pass for all supported Python versions.
126 |
127 | Tips
128 | ----
129 |
130 | To run a subset of tests:
131 |
132 | .. code-block:: bash
133 |
134 | # e.g., to test if the COREG class can be properly initialized:
135 | $ python -m unittest tests.test_COREG.COREG_GLOBAL_init
136 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | https://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include AUTHORS.rst
2 | include CONTRIBUTING.rst
3 | include HISTORY.rst
4 | include LICENSE
5 | include README.rst
6 |
7 | exclude .coveragerc
8 | exclude .editorconfig
9 | exclude .gitignore
10 | exclude .gitlab-ci.yml
11 | exclude MANIFEST.in
12 |
13 | recursive-exclude .github *
14 | recursive-exclude docs *
15 | recursive-exclude tests *
16 | recursive-exclude * __pycache__
17 | recursive-exclude * *.py[co]
18 |
19 | recursive-include *.rst conf.py Makefile make.bat *.jpg *.png *.gif
20 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | .PHONY: clean clean-test clean-pyc clean-build docs help pytest
2 | .DEFAULT_GOAL := help
3 | define BROWSER_PYSCRIPT
4 | import os, webbrowser, sys
5 | try:
6 | from urllib import pathname2url
7 | except:
8 | from urllib.request import pathname2url
9 |
10 | webbrowser.open("file://" + pathname2url(os.path.abspath(sys.argv[1])))
11 | endef
12 | export BROWSER_PYSCRIPT
13 |
14 | define PRINT_HELP_PYSCRIPT
15 | import re, sys
16 |
17 | for line in sys.stdin:
18 | match = re.match(r'^([a-zA-Z_-]+):.*?## (.*)$$', line)
19 | if match:
20 | target, help = match.groups()
21 | print("%-20s %s" % (target, help))
22 | endef
23 | export PRINT_HELP_PYSCRIPT
24 | BROWSER := python -c "$$BROWSER_PYSCRIPT"
25 |
26 | help:
27 | @python -c "$$PRINT_HELP_PYSCRIPT" < $(MAKEFILE_LIST)
28 |
29 | clean: clean-build clean-pyc clean-test ## remove all build, test, coverage and Python artifacts
30 |
31 |
32 | clean-build: ## remove build artifacts
33 | rm -fr build/
34 | rm -fr dist/
35 | rm -fr .eggs/
36 | find . -name '*.egg-info' -exec rm -fr {} +
37 | find . -name '*.egg' -exec rm -f {} +
38 |
39 | clean-pyc: ## remove Python file artifacts
40 | find . -name '*.pyc' -exec rm -f {} +
41 | find . -name '*.pyo' -exec rm -f {} +
42 | find . -name '*~' -exec rm -f {} +
43 | find . -name '__pycache__' -exec rm -fr {} +
44 |
45 | clean-test: ## remove test and coverage artifacts
46 | ## don't call coverage erase here because make install calls make clean which calls make clean-test
47 | ## -> since make install should run without the test requirements we can't use coverage erase here
48 | rm -fr .tox/
49 | rm -f .coverage
50 | rm -fr .coverage.*
51 | rm -fr htmlcov/
52 | rm -fr report.html
53 | rm -fr report.xml
54 | rm -fr coverage.xml
55 | rm -fr .pytest_cache
56 |
57 |
58 | lint: ## check style with flake8
59 | flake8 --max-line-length=120 . tests > ./tests/linting/flake8.log || \
60 | (cat ./tests/linting/flake8.log && exit 1)
61 | pycodestyle . --exclude="*.ipynb,*.ipynb*" --max-line-length=120 > ./tests/linting/pycodestyle.log || \
62 | (cat ./tests/linting/pycodestyle.log && exit 1)
63 | -pydocstyle . > ./tests/linting/pydocstyle.log || \
64 | (cat ./tests/linting/pydocstyle.log && exit 1)
65 |
66 | urlcheck: ## check for dead URLs
67 | urlchecker check . --file-types .py,.rst,.md,.json --timeout 60
68 |
69 | test: ## run tests quickly with the default Python
70 | python setup.py test
71 |
72 | test-all: ## run tests on every Python version with tox
73 | tox
74 |
75 | coverage: ## check code coverage quickly with the default Python
76 | coverage erase
77 | coverage run --source arosics setup.py test
78 | coverage combine # must be called in order to make coverage work in multiprocessing
79 | coverage report -m
80 | coverage html
81 | #$(BROWSER) htmlcov/index.html
82 |
83 | pytest: clean-test ## Runs pytest with coverage and creates coverage and test report
84 | ## - puts the coverage results in the folder 'htmlcov'
85 | ## - generates cobertura 'coverage.xml' (needed to show coverage in GitLab MR changes)
86 | ## - generates 'report.html' based on pytest-reporter-html1
87 | ## - generates JUnit 'report.xml' to show the test report as a new tab in a GitLab MR
88 | ## NOTE: additional options pytest and coverage (plugin pytest-cov) are defined in .pytest.ini and .coveragerc
89 | pytest tests \
90 | --verbosity=3 \
91 | --color=yes \
92 | --tb=short \
93 | --cov=arosics \
94 | --cov-report html:htmlcov \
95 | --cov-report term-missing \
96 | --cov-report xml:coverage.xml \
97 | --template=html1/index.html --report=report.html \
98 | --junitxml report.xml
99 |
100 | docs: ## generate Sphinx HTML documentation, including API docs
101 | rm -f docs/arosics.rst
102 | rm -f docs/modules.rst
103 | sphinx-apidoc arosics -o docs/ --private --doc-project 'Python API reference'
104 | $(MAKE) -C docs clean
105 | $(MAKE) -C docs html
106 | $(BROWSER) docs/_build/html/index.html
107 |
108 | servedocs: docs ## compile the docs watching for changes
109 | watchmedo shell-command -p '*.rst' -c '$(MAKE) -C docs html' -R -D .
110 |
111 | release: dist ## package and upload a release
112 | twine upload dist/*
113 |
114 | dist: clean ## builds source package (requires twine and build)
115 | python -m build --sdist
116 | twine check dist/*
117 | ls -l dist
118 |
119 | install: clean ## install the package to the active Python's site-packages
120 | pip install -r requirements.txt
121 | pip install .
122 |
123 | gitlab_CI_docker: ## Build a docker image for CI use within gitlab
124 | cd ./tests/CI_docker/; bash ./build_arosics_testsuite_image.sh
125 |
--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
1 | .. figure:: https://danschef.git-pages.gfz-potsdam.de/arosics/images/arosics_logo.png
2 | :target: https://git.gfz-potsdam.de/danschef/arosics
3 | :align: center
4 |
5 | ==================================================================================================
6 | An Automated and Robust Open-Source Image Co-Registration Software for Multi-Sensor Satellite Data
7 | ==================================================================================================
8 |
9 | * Free software: Apache 2.0
10 | * **Documentation:** https://danschef.git-pages.gfz-potsdam.de/arosics/doc/
11 | * The (open-access) **paper** corresponding to this software repository can be found here:
12 | `Scheffler et al. 2017 <https://www.mdpi.com/2072-4292/9/7/676>`__
13 | (cite as: Scheffler D, Hollstein A, Diedrich H, Segl K, Hostert P. AROSICS: An Automated and Robust Open-Source
14 | Image Co-Registration Software for Multi-Sensor Satellite Data. Remote Sensing. 2017; 9(7):676).
15 | * Information on how to **cite the AROSICS Python package** can be found in the
16 | `CITATION <https://git.gfz-potsdam.de/danschef/arosics/blob/main/CITATION>`__ file.
17 | * Submit feedback by filing an issue `here <https://git.gfz-potsdam.de/danschef/arosics/issues>`__
18 | or join our chat here: |Gitter|
19 |
20 | .. |Gitter| image:: https://badges.gitter.im/Join%20Chat.svg
21 | :target: https://gitter.im/arosics/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link
22 | :alt: https://gitter.im/arosics/Lobby?utm_source=share-link&utm_medium=link&utm_campaign=share-link
23 |
24 | Status
25 | ------
26 |
27 | .. image:: https://git.gfz-potsdam.de/danschef/arosics/badges/main/pipeline.svg
28 | :target: https://git.gfz-potsdam.de/danschef/arosics/commits/main
29 | .. image:: https://git.gfz-potsdam.de/danschef/arosics/badges/main/coverage.svg
30 | :target: https://danschef.git-pages.gfz-potsdam.de/arosics/coverage/
31 | .. image:: https://img.shields.io/pypi/v/arosics.svg
32 | :target: https://pypi.python.org/pypi/arosics
33 | .. image:: https://img.shields.io/conda/vn/conda-forge/arosics.svg
34 | :target: https://anaconda.org/conda-forge/arosics
35 | .. image:: https://img.shields.io/pypi/l/arosics.svg
36 | :target: https://git.gfz-potsdam.de/danschef/arosics/blob/main/LICENSE
37 | .. image:: https://img.shields.io/pypi/pyversions/arosics.svg
38 | :target: https://img.shields.io/pypi/pyversions/arosics.svg
39 | .. image:: https://img.shields.io/pypi/dm/arosics.svg
40 | :target: https://pypi.python.org/pypi/arosics
41 | .. image:: https://zenodo.org/badge/253474603.svg
42 | :target: https://zenodo.org/badge/latestdoi/253474603
43 |
44 | See also the latest coverage_ report and the pytest_ HTML report.
45 |
46 | Feature overview
47 | ----------------
48 |
49 | AROSICS is a Python package to perform **automatic subpixel co-registration** of two satellite image datasets
50 | based on an image matching approach working in the frequency domain, combined with a multistage workflow for
51 | effective detection of false-positives.
52 |
53 | It detects and corrects **local as well as global misregistrations** between two input images at the subpixel
54 | scale, as they are often present in satellite imagery. The algorithm is robust against the typical difficulties of
55 | multi-sensoral/multi-temporal images. Clouds are automatically handled by the implemented outlier detection algorithms.
56 | Custom masks may be provided to exclude certain image areas from tie point creation. The image overlap area
57 | is automatically detected. AROSICS supports a wide range of input data formats and can be used from the command
58 | line (without any Python experience) or as a normal Python package.
59 |
60 |
61 | Global co-registration - fast but only for static X/Y-shifts
62 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
63 |
64 | Only a global X/Y translation is computed within a small subset of the input images (window position is adjustable).
65 | This allows very fast co-registration but only corrects for translational (global) X/Y shifts.
66 | The calculated subpixel shifts are (by default) applied to the geocoding information of the output image.
67 | No spatial resampling is done automatically as long as both input images have the same projection. However, AROSICS
68 | also allows aligning the output image to the reference image coordinate grid if needed.
69 |
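A minimal usage sketch from Python (the file paths below are placeholders for your own data and the
matching window size ``ws`` is optional; see the documentation for all parameters):

.. code-block:: python

    from arosics import COREG

    CR = COREG('/path/to/reference_image.bsq',            # reference image
               '/path/to/target_image.bsq',               # image to be shift-corrected
               path_out='/path/to/target_image_coreg.bsq',
               ws=(256, 256))                             # matching window size [pixels]
    CR.correct_shifts()  # computes the global X/Y shift and writes the corrected image
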
70 | Here is an example of a Landsat-8 / Sentinel-2 image pair before and after co-registration using AROSICS:
71 |
72 | .. image:: https://git.gfz-potsdam.de/danschef/arosics/raw/main/docs/images/animation_testcase1_zoom_L8_S2_global_coreg_before_after_900x456.gif
73 |
74 |
75 | Local co-registration - for spatially variable shifts but a bit slower
76 | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
77 |
78 | A dense grid of tie points is automatically computed, and the tie points are subsequently validated using a
79 | multistage workflow. Only those tie points not marked as false-positives are used to compute the parameters of an
80 | affine transformation. Warping of the target image is done using an appropriate resampling technique
81 | (cubic by default).
82 |
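A minimal usage sketch from Python (file paths are placeholders; ``grid_res`` sets the tie point
grid resolution in pixels of the target image):

.. code-block:: python

    from arosics import COREG_LOCAL

    CRL = COREG_LOCAL('/path/to/reference_image.bsq',          # reference image
                      '/path/to/target_image.bsq',             # image to be shift-corrected
                      grid_res=200,                            # tie point grid resolution [pixels]
                      window_size=(256, 256),                  # matching window size [pixels]
                      path_out='/path/to/target_image_coreg_local.bsq')
    CRL.correct_shifts()  # computes and filters tie points, then warps the target image
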
83 | Here is an example of the computed shift vectors after filtering false-positives
84 | (mainly due to clouds in the target image):
85 |
86 | .. image:: https://git.gfz-potsdam.de/danschef/arosics/raw/main/docs/images/shift_vectors_testcase1__900x824.gif
87 |
88 |
89 | For further details check out the `documentation <https://danschef.git-pages.gfz-potsdam.de/arosics/doc/>`__!
90 |
91 |
92 | History / Changelog
93 | -------------------
94 |
95 | You can find a record of recent changes in the AROSICS package
96 | `here <https://git.gfz-potsdam.de/danschef/arosics/blob/main/HISTORY.rst>`__.
97 |
98 |
99 | Credits
100 | -------
101 |
102 | AROSICS was developed by Daniel Scheffler (GFZ German Research Centre for Geosciences) within the context of the
103 | `GeoMultiSens `__ project funded by the German Federal Ministry of Education and Research
104 | (project grant code: 01 IS 14 010 A-C).
105 |
106 | This package was created with Cookiecutter_ and the `audreyr/cookiecutter-pypackage`_ project template.
107 | The test data represent modified Copernicus Sentinel-2 data (ESA 2016). The input data for the figures in the
108 | documentation have been provided by NASA (Landsat-8) and ESA (Sentinel-2).
109 |
110 | .. _Cookiecutter: https://github.com/audreyr/cookiecutter
111 | .. _`audreyr/cookiecutter-pypackage`: https://github.com/audreyr/cookiecutter-pypackage
112 | .. _coverage: https://danschef.git-pages.gfz-potsdam.de/arosics/coverage/
113 | .. _pytest: https://danschef.git-pages.gfz-potsdam.de/arosics/test_reports/report.html
114 | .. _conda: https://docs.conda.io/
115 |
116 |
--------------------------------------------------------------------------------
/arosics/DeShifter.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
4 | #
5 | # Copyright (C) 2017-2024
6 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
7 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
8 | # Germany (https://www.gfz-potsdam.de/)
9 | #
10 | # This software was developed within the context of the GeoMultiSens project funded
11 | # by the German Federal Ministry of Education and Research
12 | # (project grant code: 01 IS 14 010 A-C).
13 | #
14 | # Licensed under the Apache License, Version 2.0 (the "License");
15 | # you may not use this file except in compliance with the License.
16 | # You may obtain a copy of the License at
17 | #
18 | # https://www.apache.org/licenses/LICENSE-2.0
19 | #
20 | # Unless required by applicable law or agreed to in writing, software
21 | # distributed under the License is distributed on an "AS IS" BASIS,
22 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | # See the License for the specific language governing permissions and
24 | # limitations under the License.
25 |
26 | import collections
27 | import time
28 | import warnings
29 | import numpy as np
30 | from typing import Union
31 |
32 | # internal modules
33 | from geoarray import GeoArray
34 | from py_tools_ds.geo.map_info import mapinfo2geotransform, geotransform2mapinfo
35 | from py_tools_ds.geo.coord_grid import is_coord_grid_equal
36 | from py_tools_ds.geo.projection import prj_equal
37 | from py_tools_ds.geo.raster.reproject import warp_ndarray
38 | from py_tools_ds.numeric.vector import find_nearest
39 |
40 | __author__ = 'Daniel Scheffler'
41 |
42 | _dict_rspAlg_rsp_Int = {'nearest': 0, 'bilinear': 1, 'cubic': 2, 'cubic_spline': 3, 'lanczos': 4, 'average': 5,
43 | 'mode': 6, 'max': 7, 'min': 8, 'med': 9, 'q1': 10, 'q3': 11,
44 | 0: 'nearest', 1: 'bilinear', 2: 'cubic', 3: 'cubic_spline', 4: 'lanczos', 5: 'average',
45 | 6: 'mode', 7: 'max', 8: 'min', 9: 'med', 10: 'q1', 11: 'q3'}
46 |
47 |
48 | class DESHIFTER(object):
49 | """
50 | Class to deshift an image array or one of its products by applying previously computed co-registration results.
51 |
52 | See help(DESHIFTER) for documentation.
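
    :Example: a minimal usage sketch (``CR`` is assumed to be a previously executed COREG or
        COREG_LOCAL instance; the input path, band index and output path are hypothetical):

        >>> from arosics import DESHIFTER
        >>> DESHIFTER('/path/to/image_to_deshift.bsq', CR.coreg_info,
        ...           band2process=1,
        ...           path_out='/path/to/image_deshifted.bsq').correct_shifts()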
53 | """
54 |
55 | def __init__(self,
56 | im2shift: Union[GeoArray, str],
57 | coreg_results: dict,
58 | **kwargs
59 | ) -> None:
60 | """Get an instance of DESHIFTER.
61 |
62 | :param im2shift:
63 | path of an image to be de-shifted or alternatively a GeoArray object
64 |
65 | :param dict coreg_results:
66 | the results of the co-registration as given by COREG.coreg_info or COREG_LOCAL.coreg_info
67 |
68 | :keyword int path_out:
69 | /output/directory/filename for coregistered results
70 |
71 | :keyword str fmt_out:
72 | raster file format for output file. ignored if path_out is None. can be any GDAL
73 | compatible raster file format (e.g. 'ENVI', 'GTIFF'; default: ENVI)
74 |
75 | :keyword list out_crea_options:
76 | GDAL creation options for the output image, e.g., ["QUALITY=20", "REVERSIBLE=YES", "WRITE_METADATA=YES"]
77 |
78 | :keyword int band2process:
79 | The index of the band to be processed within the given array (starts with 1),
80 | default = None (all bands are processed)
81 |
82 | :keyword float nodata:
83 | no data value of the image to be de-shifted
84 |
85 | :keyword float out_gsd:
86 | output pixel size in units of the reference coordinate system (default = pixel size of the input array),
87 | given values are overridden by match_gsd=True
88 |
89 | :keyword bool align_grids:
90 | True: align the input coordinate grid to the reference (does not affect the output pixel size as long as
91 | input and output pixel sizes are compatible (5:30 or 10:30 but not 4:30)), default = False
92 |
93 | :keyword bool match_gsd:
94 | True: match the input pixel size to the reference pixel size, default = False
95 |
96 | :keyword list target_xyGrid:
97 | a list with an x-grid and a y-grid like [[15,45], [15,45]].
98 | This overrides 'out_gsd', 'align_grids' and 'match_gsd'.
99 |
100 | :keyword int min_points_local_corr:
101 | number of valid tie points, below which a global shift correction is performed instead of a local
102 | correction (global X/Y shift is then computed as the mean shift of the remaining points)
103 | (default: 5 tie points)
104 |
105 | :keyword str resamp_alg:
106 | the resampling algorithm to be used if necessary
107 | (valid algorithms: nearest, bilinear, cubic, cubic_spline, lanczos, average, mode, max, min, med, q1, q3)
108 |
109 | :keyword bool cliptoextent:
110 | True: clip the input image to its actual bounds while deleting possible no data areas outside the actual
111 | bounds, default = False
112 |
113 | :keyword list clipextent:
114 | xmin, ymin, xmax, ymax - if given the calculation of the actual bounds is skipped.
115 | The given coordinates are automatically snapped to the output grid.
116 |
117 | :keyword int CPUs:
118 | number of CPUs to use (default: None, which means 'all CPUs available')
119 |
120 | :keyword bool progress:
121 | show progress bars (default: True)
122 |
123 | :keyword bool v:
124 | verbose mode (default: False)
125 |
126 | :keyword bool q:
127 | quiet mode (default: False)
128 | """
129 | # private attributes
130 | self._grids_alignable = None
131 |
132 | # store args / kwargs
133 | self.init_args = dict([x for x in locals().items() if x[0] != "self" and not x[0].startswith('__')])
134 | self.init_kwargs = self.init_args['kwargs']
135 |
136 | # unpack args
137 | self.im2shift = im2shift if isinstance(im2shift, GeoArray) else GeoArray(im2shift)
138 | self.GCPList = coreg_results['GCPList'] if 'GCPList' in coreg_results else None
139 | self.ref_gt = coreg_results['reference geotransform']
140 | self.ref_grid = coreg_results['reference grid']
141 | self.ref_prj = coreg_results['reference projection']
142 |
143 | # unpack kwargs
144 | self.path_out = kwargs.get('path_out', None)
145 | self.fmt_out = kwargs.get('fmt_out', 'ENVI')
146 | self.out_creaOpt = kwargs.get('out_crea_options', [])
147 | self.band2process = kwargs.get('band2process', None) # starts with 1 # FIXME why?
148 | self.band2process = \
149 | self.band2process - 1 if self.band2process is not None else None # internally handled as band index
150 | self.nodata = kwargs.get('nodata', self.im2shift.nodata)
151 | self.align_grids = kwargs.get('align_grids', False)
152 | self.min_points_local_corr = kwargs.get('min_points_local_corr', 5)
153 | self.rspAlg = kwargs.get('resamp_alg', 'cubic') # TODO accept also integers
154 | self.cliptoextent = kwargs.get('cliptoextent', False)
155 | self.clipextent = kwargs.get('clipextent', None)
156 | self.CPUs = kwargs.get('CPUs', None)
157 | self.v = kwargs.get('v', False)
158 | self.q = kwargs.get('q', False) if not self.v else False # overridden by v
159 | self.progress = kwargs.get('progress', True) if not self.q else False # overridden by q
160 |
161 | self.im2shift.nodata = kwargs.get('nodata', self.im2shift.nodata)
162 | self.im2shift.q = self.q
163 | self.shift_prj = self.im2shift.projection
164 | self.shift_gt = list(self.im2shift.geotransform)
165 |
166 | # in case of local shift correction and local coreg results contain fewer points than min_points_local_corr:
167 | # force global correction based on mean X/Y shifts
168 | if 'GCPList' in coreg_results and len(coreg_results['GCPList']) < self.min_points_local_corr:
169 | warnings.warn(f'Only {len(self.GCPList)} valid tie point(s) could be identified. A local shift correction '
170 | f'is therefore not reasonable and could cause artifacts in the output image. The target '
171 | f'image is corrected globally with the mean X/Y shift of '
172 | f'{coreg_results["mean_shifts_px"]["x"]:.3f}/'
173 | f'{coreg_results["mean_shifts_px"]["y"]:.3f} pixels.')
174 | self.GCPList = None
175 | coreg_results['updated map info'] = coreg_results['updated map info means']
176 |
177 | # in case of global shift correction -> the updated map info from coreg_results already has the final map info
178 | # BUT: this will be updated in correct_shifts() if clipextent is given or warping is needed
179 | if not self.GCPList:
180 | mapI = coreg_results['updated map info']
181 | self.updated_map_info = mapI or geotransform2mapinfo(self.shift_gt, self.shift_prj)
182 | self.updated_gt = mapinfo2geotransform(self.updated_map_info) or self.shift_gt
183 | self.original_map_info = coreg_results['original map info']
184 | self.updated_projection = self.ref_prj
185 |
186 | self.out_grid = self._get_out_grid() # needs self.ref_grid, self.im2shift
187 | self.out_gsd = [abs(self.out_grid[0][1] - self.out_grid[0][0]),
188 | abs(self.out_grid[1][1] - self.out_grid[1][0])] # xgsd, ygsd
189 |
190 | # assertions
191 | assert self.rspAlg in _dict_rspAlg_rsp_Int.keys(), \
192 | f"'{self.rspAlg}' is not a supported resampling algorithm."
193 | if self.band2process is not None:
194 | assert self.im2shift.bands - 1 >= self.band2process >= 0, \
195 | (f"The {self.im2shift.__class__.__name__} '{self.im2shift.basename}' has {self.im2shift.bands} "
196 | f"{'bands' if self.im2shift.bands > 1 else 'band'}. So 'band2process' must be "
197 | f"{'between 1 and ' if self.im2shift.bands > 1 else ''}{self.im2shift.bands}. "
198 | f"Got {self.band2process + 1}.")
199 |
200 | # set defaults for general class attributes
201 | self.is_shifted = False # this is not included in COREG.coreg_info
202 | self.is_resampled = False # this is not included in COREG.coreg_info
203 | self.tracked_errors = []
204 | self.arr_shifted = None # set by self.correct_shifts
205 | self.GeoArray_shifted = None # set by self.correct_shifts
206 |
207 | def _get_out_grid(self):
208 | # parse given params
209 | out_gsd = self.init_kwargs.get('out_gsd', None)
210 | match_gsd = self.init_kwargs.get('match_gsd', False)
211 | out_grid = self.init_kwargs.get('target_xyGrid', None)
212 |
213 | # assertions
214 | assert out_grid is None or (isinstance(out_grid, (list, tuple)) and len(out_grid) == 2)
215 | assert out_gsd is None or (isinstance(out_gsd, (int, tuple, list)) and len(out_gsd) == 2)
216 |
217 | ref_xgsd, ref_ygsd = (self.ref_grid[0][1] - self.ref_grid[0][0], abs(self.ref_grid[1][1] - self.ref_grid[1][0]))
218 |
219 | def get_grid(gt, xgsd, ygsd): return [[gt[0], gt[0] + xgsd], [gt[3], gt[3] - ygsd]]
220 |
221 | # get out_grid
222 | if out_grid:
223 | # output grid is given
224 | pass
225 |
226 | elif out_gsd:
227 | out_xgsd, out_ygsd = [out_gsd, out_gsd] if isinstance(out_gsd, int) else out_gsd
228 |
229 | if match_gsd and (out_xgsd, out_ygsd) != (ref_xgsd, ref_ygsd):
230 | warnings.warn("\nThe parameter 'match_gsd is ignored because another output ground sampling distance "
231 | "was explicitly given.")
232 | if self.align_grids and \
233 | self._are_grids_alignable(self.im2shift.xgsd, self.im2shift.ygsd, out_xgsd, out_ygsd):
234 | # use grid of reference image with the given output gsd
235 | out_grid = get_grid(self.ref_gt, out_xgsd, out_ygsd)
236 | else: # no grid alignment
237 | # use grid of input image with the given output gsd
238 | out_grid = get_grid(self.im2shift.geotransform, out_xgsd, out_ygsd)
239 |
240 | elif match_gsd:
241 | if self.align_grids:
242 | # use reference grid
243 | out_grid = self.ref_grid
244 | else:
245 | # use grid of input image and reference gsd
246 | out_grid = get_grid(self.im2shift.geotransform, ref_xgsd, ref_ygsd)
247 |
248 | else:
249 | if self.align_grids and \
250 | self._are_grids_alignable(self.im2shift.xgsd, self.im2shift.ygsd, ref_xgsd, ref_ygsd):
251 | # use origin of reference image and gsd of input image
252 | out_grid = get_grid(self.ref_gt, self.im2shift.xgsd, self.im2shift.ygsd)
253 | else:
254 | if not self.GCPList:
255 | # in case of global co-registration:
256 | # -> use the target image grid but update the origin (shift-correction without resampling)
257 | out_grid = get_grid(self.updated_gt, self.im2shift.xgsd, self.im2shift.ygsd)
258 | else:
259 | # in case of local co-registration:
260 | # -> use input image grid
261 | out_grid = get_grid(self.im2shift.geotransform, self.im2shift.xgsd, self.im2shift.ygsd)
262 |
263 | return out_grid
264 |
265 | @property
266 | def warping_needed(self):
267 | """Return True if image warping is needed in consideration of the input parameters of DESHIFTER."""
268 | assert self.out_grid, 'Output grid must be calculated before.'
269 | equal_prj = prj_equal(self.ref_prj, self.shift_prj)
270 | return \
271 | False if (equal_prj and not self.GCPList and is_coord_grid_equal(self.updated_gt, *self.out_grid)) else True
272 |
273 | def _are_grids_alignable(self, in_xgsd, in_ygsd, out_xgsd, out_ygsd):
274 | """Check if the input image pixel grid is alignable to the output grid.
275 |
276 | :param in_xgsd:
277 | :param in_ygsd:
278 | :param out_xgsd:
279 | :param out_ygsd:
280 | :return:
281 | """
282 | if self._grids_alignable is None:
283 | def is_alignable(gsd1, gsd2):
284 | """Check if pixel sizes are divisible."""
285 | return max(gsd1, gsd2) % min(gsd1, gsd2) == 0
286 |
287 | self._grids_alignable = \
288 | False if (not is_alignable(in_xgsd, out_xgsd) or not is_alignable(in_ygsd, out_ygsd)) else True
289 |
290 | if self._grids_alignable is False and not self.q:
291 | warnings.warn(f"\nThe coordinate grid of {self.im2shift.basename} cannot be aligned to the desired "
292 | f"grid because their pixel sizes are not exact multiples of each other (input [X/Y]: "
293 | f"{in_xgsd}/{in_ygsd}; desired [X/Y]: {out_xgsd}/{out_ygsd}). Therefore the original "
294 | f"grid is chosen for the resampled output image. If you don´t like that you can use "
295 | f"the 'out_gsd' or 'match_gsd' parameters to set an appropriate output pixel size or "
296 | f"to allow changing the pixel size.\n")
297 |
298 | return self._grids_alignable
299 |
300 | def _get_out_extent(self):
301 | if self.clipextent is None:
302 | # no clip extent has been given
303 | if self.cliptoextent:
304 | # use actual image corners as clip extent
305 | self.clipextent = self.im2shift.footprint_poly.envelope.bounds
306 | else:
307 | # use outer bounds of the image as clip extent
308 | xmin, xmax, ymin, ymax = self.im2shift.box.boundsMap
309 | self.clipextent = xmin, ymin, xmax, ymax
310 |
311 | # snap clipextent to output grid
312 | # (in case of odd input coords the output coords are moved INSIDE the input array)
313 | xmin, ymin, xmax, ymax = self.clipextent
314 | x_tol, y_tol = float(np.ptp(self.out_grid[0]) / 2000), float(np.ptp(self.out_grid[1]) / 2000) # 2.000th pix
315 | xmin = find_nearest(self.out_grid[0], xmin, roundAlg='on', extrapolate=True, tolerance=x_tol)
316 | ymin = find_nearest(self.out_grid[1], ymin, roundAlg='on', extrapolate=True, tolerance=y_tol)
317 | xmax = find_nearest(self.out_grid[0], xmax, roundAlg='off', extrapolate=True, tolerance=x_tol)
318 | ymax = find_nearest(self.out_grid[1], ymax, roundAlg='off', extrapolate=True, tolerance=y_tol)
319 | return xmin, ymin, xmax, ymax
320 |
321 | def correct_shifts(self) -> collections.OrderedDict:
322 | if not self.q:
323 | print('Correcting geometric shifts...')
324 |
325 | t_start = time.time()
326 |
327 | if not self.warping_needed:
328 | """NO RESAMPLING NEEDED"""
329 |
330 | self.is_shifted = True
331 | self.is_resampled = False
332 | xmin, ymin, xmax, ymax = self._get_out_extent()
333 |
334 | if not self.q:
335 | print("NOTE: The detected shift is corrected by updating the map info of the target image only, i.e., "
336 | "without any resampling. Set the 'align_grids' parameter to True if you need the target and the "
337 | "reference coordinate grids to be aligned.")
338 |
339 | if self.cliptoextent:
340 | # TODO validate results
341 | # TODO -> output extent does not seem to be the requested one! (only relevant if align_grids=False)
342 | # get shifted array
343 | shifted_geoArr = GeoArray(self.im2shift[:], tuple(self.updated_gt), self.shift_prj)
344 |
345 | # clip with target extent
346 | # NOTE: get_mapPos() does not perform any resampling as long as source and target projection are equal
347 | self.arr_shifted, self.updated_gt, self.updated_projection = \
348 | shifted_geoArr.get_mapPos((xmin, ymin, xmax, ymax),
349 | self.shift_prj,
350 | fillVal=self.nodata,
351 | band2get=self.band2process)
352 |
353 | self.updated_map_info = geotransform2mapinfo(self.updated_gt, self.updated_projection)
354 |
355 | else:
356 | # the array stays the same; the updated gt and prj are taken from coreg_info
357 | self.arr_shifted = self.im2shift[:, :, self.band2process] \
358 | if self.band2process is not None else self.im2shift[:]
359 |
360 | out_geoArr = GeoArray(self.arr_shifted, self.updated_gt, self.updated_projection, q=self.q)
361 | out_geoArr.nodata = self.nodata # equals self.im2shift.nodata after __init__()
362 | out_geoArr.metadata = self.im2shift.metadata[[self.band2process]] \
363 | if self.band2process is not None else self.im2shift.metadata
364 |
365 | self.GeoArray_shifted = out_geoArr
366 |
367 | else: # FIXME the case equal_prj==False is NOT yet implemented
368 | """RESAMPLING NEEDED"""
369 | # FIXME avoid reading the whole band if clip_extent is passed
370 |
371 | in_arr = self.im2shift[:, :, self.band2process] \
372 | if self.band2process is not None and self.im2shift.ndim == 3 else self.im2shift[:]
373 |
374 | if not self.GCPList:
375 | # apply XY-shifts to input image gt 'shift_gt' in order to correct the shifts before warping
376 | self.shift_gt[0], self.shift_gt[3] = self.updated_gt[0], self.updated_gt[3]
377 |
378 | # get resampled array
379 | out_arr, out_gt, out_prj = \
380 | warp_ndarray(in_arr, self.shift_gt, self.shift_prj, self.ref_prj,
381 | rspAlg=_dict_rspAlg_rsp_Int[self.rspAlg],
382 | in_nodata=self.nodata,
383 | out_nodata=self.nodata,
384 | out_gsd=self.out_gsd,
385 | out_bounds=self._get_out_extent(), # always returns an extent snapped to the target grid
386 | gcpList=self.GCPList,
387 | # polynomialOrder=str(3),
388 | # options='-refine_gcps 500 1.9',
389 | # warpOptions=['-refine_gcps 500 1.9'],
390 | # options='-wm 10000',# -order 3',
391 | # options=['-order 3'],
392 | # options=['GDAL_CACHEMAX 800 '],
393 | # warpMemoryLimit=125829120, # 120MB
394 | CPUs=self.CPUs,
395 | progress=self.progress,
396 | q=self.q)
397 |
398 | out_geoArr = GeoArray(out_arr, out_gt, out_prj, q=self.q)
399 | out_geoArr.nodata = self.nodata # equals self.im2shift.nodata after __init__()
400 | out_geoArr.metadata = self.im2shift.metadata[[self.band2process]] \
401 | if self.band2process is not None else self.im2shift.metadata
402 |
403 | self.arr_shifted = out_arr
404 | self.updated_gt = out_gt
405 | self.updated_projection = out_prj
406 | self.updated_map_info = geotransform2mapinfo(out_gt, out_prj)
407 | self.GeoArray_shifted = out_geoArr
408 | self.is_shifted = True
409 | self.is_resampled = True
410 |
411 | if self.path_out:
412 | out_geoArr.save(self.path_out, fmt=self.fmt_out, creationOptions=self.out_creaOpt)
413 |
414 | # validation
415 | if not is_coord_grid_equal(self.updated_gt, *self.out_grid, tolerance=1.e-8):
416 | raise RuntimeError(f'DESHIFTER output dataset does not have the desired target pixel grid. Target grid '
417 | f'was {str(self.out_grid)}. Output geotransform is {str(self.updated_gt)}.')
418 | # TODO to be continued (extent, map info, ...)
419 |
420 | if self.v:
421 | print(f'Time for shift correction: {time.time() - t_start:.2f}s')
422 | return self.deshift_results
423 |
424 | @property
425 | def deshift_results(self):
426 | deshift_results = collections.OrderedDict()
427 | deshift_results.update({
428 | 'band': self.band2process,
429 | 'is shifted': self.is_shifted,
430 | 'is resampled': self.is_resampled,
431 | 'updated map info': self.updated_map_info,
432 | 'updated geotransform': self.updated_gt,
433 | 'updated projection': self.updated_projection,
434 | 'arr_shifted': self.arr_shifted,
435 | 'GeoArray_shifted': self.GeoArray_shifted
436 | })
437 | return deshift_results
438 |
439 |
440 | def deshift_image_using_coreg_info(im2shift: Union[GeoArray, str],
441 | coreg_results: dict,
442 | path_out: str = None,
443 | fmt_out: str = 'ENVI',
444 | q: bool = False):
445 | """Correct a geometrically distorted image using previously calculated coregistration info.
446 |
447 | This function can be used for example to correct spatial shifts of mask files using the same transformation
448 | parameters that have been used to correct their source images.
449 |
450 | :param im2shift: path of an image to be de-shifted or alternatively a GeoArray object
451 | :param coreg_results: the results of the co-registration as given by COREG.coreg_info or
452 | COREG_LOCAL.coreg_info respectively
453 | :param path_out: /output/directory/filename for coregistered results. If None, no output is written - only
454 | the shift corrected results are returned.
455 | :param fmt_out: raster file format for output file. ignored if path_out is None. can be any GDAL
456 | compatible raster file format (e.g. 'ENVI', 'GTIFF'; default: ENVI)
457 | :param q: quiet mode (default: False)
458 | :return:
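
    :Example: a minimal sketch (``CR`` is assumed to be a previously executed COREG or
        COREG_LOCAL instance; 'mask.tif' and the output path are hypothetical):

        >>> from arosics.DeShifter import deshift_image_using_coreg_info
        >>> results = deshift_image_using_coreg_info('mask.tif', CR.coreg_info,
        ...                                          path_out='mask_shift_corrected.bsq')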
459 | """
460 | deshift_results = DESHIFTER(im2shift, coreg_results).correct_shifts()
461 |
462 | if path_out:
463 | deshift_results['GeoArray_shifted'].save(path_out, fmt_out=fmt_out, q=q)
464 |
465 | return deshift_results
466 |
--------------------------------------------------------------------------------
/arosics/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
4 | #
5 | # Copyright (C) 2017-2024
6 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
7 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
8 | # Germany (https://www.gfz-potsdam.de/)
9 | #
10 | # This software was developed within the context of the GeoMultiSens project funded
11 | # by the German Federal Ministry of Education and Research
12 | # (project grant code: 01 IS 14 010 A-C).
13 | #
14 | # Licensed under the Apache License, Version 2.0 (the "License");
15 | # you may not use this file except in compliance with the License.
16 | # You may obtain a copy of the License at
17 | #
18 | # https://www.apache.org/licenses/LICENSE-2.0
19 | #
20 | # Unless required by applicable law or agreed to in writing, software
21 | # distributed under the License is distributed on an "AS IS" BASIS,
22 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | # See the License for the specific language governing permissions and
24 | # limitations under the License.
25 |
26 | """Top-level package for arosics."""
27 |
28 | import os as __os
29 |
30 | from arosics.CoReg import COREG
31 | from arosics.CoReg_local import COREG_LOCAL
32 | from arosics.DeShifter import DESHIFTER
33 | from arosics.Tie_Point_Grid import Tie_Point_Grid
34 |
35 | from .version import __version__, __versionalias__ # noqa (E402 + F401)
36 |
37 |
38 | __author__ = """Daniel Scheffler"""
39 | __email__ = 'daniel.scheffler@gfz.de'
40 | __all__ = ['COREG',
41 | 'COREG_LOCAL',
42 | 'DESHIFTER',
43 | 'Tie_Point_Grid',
44 | '__version__',
45 | '__versionalias__'
46 | ]
47 |
48 |
49 | # $PROJ_LIB was renamed to $PROJ_DATA in proj=9.1.1, which leads to issues with fiona>=1.8.20,<1.9
50 | # https://github.com/conda-forge/pyproj-feedstock/issues/130
51 | # -> fix it by setting PROJ_DATA
52 | if 'GDAL_DATA' in __os.environ and 'PROJ_DATA' not in __os.environ and 'PROJ_LIB' not in __os.environ:
53 | __os.environ['PROJ_DATA'] = __os.path.join(__os.path.dirname(__os.environ['GDAL_DATA']), 'proj')
54 |
--------------------------------------------------------------------------------
/arosics/arosics_cli.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | # unicode_literals cause GDAL not to work properly
4 |
5 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
6 | #
7 | # Copyright (C) 2017-2024
8 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
9 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
10 | # Germany (https://www.gfz-potsdam.de/)
11 | #
12 | # This software was developed within the context of the GeoMultiSens project funded
13 | # by the German Federal Ministry of Education and Research
14 | # (project grant code: 01 IS 14 010 A-C).
15 | #
16 | # Licensed under the Apache License, Version 2.0 (the "License");
17 | # you may not use this file except in compliance with the License.
18 | # You may obtain a copy of the License at
19 | #
20 | # https://www.apache.org/licenses/LICENSE-2.0
21 | #
22 | # Unless required by applicable law or agreed to in writing, software
23 | # distributed under the License is distributed on an "AS IS" BASIS,
24 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
25 | # See the License for the specific language governing permissions and
26 | # limitations under the License.
27 |
28 | from __future__ import (division, print_function, absolute_import, unicode_literals)
29 |
30 | import time
31 | import sys
32 | import argparse
33 | from arosics import COREG, COREG_LOCAL, __version__
34 |
35 | __author__ = "Daniel Scheffler"
36 |
37 |
38 | # sub-command functions
39 | def run_global_coreg(args):
40 | COREG_obj = COREG(args.path_ref,
41 | args.path_tgt,
42 | path_out=args.path_out,
43 | fmt_out=args.fmt_out,
44 | r_b4match=args.br,
45 | s_b4match=args.bs,
46 | wp=args.wp,
47 | ws=args.ws,
48 | max_iter=args.max_iter,
49 | max_shift=args.max_shift,
50 | align_grids=args.align_grids,
51 | match_gsd=args.match_gsd,
52 | out_gsd=args.out_gsd,
 53 |                       resamp_alg_calc=args.rsp_alg_calc,
 54 |                       resamp_alg_deshift=args.rsp_alg_deshift,
55 | data_corners_ref=args.cor0,
56 | data_corners_tgt=args.cor1,
57 | nodata=args.nodata,
58 | calc_corners=args.calc_cor,
59 | CPUs=None if args.mp else 1,
60 | force_quadratic_win=args.quadratic_win,
61 | binary_ws=args.bin_ws,
62 | mask_baddata_ref=args.mask_ref,
63 | mask_baddata_tgt=args.mask_tgt,
64 | progress=args.progress,
65 | v=args.v,
66 | path_verbose_out=args.vo,
67 | q=args.q,
68 | ignore_errors=args.ignore_errors)
69 | COREG_obj.correct_shifts()
70 |
71 |
72 | # sub-command functions
73 | def run_local_coreg(args):
74 | CRL = COREG_LOCAL(args.path_ref,
75 | args.path_tgt,
76 | path_out=args.path_out,
77 | fmt_out=args.fmt_out,
78 | grid_res=args.grid_res,
79 | max_points=args.max_points,
80 | r_b4match=args.br,
81 | s_b4match=args.bs,
82 | window_size=args.ws,
83 | max_iter=args.max_iter,
84 | max_shift=args.max_shift,
85 | tieP_filter_level=args.tieP_filter_level,
86 | tieP_random_state=args.tieP_random_state,
87 | min_reliability=args.min_reliability,
88 | rs_max_outlier=args.rs_max_outlier,
89 | rs_tolerance=args.rs_tolerance,
90 | # align_grids=args.align_grids,
91 | # match_gsd=args.match_gsd,
92 | # out_gsd=args.out_gsd,
 93 |                       resamp_alg_calc=args.rsp_alg_calc,
 94 |                       resamp_alg_deshift=args.rsp_alg_deshift,
95 | data_corners_ref=args.cor0,
96 | data_corners_tgt=args.cor1,
97 | nodata=args.nodata,
98 | calc_corners=args.calc_cor,
99 | mask_baddata_ref=args.mask_ref,
100 | mask_baddata_tgt=args.mask_tgt,
101 | CPUs=None if args.mp else 1,
102 | force_quadratic_win=args.quadratic_win,
103 | binary_ws=args.bin_ws,
104 | progress=args.progress,
105 | v=args.v,
106 | q=args.q,
107 | )
108 | CRL.correct_shifts()
109 |
110 |
111 | def get_arosics_argparser():
112 | """Return argument parser for arosics console command."""
113 | parser = argparse.ArgumentParser(
114 | prog='arosics',
115 |
116 | description='Perform automatic subpixel co-registration of two satellite image datasets based on an image '
117 | 'matching approach working in the frequency domain, combined with a multistage workflow for '
118 | 'effective detection of false-positives. Python implementation by Daniel Scheffler '
119 | '(daniel.scheffler [at] gfz-potsdam [dot] de). The scientific background is described in the paper '
120 | 'Scheffler D, Hollstein A, Diedrich H, Segl K, Hostert P. AROSICS: An Automated and Robust '
121 | 'Open-Source Image Co-Registration Software for Multi-Sensor Satellite Data. Remote Sensing. 2017;'
122 |                     ' 9(7):676. (https://www.mdpi.com/2072-4292/9/7/676)',
123 |
124 | epilog="DETAILED DESCRIPTION: AROSICS detects and corrects global as well as local misregistrations between "
125 | "two input images in the subpixel scale, that are often present in satellite imagery. The input images "
126 | "can have any GDAL compatible image format (https://gdal.org/drivers/raster/index.html). Both of them "
127 | "must be approximately geocoded. In case of ENVI files, this means they must have a 'map info' and a "
128 | "'coordinate system string' as attributes of their header file. The input images must have a geographic "
129 | "overlap but clipping them to same geographical extent is NOT necessary. Please do not perform any "
130 | "spatial resampling of the input images before applying this algorithm. Any needed resampling of the "
131 | "data is done automatically. Thus, the input images may have different spatial resolutions. The current "
132 | "algorithm will not perform any ortho-rectification. So please use ortho-rectified input data in order "
133 | "to minimize local shifts in the input images. AROSICS supports local and global co-registration. LOCAL "
134 |                "CO-REGISTRATION: A dense grid of tie points is automatically computed, and the tie points are "
135 | "subsequently validated using a multistage workflow. Only those tie points not marked as "
136 | "false-positives are used to compute the parameters of an affine transformation. Warping of the target "
137 | "image is done using an appropriate resampling technique (cubic by default). GLOBAL CO-REGISTRATION: "
138 | "Only a global X/Y translation is computed within a small subset of the input images (window position "
139 | "is adjustable). This allows very fast co-registration but only corrects for translational (global) X/Y "
140 | "shifts. The calculated subpixel-shifts are (by default) applied to the geocoding information of the "
141 | "output image. No spatial resampling is done automatically as long as both input images have the same "
142 | "projection. If you need the output image to be aligned to the reference image coordinate grid (by "
143 | "using an appropriate resampling algorithm), use the '-align_grids' option. AROSICS is designed to "
144 | "robustly handle the typical difficulties of multi-sensoral/multi-temporal images. Clouds are "
145 | "automatically handled by the implemented outlier detection algorithms. The user may provide "
146 | "user-defined masks to exclude certain image areas from tie point creation. The image overlap area is "
147 | "automatically calculated. Thereby, no-data regions within the images are automatically respected. "
148 | "Providing the map coordinates of the actual data corners lets you save some calculation time, because "
149 | "in this case the automatic algorithm can be skipped. The no-data value of each image is automatically "
150 | "derived from the image corners. The verbose program mode gives some more output about the interim "
151 | "results, shows some figures and writes the used footprint and overlap polygons to disk. Note, that "
152 | "maybe the figures must be manually closed in in order to continue the processing (depending on your "
153 | "Python configuration). For further details regarding the implemented algorithm, example use cases, "
154 | "quality assessment and benchmarks refer to the above mentioned paper (Scheffler et al. 2017).")
155 |
156 | parser.add_argument('--version', action='version', version=__version__)
157 |
158 | #####################
159 | # GENERAL ARGUMENTS #
160 | #####################
161 |
162 | general_opts_parser = argparse.ArgumentParser(add_help=False)
163 | gop_p = general_opts_parser.add_argument
164 | gop_p('path_ref', type=str, help='source path of reference image (any GDAL compatible image format is supported)')
165 |
166 | gop_p('path_tgt', type=str,
167 | help='source path of image to be shifted (any GDAL compatible image format is supported)')
168 |
169 | gop_p('-o', nargs='?', dest='path_out', type=str, default='auto',
170 |           help="target path of the coregistered image. If 'auto' (default: /dir/of/im1/<im1>__shifted_to__<im0>.bsq)")
171 |
172 | gop_p('-fmt_out', nargs='?', type=str, default='ENVI',
173 | help="raster file format for output file. ignored if path_out is None. can "
174 | "be any GDAL compatible raster file format (e.g. 'ENVI', 'GTIFF'; default: ENVI)")
175 |
176 | gop_p('-br', nargs='?', type=int, default=1,
177 | help='band of reference image to be used for matching (starts with 1; default: 1)')
178 |
179 | gop_p('-bs', nargs='?', type=int, default=1,
180 | help='band of shift image to be used for matching (starts with 1; default: 1)')
181 |
182 | gop_p('-ws', nargs=2, metavar=('X size', 'Y size'), type=int, default=(256, 256),
183 | help="custom matching window size [pixels] (default: (256,256))")
184 |
185 | gop_p('-max_iter', nargs='?', type=int, default=5, help="maximum number of iterations for matching (default: 5)")
186 |
187 | gop_p('-max_shift', nargs='?', type=int, default=5,
188 | help="maximum shift distance in reference image pixel units (default: 5 px)")
189 |
190 | gop_p('-rsp_alg_deshift', nargs='?', type=int, choices=list(range(12)), default=2,
191 | help="the resampling algorithm to be used for shift correction (if necessary) "
192 | "(valid algorithms: 0=nearest neighbour, 1=bilinear, 2=cubic, 3=cubic_spline, 4=lanczos, 5=average, "
193 | "6=mode, 7=max, 8=min, 9=med, 10=q1, 11=q3), default: 2")
194 |
195 | gop_p('-rsp_alg_calc', nargs='?', type=int, choices=list(range(12)), default=2,
196 | help="the resampling algorithm to be used for all warping processes during calculation of spatial shifts "
197 | "(valid algorithms: 0=nearest neighbour, 1=bilinear, 2=cubic, 3=cubic_spline, 4=lanczos, 5=average, "
198 | "6=mode, 7=max, 8=min, 9=med, 10=q1, 11=q3), default: 2 (highly recommended)")
199 |
200 | gop_p('-cor0', nargs=8, type=float, help="map coordinates of data corners within reference image: ",
201 | metavar=tuple("UL-X UL-Y UR-X UR-Y LR-X LR-Y LL-X LL-Y".split(' ')), default=None)
202 |
203 | gop_p('-cor1', nargs=8, type=float, help="map coordinates of data corners within image to be shifted: ",
204 | metavar=tuple("UL-X UL-Y UR-X UR-Y LR-X LR-Y LL-X LL-Y".split(' ')), default=None)
205 |
206 | gop_p('-calc_cor', nargs='?', type=int, choices=[0, 1], default=1,
207 | help="calculate true positions of the dataset corners in order to get a useful matching window position "
208 |                "within the actual image overlap (default: 1; deactivated if '-cor0' and '-cor1' are given)")
209 |
210 | gop_p('-nodata', nargs=2, type=float, metavar=('im0', 'im1'),
211 | help='no data values for reference image and image to be shifted', default=(None, None))
212 |
213 | gop_p('-bin_ws', nargs='?', type=int,
214 | help='use binary X/Y dimensions for the matching window (default: 1)', choices=[0, 1], default=1)
215 |
216 | gop_p('-quadratic_win', nargs='?', type=int,
217 | help='force a quadratic matching window (default: 1)', choices=[0, 1], default=1)
218 |
219 | gop_p('-mask_ref', nargs='?', type=str, metavar='file path', default=None,
220 | help="path to a 2D boolean mask file for the reference image where all bad data pixels (e.g. clouds) are "
221 | "marked with True or 1 and the remaining pixels with False or 0. Must have the same geographic extent "
222 |                "and projection as the reference image. The mask is used to check if the chosen matching window "
223 | "position is valid in the sense of useful data. Otherwise this window position is rejected.")
224 |
225 | gop_p('-mask_tgt', nargs='?', type=str, metavar='file path', default=None,
226 | help="path to a 2D boolean mask file for the image to be shifted where all bad data pixels (e.g. clouds) are "
227 | "marked with True or 1 and the remaining pixels with False or 0. Must have the same geographic extent "
228 |                "and projection as the image to be shifted. The mask is used to check if the chosen matching "
229 | "window position is valid in the sense of useful data. Otherwise this window position is rejected.")
230 |
231 | gop_p('-mp', nargs='?', type=int, help='enable multiprocessing (default: 1)', choices=[0, 1], default=1)
232 |
233 | gop_p('-progress', nargs='?', type=int, help='show progress bars (default: 1)', default=1, choices=[0, 1])
234 |
235 | gop_p('-v', nargs='?', type=int, help='verbose mode (default: 0)', choices=[0, 1], default=0)
236 |
237 | gop_p('-q', nargs='?', type=int, help='quiet mode (default: 0)', choices=[0, 1], default=0)
238 |
239 | gop_p('-ignore_errors', nargs='?', type=int, choices=[0, 1], default=0,
240 |           help='useful for batch processing (default: 0). In case of an error, '
241 |                'COREG(_LOCAL).success == False and COREG(_LOCAL).x_shift_px/COREG(_LOCAL).y_shift_px is None')
242 |
243 | # TODO implement footprint_poly_ref, footprint_poly_tgt
244 |
245 | ##############
246 | # SUBPARSERS #
247 | ##############
248 |
249 | subparsers = parser.add_subparsers()
250 |
251 | # TODO add option to apply coreg results to multiple files
252 | #######################
253 | # SUBPARSER FOR COREG #
254 | #######################
255 |
256 | parse_coreg_global = subparsers.add_parser(
257 | 'global', parents=[general_opts_parser], formatter_class=argparse.ArgumentDefaultsHelpFormatter,
258 |         description='Detects and corrects global X/Y shifts between a target and reference image. Geometric shifts are '
259 |                     'calculated at a specific (adjustable) image position. Correction performs a global shifting in '
260 |                     'X- or Y-direction.',
261 | help="detect and correct global X/Y shifts (sub argument parser) - "
262 | "use 'arosics global -h' for documentation and usage hints")
263 |
264 | gloArg = parse_coreg_global.add_argument
265 |
266 | gloArg('-wp', nargs=2, metavar=('X', 'Y'), type=float,
267 |            help="custom matching window position as map values in the same projection as the reference image "
268 | "(default: central position of image overlap)", default=(None, None))
269 |
270 | gloArg('-align_grids', nargs='?', type=int, choices=[0, 1],
271 | help='align the coordinate grids of the output image to the reference image (default: 0)', default=0)
272 |
273 | gloArg('-match_gsd', nargs='?', type=int, choices=[0, 1],
274 | help='match the output pixel size to the pixel size of the reference image (default: 0)', default=0)
275 |
276 | gloArg('-out_gsd', nargs=2, type=float, metavar=('xgsd', 'ygsd'),
277 | help='xgsd ygsd: set the output pixel size in map units (default: original pixel size of the image to be '
278 | 'shifted)')
279 |
280 |     gloArg('-vo', nargs='?', type=str, default=None,
281 |            help='an optional output directory for outputs of verbose mode '
282 |                 '(if not given, no outputs are written to disk)')
283 |
284 | parse_coreg_global.set_defaults(func=run_global_coreg)
285 |
286 | #############################
287 | # SUBPARSER FOR COREG LOCAL #
288 | #############################
289 |
290 | parse_coreg_local = subparsers.add_parser(
291 | 'local', parents=[general_opts_parser], formatter_class=argparse.ArgumentDefaultsHelpFormatter,
292 | description='Applies the algorithm to detect spatial shifts to the whole overlap area of the input images. '
293 |                     'Spatial shifts are calculated for each point of a grid whose parameters can be adjusted '
294 | 'using keyword arguments. Shift correction performs a polynomial transformation using the '
295 |                     'calculated shifts of each point in the grid as GCPs. Thus, this command can be used to correct '
296 | 'for locally varying geometric distortions of the target image.',
297 |         help="detect and correct local shifts (sub argument parser) - "
298 | "use 'arosics local -h' for documentation and usage hints")
299 |
300 | locArg = parse_coreg_local.add_argument
301 |
302 | locArg('grid_res', type=int, help='tie point grid resolution in pixels of the target image')
303 |
304 | locArg('-max_points', nargs='?', type=int,
305 | help="maximum number of points used to find coregistration tie points. NOTE: Points are selected randomly "
306 |                 "from the given point grid (specified by 'grid_res'). If the grid does not provide enough points, all "
307 | "available points are chosen.")
308 |
309 | locArg('-projectDir', nargs='?', type=str, help=None, default=None)
310 |
311 | locArg('-tieP_filter_level', nargs='?', type=int, default=3, choices=[0, 1, 2, 3],
312 | help="filter tie points used for shift correction in different levels (default: 3). NOTE: lower levels are "
313 | "also included if a higher level is chosen. Level 0: no tie point filtering; Level 1: Reliability "
314 | "filtering - filter all tie points out that have a low reliability according to internal tests; "
315 | "Level 2: SSIM filtering - filters all tie points out where shift correction does not increase image "
316 |                 "similarity within the matching window (measured by mean structural similarity index); "
317 | "Level 3: RANSAC outlier detection")
318 |
319 | locArg('-tieP_random_state', nargs='?', type=int, default=0,
320 | help="Tie point sampling random state. An integer corresponds to a fixed/pseudo-random state, "
321 | "None selects tie points randomly. Only used if the number of computed valid tie points exceeds "
322 | "the given max_points threshold or if more than 7000 tie points are available for image warping.")
323 |
324 | locArg('-min_reliability', nargs='?', type=float, default=60,
325 | help="Tie point filtering: minimum reliability threshold, below which tie points are marked as "
326 | "false-positives (default: 60 percent) - accepts values between 0 (no reliability) and 100 (perfect "
327 |                 "reliability). HINT: decrease this value in case of a poor signal-to-noise ratio of your input data")
328 |
329 | locArg('-rs_max_outlier', nargs='?', type=float, default=10,
330 | help="RANSAC tie point filtering: proportion of expected outliers (default: 10 percent)")
331 |
332 | locArg('-rs_tolerance', nargs='?', type=float, default=2.5,
333 | help="RANSAC tie point filtering: percentage tolerance for max_outlier_percentage (default: 2.5 percent)")
334 |
335 | parse_coreg_local.set_defaults(func=run_local_coreg)
336 |
337 | return parser
338 |
339 |
340 | def main():
341 | from socket import gethostname
342 | from datetime import datetime as dt
343 | from getpass import getuser
344 |
345 | def wfa(p, c):
346 | try:
347 | with open(p, 'a') as of:
348 | of.write(c)
349 | except Exception: # noqa
350 | pass
351 |
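    # NOTE: this logs each CLI call (timestamp, user, host, passed arguments) to a developer-specific path;
    #       wfa() silently does nothing if that path is not writable on the current machine.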
352 | wfa('/misc/hy5/scheffler/tmp/crlf', f"{dt.now()}\t{getuser()}\t{gethostname()}\t{' '.join(sys.argv)}\n")
353 |
354 | argparser = get_arosics_argparser()
355 | parsed_args = argparser.parse_args()
356 |
357 | if len(sys.argv) == 1:
358 | # no arguments provided
359 | print(
360 | f'======================================================================\n'
361 | f'# AROSICS v{__version__} #\n'
362 | f'# An Automated and Robust Open-Source Image Co-Registration Software #\n'
363 | f'# for Multi-Sensor Satellite Data #\n'
364 | f'# - Python implementation by Daniel Scheffler #\n'
365 | f'======================================================================\n')
366 | argparser.print_help()
367 | else:
368 | t0 = time.time()
369 | parsed_args.func(parsed_args)
370 | print('\ntotal processing time: %.2fs' % (time.time() - t0))
371 |
372 |
373 | if __name__ == "__main__":
374 | sys.exit(main()) # pragma: no cover
375 |
--------------------------------------------------------------------------------
/arosics/geometry.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
4 | #
5 | # Copyright (C) 2017-2024
6 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
7 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
8 | # Germany (https://www.gfz-potsdam.de/)
9 | #
10 | # This software was developed within the context of the GeoMultiSens project funded
11 | # by the German Federal Ministry of Education and Research
12 | # (project grant code: 01 IS 14 010 A-C).
13 | #
14 | # Licensed under the Apache License, Version 2.0 (the "License");
15 | # you may not use this file except in compliance with the License.
16 | # You may obtain a copy of the License at
17 | #
18 | # https://www.apache.org/licenses/LICENSE-2.0
19 | #
20 | # Unless required by applicable law or agreed to in writing, software
21 | # distributed under the License is distributed on an "AS IS" BASIS,
22 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | # See the License for the specific language governing permissions and
24 | # limitations under the License.
25 |
26 | import warnings
27 | import sys
28 | import os
29 | from typing import Union
30 |
31 | # custom
32 | import numpy as np
33 | from geopandas import GeoDataFrame
34 |
35 | # internal modules
36 | from py_tools_ds.geo.coord_calc import calc_FullDataset_corner_positions
37 | from py_tools_ds.geo.coord_trafo import pixelToMapYX, imYX2mapYX
38 | from py_tools_ds.geo.raster.reproject import warp_ndarray
39 | from geoarray import GeoArray
40 |
41 | __author__ = 'Daniel Scheffler'
42 |
43 |
44 | def angle_to_north(XY):
45 | """Calculate the angle in degrees of a given line to north in clockwise direction.
46 |
47 | Angle definition: between [origin:[0,0],north:[0,1]] and [origin:[0,0],pointXY:[X,Y]].
48 | """
49 | XY = np.array(XY)
50 | XYarr = XY if len(XY.shape) == 2 else XY.reshape((1, 2))
51 | return np.abs(np.degrees(np.array(np.arctan2(XYarr[:, 1], XYarr[:, 0]) - np.pi / 2)) % 360)
52 |
53 |
54 | def get_true_corner_mapXY(fPath_or_geoarray, band=0, noDataVal=None, mp=1, v=0, q=0): # pragma: no cover
55 | """Return the actual map corner coordinates of a given image file or GeoArray instance.
56 |
57 | :param fPath_or_geoarray:
58 | :param band: index of the band to be used (starting with 0)
59 | :param noDataVal:
60 | :param mp:
61 | :param v:
62 | :param q:
63 | :return:
64 | """
65 | # FIXME this function is not used anymore
 66 |     warnings.warn('This function is not in use anymore. Use it at your own risk!', DeprecationWarning)
67 | geoArr = GeoArray(fPath_or_geoarray) if not isinstance(fPath_or_geoarray, GeoArray) else fPath_or_geoarray
68 |
69 | rows, cols = geoArr.shape[:2]
70 | gt, prj = geoArr.geotransform, geoArr.projection
71 |
 72 |     assert gt and prj, 'GeoTransform and projection must be given for the calculation of LonLat corner coordinates.'
73 |
74 | mask_1bit = np.zeros((rows, cols), dtype='uint8') # zeros -> image area later overwritten by ones
75 |
76 | if noDataVal is None:
77 | mask_1bit[:, :] = 1
78 | elif noDataVal == 'ambiguous':
 79 |         warnings.warn("The no-data value could not be automatically detected. Thus, the matching window used for shift "
 80 |                       "calculation had to be centered in the middle of the overlap area without respecting no-data "
 81 |                       "values. To avoid this, provide the correct no-data values for the reference and shift image via "
82 | "'-nodata'")
83 | mask_1bit[:, :] = 1
84 | else:
85 | band_data = geoArr[band] # TODO implement gdal_ReadAsArray_mp (reading in multiprocessing)
86 | mask_1bit[band_data != noDataVal] = 1
87 |
88 | if v:
89 | print('detected no data value', noDataVal)
90 |
91 | try:
92 | corner_coords_YX = calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='shapely')
93 | except Exception:
94 | if v:
95 | warnings.warn(f"\nCalculation of corner coordinates failed within algorithm 'shapely' "
96 | f"(Exception: {sys.exc_info()[1]}). Using algorithm 'numpy' instead.")
97 | # FIXME numpy algorithm returns wrong values for S2A_OPER_MSI_L1C_TL_SGS__20160608T153121_A005024_T33UUU_B03.jp2
98 | # FIXME (Hannes)
99 | corner_coords_YX = \
100 | calc_FullDataset_corner_positions(mask_1bit, assert_four_corners=False, algorithm='numpy')
101 |
102 | if len(corner_coords_YX) == 4: # this avoids shapely self intersection
103 | corner_coords_YX = list(np.array(corner_coords_YX)[[0, 1, 3, 2]]) # UL, UR, LL, LR => UL, UR, LR, LL
104 |
105 | # check if enough unique coordinates have been found
106 | if not len(GeoDataFrame(corner_coords_YX).drop_duplicates().values) >= 3:
107 | if not q:
108 | warnings.warn('\nThe algorithm for automatically detecting the actual image coordinates did not find '
109 | 'enough unique corners. Using outer image corner coordinates instead.')
110 | corner_coords_YX = ((0, 0), (0, cols - 1), (rows - 1, 0), (rows - 1, cols - 1))
111 |
112 | # check if all points are unique
113 | # all_coords_are_unique = len([UL, UR, LL, LR]) == len(GeoDataFrame([UL, UR, LL, LR]).drop_duplicates().values)
114 | # UL, UR, LL, LR = \
115 | # (UL, UR, LL, LR) if all_coords_are_unique else ((0, 0), (0, cols-1), (rows-1, 0), (rows-1, cols-1))
116 |
117 | def get_mapYX(YX): return pixelToMapYX(list(reversed(YX)), geotransform=gt, projection=prj)[0]
118 | corner_pos_XY = [list(reversed(i)) for i in [get_mapYX(YX) for YX in corner_coords_YX]]
119 | return corner_pos_XY
120 |
121 |
122 | def get_subset_GeoTransform(gt_fullArr, subset_box_imYX):
123 | gt_subset = list(gt_fullArr[:]) # copy
124 | gt_subset[3], gt_subset[0] = imYX2mapYX(subset_box_imYX[0], gt_fullArr)
125 | return gt_subset
126 |
127 |
128 | def get_gdalReadInputs_from_boxImYX(boxImYX):
129 |     """Return col_start, row_start, cols_count, rows_count; boxImYX is expected as [UL_YX, UR_YX, LR_YX, LL_YX]."""
130 | rS, cS = boxImYX[0]
131 | clip_sz_x = abs(boxImYX[1][1] - boxImYX[0][1]) # URx-ULx
132 | clip_sz_y = abs(boxImYX[0][0] - boxImYX[3][0]) # ULy-LLy
133 | return cS, rS, clip_sz_x, clip_sz_y
134 |
135 |
136 | def get_GeoArrayPosition_from_boxImYX(boxImYX):
137 |     """Return row_start, row_end, col_start, col_end; boxImYX is expected as [UL_YX, UR_YX, LR_YX, LL_YX]."""
138 | rS, cS = boxImYX[0] # UL
139 | rE, cE = boxImYX[2] # LR
140 | return rS, rE - 1, cS, cE - 1 # -1 because boxImYX represents outer box and includes the LR corner of LR pixel
141 |
142 |
143 | def has_metaRotation(path_or_geoarray: Union[GeoArray, str]):
144 |     """Return True if the GDAL GeoTransform of the given image contains a row or column rotation term."""
145 | gt = GeoArray(path_or_geoarray).gt
146 |
147 | return gt[2] or gt[4]
148 |
149 |
150 | def remove_metaRotation(gA_rot: GeoArray, rspAlg='cubic') -> GeoArray:
151 | """Remove any metadata rotation (a rotation that only exists in the map info)."""
152 | gA = GeoArray(*warp_ndarray(gA_rot[:], gA_rot.gt, gA_rot.prj,
153 | rspAlg=rspAlg,
154 | # out_gsd=(gA_rot.xgsd, gA_rot.ygsd)
155 | ),
156 | nodata=gA_rot.nodata)
157 |     gA.basename = os.path.basename(gA_rot.basename)
158 |     gA.meta = gA_rot.meta
159 |
160 | return gA
161 |
--------------------------------------------------------------------------------
/arosics/plotting.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
4 | #
5 | # Copyright (C) 2017-2024
6 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
7 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
8 | # Germany (https://www.gfz-potsdam.de/)
9 | #
10 | # This software was developed within the context of the GeoMultiSens project funded
11 | # by the German Federal Ministry of Education and Research
12 | # (project grant code: 01 IS 14 010 A-C).
13 | #
14 | # Licensed under the Apache License, Version 2.0 (the "License");
15 | # you may not use this file except in compliance with the License.
16 | # You may obtain a copy of the License at
17 | #
18 | # https://www.apache.org/licenses/LICENSE-2.0
19 | #
20 | # Unless required by applicable law or agreed to in writing, software
21 | # distributed under the License is distributed on an "AS IS" BASIS,
22 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | # See the License for the specific language governing permissions and
24 | # limitations under the License.
25 |
26 | import numpy as np
27 |
28 |
29 | def _norm(array, normto):
30 | return [float(i) * (normto / max(array)) for i in array]
31 |
32 |
33 | def subplot_2dline(XY_tuples, titles=None, shapetuple=None, grid=False):
34 | from matplotlib import pyplot as plt
35 |
36 | shapetuple = (1, len(XY_tuples)) if shapetuple is None else shapetuple
37 | assert titles is None or len(titles) == len(XY_tuples), \
38 | 'List in titles keyword must have the same length as the passed XY_tuples.'
39 | fig = plt.figure(figsize=_norm(plt.figaspect(shapetuple[0] / shapetuple[1] * 1.), 10))
40 | for i, XY in enumerate(XY_tuples):
41 | ax = fig.add_subplot(shapetuple[0], shapetuple[1], i + 1)
42 | X, Y = XY
43 | ax.plot(X, Y, linestyle='-')
44 | if titles is not None:
45 | ax.set_title(titles[i])
46 | if grid:
47 | ax.grid(which='major', axis='both', linestyle='-')
48 | plt.tight_layout()
49 | plt.show(block=True)
50 |
51 | return fig
52 |
53 |
54 | def subplot_imshow(ims, titles=None, shapetuple=None, grid=False):
55 | from matplotlib import pyplot as plt
56 |
57 | ims = [ims] if not isinstance(ims, list) else ims
58 | assert titles is None or len(titles) == len(ims), 'Error: Got more or less titles than images.'
59 |
60 | shapetuple = (1, len(ims)) if shapetuple is None else shapetuple
 61 |     fig, axes = plt.subplots(shapetuple[0], shapetuple[1], squeeze=False,  # always returns a 2D array of axes
 62 |                              figsize=_norm(plt.figaspect(shapetuple[0] / shapetuple[1] * 1.), 20))
 63 |     [axes.flat[i].imshow(im, cmap='gray', interpolation='none', vmin=np.percentile(im, 2), vmax=np.percentile(im, 98))
 64 |      for i, im in enumerate(ims)]
 65 |     if titles is not None:
 66 |         [axes.flat[i].set_title(titles[i]) for i in range(len(ims))]
 67 |     if grid:
 68 |         [axes.flat[i].grid(which='major', axis='both', linestyle='-') for i in range(len(ims))]
69 | plt.tight_layout()
70 | plt.show(block=True)
71 |
72 | return fig
73 |
74 |
75 | def subplot_3dsurface(ims, shapetuple=None):
76 | from matplotlib import pyplot as plt
77 | from mpl_toolkits.mplot3d import Axes3D # noqa: F401 # this is needed for fig.add_subplot(..., projection='3d')
78 |
79 | ims = [ims] if not isinstance(ims, list) else ims
80 | shapetuple = (1, len(ims)) if shapetuple is None else shapetuple
81 | fig = plt.figure(figsize=_norm(plt.figaspect((shapetuple[0] / 2.) / shapetuple[1] * 1.), 20))
82 | for i, im in enumerate(ims):
83 | ax = fig.add_subplot(shapetuple[0], shapetuple[1], i + 1, projection='3d')
84 | x = np.arange(0, im.shape[0], 1)
85 | y = np.arange(0, im.shape[1], 1)
86 | X, Y = np.meshgrid(x, y)
87 | Z = im.reshape(X.shape)
88 | ax.plot_surface(X, Y, Z, cmap=plt.get_cmap('hot'))
89 | ax.contour(X, Y, Z, zdir='x', cmap=plt.get_cmap('coolwarm'), offset=0)
90 | ax.contour(X, Y, Z, zdir='y', cmap=plt.get_cmap('coolwarm'), offset=im.shape[1])
91 | ax.set_xlabel('X')
92 | ax.set_ylabel('Y')
93 | ax.set_zlabel('Z')
94 | plt.tight_layout()
95 | plt.show(block=True)
96 |
97 | return fig
98 |
--------------------------------------------------------------------------------
/arosics/version.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
4 | #
5 | # Copyright (C) 2017-2024
6 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
7 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
8 | # Germany (https://www.gfz-potsdam.de/)
9 | #
10 | # This software was developed within the context of the GeoMultiSens project funded
11 | # by the German Federal Ministry of Education and Research
12 | # (project grant code: 01 IS 14 010 A-C).
13 | #
14 | # Licensed under the Apache License, Version 2.0 (the "License");
15 | # you may not use this file except in compliance with the License.
16 | # You may obtain a copy of the License at
17 | #
18 | # https://www.apache.org/licenses/LICENSE-2.0
19 | #
20 | # Unless required by applicable law or agreed to in writing, software
21 | # distributed under the License is distributed on an "AS IS" BASIS,
22 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | # See the License for the specific language governing permissions and
24 | # limitations under the License.
25 |
26 |
27 | __version__ = '1.12.1'
28 | __versionalias__ = '2024-12-11_01'
29 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS = -T
6 | SPHINXBUILD = sphinx-build
7 | PAPER =
8 | BUILDDIR = _build
9 |
10 | # User-friendly check for sphinx-build
11 | ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1)
12 | $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from https://sphinx-doc.org/)
13 | endif
14 |
15 | # Internal variables.
16 | PAPEROPT_a4 = -D latex_paper_size=a4
17 | PAPEROPT_letter = -D latex_paper_size=letter
18 | ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
19 | # the i18n builder cannot share the environment and doctrees with the others
20 | I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
21 |
22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
23 |
24 | help:
 25 | 	@echo "Please use \`make <target>' where <target> is one of"
26 | @echo " html to make standalone HTML files"
27 | @echo " dirhtml to make HTML files named index.html in directories"
28 | @echo " singlehtml to make a single large HTML file"
29 | @echo " pickle to make pickle files"
30 | @echo " json to make JSON files"
31 | @echo " htmlhelp to make HTML files and a HTML help project"
32 | @echo " qthelp to make HTML files and a qthelp project"
33 | @echo " devhelp to make HTML files and a Devhelp project"
34 | @echo " epub to make an epub"
35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
36 | @echo " latexpdf to make LaTeX files and run them through pdflatex"
37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
38 | @echo " text to make text files"
39 | @echo " man to make manual pages"
40 | @echo " texinfo to make Texinfo files"
41 | @echo " info to make Texinfo files and run them through makeinfo"
42 | @echo " gettext to make PO message catalogs"
43 | @echo " changes to make an overview of all changed/added/deprecated items"
44 | @echo " xml to make Docutils-native XML files"
45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes"
46 | @echo " linkcheck to check all external links for integrity"
47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)"
48 |
49 | clean:
50 | rm -rf $(BUILDDIR)/*
51 |
52 | html:
53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
54 | @echo
55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
56 |
57 | dirhtml:
58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
59 | @echo
60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
61 |
62 | singlehtml:
63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
64 | @echo
65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
66 |
67 | pickle:
68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
69 | @echo
70 | @echo "Build finished; now you can process the pickle files."
71 |
72 | json:
73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
74 | @echo
75 | @echo "Build finished; now you can process the JSON files."
76 |
77 | htmlhelp:
78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
79 | @echo
80 | @echo "Build finished; now you can run HTML Help Workshop with the" \
81 | ".hhp project file in $(BUILDDIR)/htmlhelp."
82 |
83 | qthelp:
84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
85 | @echo
86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \
87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/arosics.qhcp"
89 | @echo "To view the help file:"
90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/arosics.qhc"
91 |
92 | devhelp:
93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
94 | @echo
95 | @echo "Build finished."
96 | @echo "To view the help file:"
97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/arosics"
98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/arosics"
99 | @echo "# devhelp"
100 |
101 | epub:
102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
103 | @echo
104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub."
105 |
106 | latex:
107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
108 | @echo
109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \
111 | "(use \`make latexpdf' here to do that automatically)."
112 |
113 | latexpdf:
114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
115 | @echo "Running LaTeX files through pdflatex..."
116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf
117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
118 |
119 | latexpdfja:
120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
121 | @echo "Running LaTeX files through platex and dvipdfmx..."
122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
124 |
125 | text:
126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
127 | @echo
128 | @echo "Build finished. The text files are in $(BUILDDIR)/text."
129 |
130 | man:
131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
132 | @echo
133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man."
134 |
135 | texinfo:
136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
137 | @echo
138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
139 | @echo "Run \`make' in that directory to run these through makeinfo" \
140 | "(use \`make info' here to do that automatically)."
141 |
142 | info:
143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
144 | @echo "Running Texinfo files through makeinfo..."
145 | make -C $(BUILDDIR)/texinfo info
146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
147 |
148 | gettext:
149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
150 | @echo
151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
152 |
153 | changes:
154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
155 | @echo
156 | @echo "The overview file is in $(BUILDDIR)/changes."
157 |
158 | linkcheck:
159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
160 | @echo
161 | @echo "Link check complete; look for any errors in the above output " \
162 | "or in $(BUILDDIR)/linkcheck/output.txt."
163 |
164 | doctest:
165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
166 | @echo "Testing of doctests in the sources finished, look at the " \
167 | "results in $(BUILDDIR)/doctest/output.txt."
168 |
169 | xml:
170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
171 | @echo
172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml."
173 |
174 | pseudoxml:
175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
176 | @echo
177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."
178 |
--------------------------------------------------------------------------------
/docs/_static/custom.css:
--------------------------------------------------------------------------------
1 | .wy-nav-content {
2 | max-width: 1200px !important;
3 | }
4 |
--------------------------------------------------------------------------------
/docs/about.rst:
--------------------------------------------------------------------------------
1 | =====
2 | About
3 | =====
4 |
5 | .. image:: images/arosics_logo.png
6 | :width: 150px
7 | :alt: AROSICS Logo
8 |
9 | Perform automatic subpixel co-registration of two satellite image datasets based on an image matching approach working
10 | in the frequency domain, combined with a multistage workflow for effective detection of false-positives. Python
11 | implementation by Daniel Scheffler (daniel.scheffler [at] gfz-potsdam [dot] de).
12 |
13 | AROSICS detects and corrects local as well as global misregistrations between two input images in the subpixel scale,
14 | that are often present in satellite imagery. It is designed to robustly handle the typical difficulties of
15 | multi-sensoral/multi-temporal images. Clouds are automatically handled by the implemented outlier detection algorithms.
16 | The user may provide user-defined masks to exclude certain image areas from tie point creation. The image overlap area
17 | is automatically calculated.
18 |
19 | For detailed algorithm description and use cases refer to the corresponding (open-access) paper that can be found here:
20 | `Scheffler D, Hollstein A, Diedrich H, Segl K, Hostert P. AROSICS: An Automated and Robust Open-Source Image
21 | Co-Registration Software for Multi-Sensor Satellite Data. Remote Sensing. 2017; 9(7):676
 22 | <https://www.mdpi.com/2072-4292/9/7/676>`__.
23 |
24 |
25 | * GitLab Repository: https://git.gfz-potsdam.de/danschef/arosics/
26 | * Documentation: https://danschef.git-pages.gfz-potsdam.de/arosics/doc/
27 |
28 |
29 |
30 | Feature overview
31 | ----------------
32 |
33 | Global co-registration - fast but only for static X/Y-shifts
34 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
35 |
36 | Only a global X/Y translation is computed within a small subset of the input images (window position is adjustable).
37 | This allows very fast co-registration but only corrects for translational (global) X/Y shifts.
38 | The calculated subpixel-shifts are (by default) applied to the geocoding information of the output image.
39 | No spatial resampling is done automatically as long as both input images have the same projection. However, AROSICS
 40 | also allows you to align the output image to the reference image coordinate grid if needed.
41 |
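A minimal Python sketch of this workflow (the file paths are placeholders; both images are assumed
to be approximately geocoded):

.. code-block:: python

    from arosics import COREG

    # reference image and image to be shifted (any GDAL compatible format; paths are placeholders)
    CR = COREG('/path/to/reference_image.tif', '/path/to/target_image.tif',
               path_out='/path/to/target_image_coreg.tif',
               ws=(256, 256))              # matching window size in pixels
    CR.calculate_spatial_shifts()          # compute the global X/Y shift within the matching window
    CR.correct_shifts()                    # apply it (by default to the geocoding information only)
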
42 | Here is an example of a Landsat-8 / Sentinel-2 image pair before and after co-registration using AROSICS:
43 |
44 | .. image:: images/animation_testcase1_zoom_L8_S2_global_coreg_before_after_900x456.gif
45 |
46 |
47 | Local co-registration - for spatially variable shifts but a bit slower
48 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
49 |
 50 | A dense grid of tie points is automatically computed, and the tie points are subsequently validated using a
51 | multistage workflow. Only those tie points not marked as false-positives are used to compute the parameters of an
52 | affine transformation. Warping of the target image is done using an appropriate resampling technique
53 | (cubic by default).
54 |
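A minimal Python sketch of this workflow (the file paths and the grid resolution are placeholders):

.. code-block:: python

    from arosics import COREG_LOCAL

    # grid_res is the tie point grid resolution in pixels of the target image
    CRL = COREG_LOCAL('/path/to/reference_image.tif', '/path/to/target_image.tif',
                      grid_res=200,
                      path_out='/path/to/target_image_coreg.tif')
    CRL.correct_shifts()                   # compute, filter and apply the tie point based correction
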
55 | Here is an example of the computed shift vectors after filtering false-positives
56 | (mainly due to clouds in the target image):
57 |
58 | .. image:: images/shift_vectors_testcase1__900x824.gif
59 |
--------------------------------------------------------------------------------
/docs/api_cli_reference.rst:
--------------------------------------------------------------------------------
1 | ############################
2 | Python API and CLI reference
3 | ############################
4 |
5 | AROSICS offers two interfaces.
6 |
7 | | 1. You can use AROSICS as a **Python package**
 8 | |    (e.g., to embed it into your Python code; see the minimal sketch below):
9 |
10 | .. toctree::
11 | :maxdepth: 4
12 |
13 | modules.rst
14 |
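The main entry points of the Python API are importable directly from the top-level package
(a minimal sketch):

.. code-block:: python

    import arosics
    from arosics import COREG, COREG_LOCAL, DESHIFTER, Tie_Point_Grid

    print(arosics.__version__)
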
15 |
16 | | 2. You can use AROSICS from the **command line**
17 | | (you don't need to be familiar with Python in that case):
18 |
19 | .. toctree::
20 | :maxdepth: 4
21 |
22 | cli_reference.rst
23 |
--------------------------------------------------------------------------------
/docs/authors.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../AUTHORS.rst
2 |
--------------------------------------------------------------------------------
/docs/cli_reference.rst:
--------------------------------------------------------------------------------
1 | Command line interface reference
2 | ********************************
3 |
4 | arosics
5 | -------
6 |
7 | At the command line, arosics provides the **arosics** command:
8 |
9 | .. argparse::
10 | :filename: ./../arosics/arosics_cli.py
11 | :func: get_arosics_argparser
12 | :prog: arosics
13 |
14 |
15 | .. note::
16 |
17 | The verbose program mode gives some more output about the interim results,
18 | shows some figures and writes the used footprint and overlap polygons to disk.
19 | The figures may have to be closed manually in order to continue the processing
20 | (depending on your Python configuration).
21 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 | #
4 | # arosics documentation build configuration file, created by
5 | # sphinx-quickstart on Tue Jul 9 22:26:36 2013.
6 | #
7 | # This file is execfile()d with the current directory set to its
8 | # containing dir.
9 | #
10 | # Note that not all possible configuration values are present in this
11 | # autogenerated file.
12 | #
13 | # All configuration values have a default; values that are commented out
14 | # serve to show the default.
15 |
16 | import sys
17 | import os
18 |
19 | # If extensions (or modules to document with autodoc) are in another
20 | # directory, add these directories to sys.path here. If the directory is
21 | # relative to the documentation root, use os.path.abspath to make it
22 | # absolute, like shown here.
23 | sys.path.insert(0, os.path.abspath('.'))
24 |
 25 | # Get the project root dir, which is the parent dir of this directory.
26 | cwd = os.getcwd()
27 | project_root = os.path.dirname(cwd)
28 |
29 | # Insert the project root dir as the first element in the PYTHONPATH.
30 | # This lets us ensure that the source package is imported, and that its
31 | # version is used.
32 | sys.path.insert(0, project_root)
33 |
34 | import arosics # noqa E402
35 |
36 | # -- General configuration ---------------------------------------------
37 |
38 | # If your documentation needs a minimal Sphinx version, state it here.
39 | # needs_sphinx = '1.0'
40 |
41 | # Add any Sphinx extension module names here, as strings. They can be
42 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
43 | extensions = [
44 | 'sphinx.ext.autodoc',
45 | 'sphinx.ext.githubpages',
46 | 'sphinx.ext.viewcode',
47 | 'sphinx.ext.todo',
48 | 'sphinxarg.ext',
49 | 'sphinx_autodoc_typehints',
50 | 'sphinx.ext.intersphinx'
51 | ]
52 |
53 | # Add any paths that contain templates here, relative to this directory.
54 | templates_path = ['_templates']
55 |
56 | # The suffix of source filenames.
57 | # source_suffix = ['.rst', '.md']
58 | source_suffix = '.rst'
59 |
60 | # The encoding of source files.
61 | # source_encoding = 'utf-8-sig'
62 |
63 | # The master toctree document.
64 | master_doc = 'index'
65 |
66 | # General information about the project.
67 | project = u'arosics'
68 | copyright = u"2017-2024, Daniel Scheffler"
69 |
70 | # The version info for the project you're documenting, acts as replacement
71 | # for |version| and |release|, also used in various other places throughout
72 | # the built documents.
73 | #
74 | # The short X.Y version.
75 | version = arosics.__version__
76 | # The full version, including alpha/beta/rc tags.
77 | release = arosics.__version__
78 |
79 | # The language for content autogenerated by Sphinx. Refer to documentation
80 | # for a list of supported languages.
81 | # language = None
82 |
83 | # There are two options for replacing |today|: either, you set today to
84 | # some non-false value, then it is used:
85 | # today = ''
86 | # Else, today_fmt is used as the format for a strftime call.
87 | # today_fmt = '%B %d, %Y'
88 |
89 | # List of patterns, relative to source directory, that match files and
90 | # directories to ignore when looking for source files.
91 | # This pattern also affects html_static_path and html_extra_path .
92 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
93 |
94 | # The reST default role (used for this markup: `text`) to use for all
95 | # documents.
96 | # default_role = None
97 |
98 | # If true, '()' will be appended to :func: etc. cross-reference text.
99 | # add_function_parentheses = True
100 |
101 | # If true, the current module name will be prepended to all description
102 | # unit titles (such as .. function::).
103 | # add_module_names = True
104 |
105 | # If true, sectionauthor and moduleauthor directives will be shown in the
106 | # output. They are ignored by default.
107 | # show_authors = False
108 |
109 | # The name of the Pygments (syntax highlighting) style to use.
110 | pygments_style = 'sphinx'
111 |
112 | # A list of ignored prefixes for module index sorting.
113 | # modindex_common_prefix = []
114 |
115 | # If true, keep warnings as "system message" paragraphs in the built
116 | # documents.
117 | # keep_warnings = False
118 |
119 | # Define how to document class docstrings
120 | # '__init__' documents only the __init__ methods, 'class' documents only the class methods and 'both' documents both
121 | autoclass_content = 'both'
122 |
123 | # If true, `todo` and `todoList` produce output, else they produce nothing.
124 | todo_include_todos = True
125 |
126 |
127 | # Apply custom sphinx styles (e.g., increase content width of generated docs)
128 | def setup(app):
129 | app.add_css_file('custom.css')
130 |
131 |
132 | # Add mappings for intersphinx extension (allows to link to the API reference of other sphinx documentations)
133 | intersphinx_mapping = {
134 | 'geoarray': ('https://danschef.git-pages.gfz-potsdam.de/geoarray/doc/', None),
135 | 'python': ('https://docs.python.org/3', None),
136 | }
137 |
138 |
139 | # -- Options for HTML output -------------------------------------------
140 |
141 | # The theme to use for HTML and HTML Help pages. See the documentation for
142 | # a list of builtin themes.
143 | # html_theme = 'default'
144 | html_theme = 'sphinx_rtd_theme' # The one installed via pip install sphinx_rtd_theme in the .gitlab.yml
145 |
146 | # Theme options are theme-specific and customize the look and feel of a
147 | # theme further. For a list of options available for each theme, see the
148 | # documentation.
149 | html_theme_options = {
150 | 'canonical_url': '',
151 | 'analytics_id': '',
152 | 'logo_only': False,
153 | 'display_version': True,
154 | 'prev_next_buttons_location': 'bottom',
155 | 'style_external_links': False,
156 | 'vcs_pageview_mode': 'view',
157 | # Toc options
158 | 'collapse_navigation': True,
159 | 'sticky_navigation': True,
160 | 'navigation_depth': 4,
161 | 'includehidden': True,
162 | 'titles_only': False
163 | }
164 |
165 | # Add any paths that contain custom themes here, relative to this directory.
166 | # html_theme_path = []
167 |
168 | # The name for this set of Sphinx documents. If None, it defaults to
169 | # " v documentation".
170 | # "<project> v<release> documentation".
171 |
172 | # A shorter title for the navigation bar. Default is the same as
173 | # html_title.
174 | # html_short_title = None
175 |
176 | # The name of an image file (relative to this directory) to place at the
177 | # top of the sidebar.
178 | html_logo = 'images/arosics_logo.png'
179 |
180 | # The name of an image file (within the static path) to use as favicon
181 | # of the docs. This file should be a Windows icon file (.ico) being
182 | # 16x16 or 32x32 pixels large.
183 | # html_favicon = None
184 |
185 | # Add any paths that contain custom static files (such as style sheets)
186 | # here, relative to this directory. They are copied after the builtin
187 | # static files, so a file named "default.css" will overwrite the builtin
188 | # "default.css".
189 | html_static_path = ['_static']
190 |
191 | # If not '', a 'Last updated on:' timestamp is inserted at every page
192 | # bottom, using the given strftime format.
193 | # html_last_updated_fmt = '%b %d, %Y'
194 |
195 | # If true, SmartyPants will be used to convert quotes and dashes to
196 | # typographically correct entities.
197 | # html_use_smartypants = True
198 |
199 | # Custom sidebar templates, maps document names to template names.
200 | # html_sidebars = {}
201 |
202 | # Additional templates that should be rendered to pages, maps page names
203 | # to template names.
204 | # html_additional_pages = {}
205 |
206 | # If false, no module index is generated.
207 | # html_domain_indices = True
208 |
209 | # If false, no index is generated.
210 | # html_use_index = True
211 |
212 | # If true, the index is split into individual pages for each letter.
213 | # html_split_index = False
214 |
215 | # If true, links to the reST sources are added to the pages.
216 | # html_show_sourcelink = True
217 |
218 | # If true, "Created using Sphinx" is shown in the HTML footer.
219 | # Default is True.
220 | # html_show_sphinx = True
221 |
222 | # If true, "(C) Copyright ..." is shown in the HTML footer.
223 | # Default is True.
224 | # html_show_copyright = True
225 |
226 | # If true, an OpenSearch description file will be output, and all pages
227 | # will contain a <link> tag referring to it. The value of this option
228 | # must be the base URL from which the finished HTML is served.
229 | # html_use_opensearch = ''
230 |
231 | # This is the file name suffix for HTML files (e.g. ".xhtml").
232 | # html_file_suffix = None
233 |
234 | # Output file base name for HTML help builder.
235 | htmlhelp_basename = 'arosicsdoc'
236 |
237 |
238 | # -- Options for LaTeX output ------------------------------------------
239 |
240 | latex_elements = {
241 | # The paper size ('letterpaper' or 'a4paper').
242 | # 'papersize': 'letterpaper',
243 |
244 | # The font size ('10pt', '11pt' or '12pt').
245 | # 'pointsize': '10pt',
246 |
247 | # Additional stuff for the LaTeX preamble.
248 | # 'preamble': '',
249 | }
250 |
251 | # Grouping the document tree into LaTeX files. List of tuples
252 | # (source start file, target name, title, author, documentclass
253 | # [howto/manual]).
254 | latex_documents = [
255 | ('index', 'arosics.tex',
256 | u'arosics Documentation',
257 | u'Daniel Scheffler', 'manual'),
258 | ]
259 |
260 | # The name of an image file (relative to this directory) to place at
261 | # the top of the title page.
262 | # latex_logo = None
263 |
264 | # For "manual" documents, if this is true, then toplevel headings
265 | # are parts, not chapters.
266 | # latex_use_parts = False
267 |
268 | # If true, show page references after internal links.
269 | # latex_show_pagerefs = False
270 |
271 | # If true, show URL addresses after external links.
272 | # latex_show_urls = False
273 |
274 | # Documents to append as an appendix to all manuals.
275 | # latex_appendices = []
276 |
277 | # If false, no module index is generated.
278 | # latex_domain_indices = True
279 |
280 |
281 | # -- Options for manual page output ------------------------------------
282 |
283 | # One entry per manual page. List of tuples
284 | # (source start file, name, description, authors, manual section).
285 | man_pages = [
286 | ('index', 'arosics',
287 | u'arosics Documentation',
288 | [u'Daniel Scheffler'], 1)
289 | ]
290 |
291 | # If true, show URL addresses after external links.
292 | # man_show_urls = False
293 |
294 |
295 | # -- Options for Texinfo output ----------------------------------------
296 |
297 | # Grouping the document tree into Texinfo files. List of tuples
298 | # (source start file, target name, title, author,
299 | # dir menu entry, description, category)
300 | texinfo_documents = [
301 | ('index', 'arosics',
302 | u'arosics Documentation',
303 | u'Daniel Scheffler',
304 | 'arosics',
305 |      'An automated and robust open-source image co-registration software for multi-sensor satellite data.',
306 | 'Miscellaneous'),
307 | ]
308 |
309 | # Documents to append as an appendix to all manuals.
310 | # texinfo_appendices = []
311 |
312 | # If false, no module index is generated.
313 | # texinfo_domain_indices = True
314 |
315 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
316 | # texinfo_show_urls = 'footnote'
317 |
318 | # If true, do not generate a @detailmenu in the "Top" node's menu.
319 | # texinfo_no_detailmenu = False
320 |
--------------------------------------------------------------------------------
/docs/contributing.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../CONTRIBUTING.rst
2 |
--------------------------------------------------------------------------------
/docs/history.rst:
--------------------------------------------------------------------------------
1 | .. include:: ../HISTORY.rst
2 |
--------------------------------------------------------------------------------
/docs/images/CoregPoints_table.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/docs/images/CoregPoints_table.png
--------------------------------------------------------------------------------
/docs/images/animation_testcase1_zoom_L8_S2_global_coreg_before_after_1066x540.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/docs/images/animation_testcase1_zoom_L8_S2_global_coreg_before_after_1066x540.gif
--------------------------------------------------------------------------------
/docs/images/animation_testcase1_zoom_L8_S2_global_coreg_before_after_900x456.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/docs/images/animation_testcase1_zoom_L8_S2_global_coreg_before_after_900x456.gif
--------------------------------------------------------------------------------
/docs/images/arosics_logo.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/docs/images/arosics_logo.png
--------------------------------------------------------------------------------
/docs/images/output_40_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/docs/images/output_40_1.png
--------------------------------------------------------------------------------
/docs/images/output_44_1.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/docs/images/output_44_1.png
--------------------------------------------------------------------------------
/docs/images/shift_vectors_testcase1__900x824.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/docs/images/shift_vectors_testcase1__900x824.gif
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | AROSICS documentation
2 | =====================
3 |
4 | **This documentation is structured as follows:**
5 |
6 | .. toctree::
7 | :maxdepth: 2
8 |
9 | about
10 | installation
11 | usage
12 | api_cli_reference
13 | contributing
14 | authors
15 | history
16 |
17 | Indices and tables
18 | ==================
19 |
20 | * :ref:`genindex`
21 | * :ref:`modindex`
22 | * :ref:`search`
23 |
--------------------------------------------------------------------------------
/docs/installation.rst:
--------------------------------------------------------------------------------
1 | ============
2 | Installation
3 | ============
4 |
5 | Using Anaconda or Miniconda (recommended)
6 | -----------------------------------------
7 |
8 | Using conda_ (latest version recommended), AROSICS is installed as follows:
9 |
10 |
11 | 1. Create a virtual environment for arosics (optional but recommended):
12 |
13 | .. code-block:: bash
14 |
15 | $ conda create -c conda-forge --name arosics python=3
16 | $ conda activate arosics
17 |
18 |
19 | 2. Then install AROSICS itself:
20 |
21 | .. code-block:: bash
22 |
23 | $ conda install -c conda-forge 'arosics>=1.3.0'
24 |
25 |
26 | This is the preferred method to install AROSICS, as it always installs the most recent stable release and
27 | automatically resolves all the dependencies.
28 |
29 |
30 | Using pip (not recommended)
31 | ---------------------------
32 |
33 | There is also a `pip`_ installer for AROSICS. However, please note that AROSICS depends on some
34 | open-source packages that may cause problems when installed with pip. Therefore, we strongly recommend
35 | resolving the following dependencies before running the pip installer (one possible way is sketched below):
36 |
37 | * cartopy
38 | * gdal
39 | * geopandas
40 | * joblib >=1.3.0
41 | * matplotlib
42 | * numpy
43 | * pandas
44 | * pykrige
45 | * pyproj >2.2.0
46 | * scikit-image >=0.21.0
47 | * shapely
48 |
49 | .. note::
50 |
51 | The gdal library must be installed before numpy. Otherwise, do a
52 | `re-installation `_
53 | in order to respect the build isolation.
54 |
55 |
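If conda_ is available anyway, one possible way to satisfy these requirements beforehand is to pre-install
them from the conda-forge channel. This is only a sketch - adjust the package selection and versions to your
environment:

.. code-block:: bash

    $ conda install -c conda-forge cartopy gdal geopandas 'joblib>=1.3.0' matplotlib \
          numpy pandas pykrige 'pyproj>2.2.0' 'scikit-image>=0.21.0' shapely
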
56 | Then, the pip installer can be run by:
57 |
58 | .. code-block:: bash
59 |
60 | $ pip install arosics
61 |
62 | If you don't have `pip`_ installed, this `Python installation guide`_ can guide
63 | you through the process.
64 |
65 |
66 |
67 | .. note::
68 |
69 | AROSICS has been tested with Python 3.8+. It should be fully compatible with all Python versions
70 | from 3.8 onwards. Python 2.7 support was dropped in AROSICS 1.3 due to its end of life status.
71 |
72 |
73 | .. _pip: https://pip.pypa.io
74 | .. _Python installation guide: https://docs.python-guide.org/en/latest/starting/installation/
75 | .. _conda: https://docs.conda.io
76 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | REM Command file for Sphinx documentation
4 |
5 | if "%SPHINXBUILD%" == "" (
6 | set SPHINXBUILD=sphinx-build
7 | )
8 | set BUILDDIR=_build
9 | set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
10 | set I18NSPHINXOPTS=%SPHINXOPTS% .
11 | if NOT "%PAPER%" == "" (
12 | set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
13 | set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
14 | )
15 |
16 | if "%1" == "" goto help
17 |
18 | if "%1" == "help" (
19 | :help
20 | echo.Please use `make ^<target^>` where ^<target^> is one of
21 | echo. html to make standalone HTML files
22 | echo. dirhtml to make HTML files named index.html in directories
23 | echo. singlehtml to make a single large HTML file
24 | echo. pickle to make pickle files
25 | echo. json to make JSON files
26 | echo. htmlhelp to make HTML files and a HTML help project
27 | echo. qthelp to make HTML files and a qthelp project
28 | echo. devhelp to make HTML files and a Devhelp project
29 | echo. epub to make an epub
30 | echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
31 | echo. text to make text files
32 | echo. man to make manual pages
33 | echo. texinfo to make Texinfo files
34 | echo. gettext to make PO message catalogs
35 | echo. changes to make an overview over all changed/added/deprecated items
36 | echo. xml to make Docutils-native XML files
37 | echo. pseudoxml to make pseudoxml-XML files for display purposes
38 | echo. linkcheck to check all external links for integrity
39 | echo. doctest to run all doctests embedded in the documentation if enabled
40 | goto end
41 | )
42 |
43 | if "%1" == "clean" (
44 | for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
45 | del /q /s %BUILDDIR%\*
46 | goto end
47 | )
48 |
49 |
50 | %SPHINXBUILD% 2> nul
51 | if errorlevel 9009 (
52 | echo.
53 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
54 | echo.installed, then set the SPHINXBUILD environment variable to point
55 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
56 | echo.may add the Sphinx directory to PATH.
57 | echo.
58 | echo.If you don't have Sphinx installed, grab it from
59 | echo.https://sphinx-doc.org/
60 | exit /b 1
61 | )
62 |
63 | if "%1" == "html" (
64 | %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
65 | if errorlevel 1 exit /b 1
66 | echo.
67 | echo.Build finished. The HTML pages are in %BUILDDIR%/html.
68 | goto end
69 | )
70 |
71 | if "%1" == "dirhtml" (
72 | %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
73 | if errorlevel 1 exit /b 1
74 | echo.
75 | echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
76 | goto end
77 | )
78 |
79 | if "%1" == "singlehtml" (
80 | %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
81 | if errorlevel 1 exit /b 1
82 | echo.
83 | echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
84 | goto end
85 | )
86 |
87 | if "%1" == "pickle" (
88 | %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
89 | if errorlevel 1 exit /b 1
90 | echo.
91 | echo.Build finished; now you can process the pickle files.
92 | goto end
93 | )
94 |
95 | if "%1" == "json" (
96 | %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
97 | if errorlevel 1 exit /b 1
98 | echo.
99 | echo.Build finished; now you can process the JSON files.
100 | goto end
101 | )
102 |
103 | if "%1" == "htmlhelp" (
104 | %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
105 | if errorlevel 1 exit /b 1
106 | echo.
107 | echo.Build finished; now you can run HTML Help Workshop with the ^
108 | .hhp project file in %BUILDDIR%/htmlhelp.
109 | goto end
110 | )
111 |
112 | if "%1" == "qthelp" (
113 | %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
114 | if errorlevel 1 exit /b 1
115 | echo.
116 | echo.Build finished; now you can run "qcollectiongenerator" with the ^
117 | .qhcp project file in %BUILDDIR%/qthelp, like this:
118 | echo.^> qcollectiongenerator %BUILDDIR%\qthelp\arosics.qhcp
119 | echo.To view the help file:
120 | echo.^> assistant -collectionFile %BUILDDIR%\qthelp\arosics.ghc
121 | goto end
122 | )
123 |
124 | if "%1" == "devhelp" (
125 | %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
126 | if errorlevel 1 exit /b 1
127 | echo.
128 | echo.Build finished.
129 | goto end
130 | )
131 |
132 | if "%1" == "epub" (
133 | %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
134 | if errorlevel 1 exit /b 1
135 | echo.
136 | echo.Build finished. The epub file is in %BUILDDIR%/epub.
137 | goto end
138 | )
139 |
140 | if "%1" == "latex" (
141 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
142 | if errorlevel 1 exit /b 1
143 | echo.
144 | echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
145 | goto end
146 | )
147 |
148 | if "%1" == "latexpdf" (
149 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
150 | cd %BUILDDIR%/latex
151 | make all-pdf
152 | cd %BUILDDIR%/..
153 | echo.
154 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
155 | goto end
156 | )
157 |
158 | if "%1" == "latexpdfja" (
159 | %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
160 | cd %BUILDDIR%/latex
161 | make all-pdf-ja
162 | cd %BUILDDIR%/..
163 | echo.
164 | echo.Build finished; the PDF files are in %BUILDDIR%/latex.
165 | goto end
166 | )
167 |
168 | if "%1" == "text" (
169 | %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
170 | if errorlevel 1 exit /b 1
171 | echo.
172 | echo.Build finished. The text files are in %BUILDDIR%/text.
173 | goto end
174 | )
175 |
176 | if "%1" == "man" (
177 | %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
178 | if errorlevel 1 exit /b 1
179 | echo.
180 | echo.Build finished. The manual pages are in %BUILDDIR%/man.
181 | goto end
182 | )
183 |
184 | if "%1" == "texinfo" (
185 | %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
186 | if errorlevel 1 exit /b 1
187 | echo.
188 | echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
189 | goto end
190 | )
191 |
192 | if "%1" == "gettext" (
193 | %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
194 | if errorlevel 1 exit /b 1
195 | echo.
196 | echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
197 | goto end
198 | )
199 |
200 | if "%1" == "changes" (
201 | %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
202 | if errorlevel 1 exit /b 1
203 | echo.
204 | echo.The overview file is in %BUILDDIR%/changes.
205 | goto end
206 | )
207 |
208 | if "%1" == "linkcheck" (
209 | %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
210 | if errorlevel 1 exit /b 1
211 | echo.
212 | echo.Link check complete; look for any errors in the above output ^
213 | or in %BUILDDIR%/linkcheck/output.txt.
214 | goto end
215 | )
216 |
217 | if "%1" == "doctest" (
218 | %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
219 | if errorlevel 1 exit /b 1
220 | echo.
221 | echo.Testing of doctests in the sources finished, look at the ^
222 | results in %BUILDDIR%/doctest/output.txt.
223 | goto end
224 | )
225 |
226 | if "%1" == "xml" (
227 | %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
228 | if errorlevel 1 exit /b 1
229 | echo.
230 | echo.Build finished. The XML files are in %BUILDDIR%/xml.
231 | goto end
232 | )
233 |
234 | if "%1" == "pseudoxml" (
235 | %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
236 | if errorlevel 1 exit /b 1
237 | echo.
238 | echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
239 | goto end
240 | )
241 |
242 | :end
243 |
--------------------------------------------------------------------------------
/docs/usage.rst:
--------------------------------------------------------------------------------
1 | .. _usage:
2 |
3 | ##################
4 | Usage instructions
5 | ##################
6 |
7 | In this section you can find some advice on how to use AROSICS for the detection and correction
8 | of misregistrations that are locally or globally present in your input data.
9 |
10 |
11 | .. todo::
12 |
13 | This section is not yet complete but will be continuously updated in the future.
14 | If you are missing a topic, feel free to suggest new entries!
15 |
16 |
17 | .. toctree::
18 | :maxdepth: 4
19 |
20 | usage/input_data_requirements.rst
21 | usage/global_coreg.rst
22 | usage/local_coreg.rst
23 |
24 |
25 | .. seealso::
26 |
27 | For details regarding the implemented algorithm, example use cases, quality assessment and benchmarks,
28 | refer to the (open-access) paper about AROSICS:
29 | `Scheffler et al. 2017 <https://www.mdpi.com/2072-4292/9/7/676>`__
30 |
--------------------------------------------------------------------------------
/docs/usage/global_coreg.rst:
--------------------------------------------------------------------------------
1 | Global image co-registration
2 | ****************************
3 |
4 |
5 | Use the class :class:`arosics.COREG` to detect and correct global spatial shifts between a reference and a target image.
6 | It computes a global X/Y shift based on a single matching window at a customizable position.
7 |
8 |
9 | Using the Python API
10 | --------------------
11 |
12 | calculate spatial shifts - with input data on disk
13 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
14 |
15 | .. code-block:: python
16 |
17 | >>> from arosics import COREG
18 |
19 | >>> im_reference = '/path/to/your/ref_image.bsq'
20 | >>> im_target = '/path/to/your/tgt_image.bsq'
21 |
22 | >>> CR = COREG(im_reference, im_target, wp=(354223, 5805559), ws=(256,256))
23 | >>> CR.calculate_spatial_shifts()
24 |
25 | Calculating actual data corner coordinates for reference image...
26 | Corner coordinates of reference image:
27 | [[319090.0, 5790510.0], [351800.0, 5899940.0], [409790.0, 5900040.0], [409790.0, 5790250.0], [319090.0, 5790250.0]]
28 | Calculating actual data corner coordinates for image to be shifted...
29 | Corner coordinates of image to be shifted:
30 | [[319460.0, 5790510.0], [352270.0, 5900040.0], [409790.0, 5900040.0], [409790.0, 5790250.0], [319460.0, 5790250.0]]
31 | Matching window position (X,Y): 354223/5805559
32 | Detected integer shifts (X/Y): 0/-2
33 | Detected subpixel shifts (X/Y): 0.357885632465/0.433837319984
34 | Calculated total shifts in fft pixel units (X/Y): 0.357885632465/-1.56616268002
35 | Calculated total shifts in reference pixel units (X/Y): 0.357885632465/-1.56616268002
36 | Calculated total shifts in target pixel units (X/Y): 0.357885632465/-1.56616268002
37 | Calculated map shifts (X,Y): 3.578856324660592 15.661626799963415
38 | Original map info: ['UTM', 1, 1, 300000.0, 5900040.0, 10.0, 10.0, 33, 'North', 'WGS-84']
39 | Updated map info: ['UTM', 1, 1, '300003.57885632466', '5900055.6616268', 10.0, 10.0, 33, 'North', 'WGS-84']
40 |
41 |
42 | calculate spatial shifts - without any disk access
43 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
44 |
45 | First, create some example input images for AROSICS in-memory
46 | (we use instances of :class:`GeoArray` for that):
47 |
48 | .. code-block:: python
49 |
50 | >>> from geoarray import GeoArray
51 | >>> from arosics import COREG
52 |
53 | >>> im_reference = '/path/to/your/ref_image.bsq'
54 | >>> im_target = '/path/to/your/tgt_image.bsq'
55 |
56 | # get a sample numpy array with corresponding geoinformation as reference image
57 | >>> geoArr = GeoArray(im_reference)
58 |
59 | >>> ref_ndarray = geoArr[:] # numpy.ndarray with shape (10980, 10980)
60 | >>> ref_gt = geoArr.geotransform # GDAL geotransform: (300000.0, 10.0, 0.0, 5900040.0, 0.0, -10.0)
61 | >>> ref_prj = geoArr.projection # projection as WKT string ('PROJCS["WGS 84 / UTM zone 33N....')
62 |
63 | # get a sample numpy array with corresponding geoinformation as target image
64 | >>> geoArr = GeoArray(im_target)
65 |
66 | >>> tgt_ndarray = geoArr[:] # numpy.ndarray with shape (10980, 10980)
67 | >>> tgt_gt = geoArr.geotransform # GDAL geotransform: (300000.0, 10.0, 0.0, 5900040.0, 0.0, -10.0)
68 | >>> tgt_prj = geoArr.projection # projection as WKT string ('PROJCS["WGS 84 / UTM zone 33N....')
69 |
70 | # create in-memory instances of GeoArray from the numpy array data, the GDAL geotransform tuple and the WKT
71 | # projection string
72 | >>> geoArr_reference = GeoArray(ref_ndarray, ref_gt, ref_prj)
73 | >>> geoArr_target = GeoArray(tgt_ndarray, tgt_gt, tgt_prj)
74 |
75 |
76 | Now pass these in-memory :class:`GeoArray` instances to :class:`arosics.COREG`
77 | and calculate spatial shifts:
78 |
79 | .. code-block:: python
80 |
81 | >>> CR = COREG(geoArr_reference, geoArr_target, wp=(354223, 5805559), ws=(256,256))
82 | >>> CR.calculate_spatial_shifts()
83 |
84 | Calculating actual data corner coordinates for reference image...
85 | Corner coordinates of reference image:
86 | [[300000.0, 5848140.0], [409790.0, 5848140.0], [409790.0, 5790250.0], [300000.0, 5790250.0]]
87 | Calculating actual data corner coordinates for image to be shifted...
88 | Corner coordinates of image to be shifted:
89 | [[300000.0, 5847770.0], [409790.0, 5847770.0], [409790.0, 5790250.0], [300000.0, 5790250.0]]
90 | Matching window position (X,Y): 354223/5805559
91 | Detected integer shifts (X/Y): 0/-2
92 | Detected subpixel shifts (X/Y): 0.357885632465/0.433837319984
93 | Calculated total shifts in fft pixel units (X/Y): 0.357885632465/-1.56616268002
94 | Calculated total shifts in reference pixel units (X/Y): 0.357885632465/-1.56616268002
95 | Calculated total shifts in target pixel units (X/Y): 0.357885632465/-1.56616268002
96 | Calculated map shifts (X,Y): 3.578856324660592/15.661626799963415
97 | Calculated absolute shift vector length in map units: 16.065328089207995
98 | Calculated angle of shift vector in degrees from North: 192.8717191970359
99 | Original map info: ['UTM', 1, 1, 300000.0, 5900040.0, 10.0, 10.0, 33, 'North', 'WGS-84']
100 | Updated map info: ['UTM', 1, 1, '300003.57885632466', '5900055.6616268', 10.0, 10.0, 33, 'North', 'WGS-84']
101 |
102 | 'success'
103 |
104 |
105 | correct shifts
106 | ~~~~~~~~~~~~~~
107 |
108 | :meth:`CR.correct_shifts() <arosics.COREG.correct_shifts>` returns an
109 | :class:`OrderedDict` containing the co-registered
110 | numpy array and its corresponding geoinformation.
111 |
112 | .. code-block:: python
113 |
114 | >>> CR.correct_shifts()
115 |
116 | OrderedDict([('band', None),
117 | ('is shifted', True),
118 | ('is resampled', False),
119 | ('updated map info',
120 | ['UTM',
121 | 1,
122 | 1,
123 | 300003.57885632466,
124 | 5900025.6616268,
125 | 10.0,
126 | 10.0,
127 | 33,
128 | 'North',
129 | 'WGS-84']),
130 | ('updated geotransform',
131 | [300000.0, 10.0, 0.0, 5900040.0, 0.0, -10.0]),
132 | ('updated projection',
133 | 'PROJCS["WGS 84 / UTM zone 33N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AXIS["Latitude",NORTH],AXIS["Longitude",EAST],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32633"]]'),
134 | ('arr_shifted', array([[ 0, 0, 0, ..., 953, 972, 1044],
135 | [ 0, 0, 0, ..., 1001, 973, 1019],
136 | [ 0, 0, 0, ..., 953, 985, 1020],
137 | ...,
138 | [ 0, 0, 0, ..., 755, 763, 773],
139 | [ 0, 0, 0, ..., 760, 763, 749],
140 | [9999, 9999, 9999, ..., 9999, 9999, 9999]], dtype=uint16)),
141 | ('GeoArray_shifted',
142 | )])
143 |
144 |
145 | To write the co-registered image to disk, the :class:`arosics.COREG` class needs to be instantiated with a file path passed to
146 | the keyword argument 'path_out'. The output raster format can be any format supported by GDAL.
147 | Find a list of supported formats here: https://gdal.org/drivers/raster/index.html
148 |
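A minimal sketch (the output path is a placeholder; 'fmt_out' is used here in the same way as in the
AROSICS test suite):

.. code-block:: python

    >>> CR = COREG(im_reference, im_target,
    >>>            wp=(354223, 5805559), ws=(256, 256),
    >>>            path_out='/path/to/your/coregistered_image.bsq',
    >>>            fmt_out='ENVI')
    >>> CR.calculate_spatial_shifts()
    >>> CR.correct_shifts()  # also writes the corrected image to 'path_out'
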
149 |
150 | apply detected shifts to multiple images
151 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
152 |
153 | Sometimes it can be useful to apply the same shifts to multiple images, e.g., to different mask images derived from
154 | the same satellite dataset. For this purpose, you can calculate spatial shifts using the :class:`arosics.COREG` class
155 | (see above) and then apply the calculated shifts to multiple images using the :class:`arosics.DESHIFTER` class.
156 | Take a look at the keyword arguments of the :class:`arosics.DESHIFTER` class when you need further adjustments
157 | (e.g., output paths for the corrected images, an aligned output grid, ...).
158 |
159 | .. code-block:: python
160 |
161 | >>> from arosics import DESHIFTER
162 |
163 | >>> DESHIFTER(im_target1, CR.coreg_info).correct_shifts()
164 | >>> DESHIFTER(im_target2, CR.coreg_info).correct_shifts()
165 |
166 | OrderedDict([('band', None),
167 | ('is shifted', True),
168 | ('is resampled', False),
169 | ('updated map info',
170 | ['UTM',
171 | 1,
172 | 1,
173 | 300003.57885632466,
174 | 5900025.6616268,
175 | 10.0,
176 | 10.0,
177 | 33,
178 | 'North',
179 | 'WGS-84']),
180 | ('updated geotransform',
181 | [300000.0, 10.0, 0.0, 5900040.0, 0.0, -10.0]),
182 | ('updated projection',
183 | 'PROJCS["WGS 84 / UTM zone 33N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AXIS["Latitude",NORTH],AXIS["Longitude",EAST],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32633"]]'),
184 | ('arr_shifted', array([[ 0, 0, 0, ..., 953, 972, 1044],
185 | [ 0, 0, 0, ..., 1001, 973, 1019],
186 | [ 0, 0, 0, ..., 953, 985, 1020],
187 | ...,
188 | [ 0, 0, 0, ..., 755, 763, 773],
189 | [ 0, 0, 0, ..., 760, 763, 749],
190 | [9999, 9999, 9999, ..., 9999, 9999, 9999]], dtype=uint16)),
191 | ('GeoArray_shifted',
192 | )])
193 |
194 |
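If the corrected images should also be written to disk, an output path can be passed per image. Note that
'path_out' and 'fmt_out' are assumed here to be accepted by :class:`arosics.DESHIFTER` analogously to
:class:`arosics.COREG` - please check the API reference:

.. code-block:: python

    >>> DESHIFTER(im_target1, CR.coreg_info,
    >>>           path_out='/path/to/your/tgt_image1_deshifted.bsq',
    >>>           fmt_out='ENVI').correct_shifts()
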
195 | ----
196 |
197 |
198 | Using the Shell console
199 | -----------------------
200 |
201 | The help instructions of the console interface can be accessed like this:
202 |
203 | .. code-block:: bash
204 |
205 | $ arosics -h
206 |
207 | Follow these instructions to run AROSICS from a shell console. For example, the simplest call for a global
208 | co-registration would look like this:
209 |
210 | .. code-block:: bash
211 |
212 | $ arosics global /path/to/your/ref_image.bsq /path/to/your/tgt_image.bsq
213 |
--------------------------------------------------------------------------------
/docs/usage/input_data_requirements.rst:
--------------------------------------------------------------------------------
1 | Requirements for your input data
2 | ********************************
3 |
4 | Compatible image formats
5 | ~~~~~~~~~~~~~~~~~~~~~~~~
6 |
7 | The input images can have any GDAL compatible image format. You can find a list here:
8 | https://gdal.org/drivers/raster/index.html
9 |
10 |
11 | Geocoding
12 | ~~~~~~~~~
13 |
14 | Your target image must be approximately geocoded to your reference image.
15 | In case of ENVI files, this means they must have a 'map info' and a 'coordinate system string' as attributes of their
16 | header file.
17 |
18 | .. note::
19 |
20 | AROSICS also allows computing the misregistration between two input images without any geocoding. In this case,
21 | it is assumed that both images have the same spatial resolution and that their upper-left coordinates approximately
22 | represent the same map coordinate. The computed misregistration is then returned in image coordinate units.
23 |
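A minimal sketch of this mode, following the AROSICS test suite (the array names and the window position
are placeholders; a unit geotransform and an empty projection string stand in for the missing geocoding):

.. code-block:: python

    >>> from geoarray import GeoArray
    >>> from arosics import COREG

    # wrap plain numpy arrays without real geocoding
    >>> ref = GeoArray(ref_ndarray, [0, 1, 0, 0, 0, -1], '')
    >>> tgt = GeoArray(tgt_ndarray, [0, 1, 0, 0, 0, -1], '')

    >>> CR = COREG(ref, tgt, wp=(1200, -1600), ws=(256, 256))
    >>> CR.calculate_spatial_shifts()
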
24 |
25 | Supported projections
26 | ~~~~~~~~~~~~~~~~~~~~~
27 |
28 | AROSICS was initially written with support for UTM and geographic coordinates only. Full support for any other
29 | projection was added in version 1.4.0. However, make sure your input images have the same projection. Different
30 | projections for the reference and target image are currently not supported.
31 |
32 | AROSICS can also be applied to images without any projection and geocoding information. In this case, however,
33 | the input images need to have the same pixel size and must cover more or less the same spatial area
34 | (with a shift of a few pixels at most).
35 |
36 |
37 | Geographic overlap
38 | ~~~~~~~~~~~~~~~~~~
39 |
40 | The input images must have a geographic overlap, but clipping them to the same geographical extent is NOT necessary.
41 | The image overlap area is automatically calculated, given that your input images have valid geocoding and projection
42 | information. No-data regions within the images are automatically respected in the process.
43 |
44 |
45 | Spatial resolution
46 | ~~~~~~~~~~~~~~~~~~
47 |
48 | The input images may have different spatial resolutions. Any needed resampling of the data is done automatically.
49 |
50 | .. attention::
51 |
52 | Please try to avoid any spatial resampling of the input images before running AROSICS. It might affect
53 | the accuracy of the computed misregistration.
54 |
55 |
56 | Orthorectified datasets
57 | ~~~~~~~~~~~~~~~~~~~~~~~
58 |
59 | Please use ortho-rectified input data in order to minimize local shifts in the input images.
60 |
61 |
62 | No-data values
63 | ~~~~~~~~~~~~~~
64 |
65 | The no-data value of each image is automatically derived from the image corners. However, this may fail if the actual
66 | no-data value is not present within a 3x3 matrix at the image corners. User-provided no-data values will speed up the
67 | computation and avoid wrongly derived values.
68 |
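A minimal sketch of providing the no-data values explicitly. The 'nodata' keyword name and its
(reference, target) tuple form are assumptions here - please check the API/CLI reference for the exact
parameter name:

.. code-block:: python

    >>> from arosics import COREG

    # 'nodata' is assumed to take the no-data values of the reference and the target image
    >>> CR = COREG('/path/to/your/ref_image.bsq', '/path/to/your/tgt_image.bsq',
    >>>            nodata=(0, 0))
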
69 |
70 | Actual image corner coordinates
71 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
72 |
73 | Providing the map coordinates of the actual image corners lets you save some computation time,
74 | because in this case the implemented automatic algorithm can be skipped.
75 |
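A minimal sketch of providing the image footprints (and thereby the actual corner coordinates) as WKT
polygons via the 'footprint_poly_ref' / 'footprint_poly_tgt' keywords, as used in the AROSICS test suite
(the polygons below are placeholders for your actual footprints):

.. code-block:: python

    >>> from arosics import COREG

    >>> CR = COREG('/path/to/your/ref_image.bsq', '/path/to/your/tgt_image.bsq',
    >>>            footprint_poly_ref='POLYGON ((340870 5862000, 354000 5862000, 354000 5830000, '
    >>>                               '331320 5830000, 340870 5862000))',
    >>>            footprint_poly_tgt='POLYGON ((341890 5866490, 356180 5866490, 356180 5834970, '
    >>>                               '335440 5834970, 335490 5845270, 341890 5866490))')
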
76 |
77 | Image masks / areas to be excluded from tie-point creation
78 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
79 |
80 | The user may provide user-defined masks to exclude certain image areas from tie point creation. This is useful, for
81 | example, in the case of cloud areas or moving objects. Even without such masks, however, the outlier detection
82 | algorithms of AROSICS will filter out tie points with large differences to the surrounding area.
83 |
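A minimal sketch of passing such a mask, based on the AROSICS test suite. 'mask_baddata_tgt' marks areas
of the target image to be excluded (True is assumed to flag bad data); a corresponding keyword for the
reference image is assumed to exist as well:

.. code-block:: python

    >>> import numpy as np
    >>> from geoarray import GeoArray
    >>> from arosics import COREG

    # boolean mask with the same geoinformation as the target image
    >>> tgt = GeoArray('/path/to/your/tgt_image.bsq')
    >>> mask = np.zeros((tgt.rows, tgt.cols), dtype=bool)
    >>> mask[1000:1100, 1000:1100] = True  # e.g., a cloudy area

    >>> CR = COREG('/path/to/your/ref_image.bsq', tgt,
    >>>            mask_baddata_tgt=GeoArray(mask, tgt.gt, tgt.prj))
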
84 |
85 | Unequal sensors and image acquisition conditions
86 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
87 |
88 | AROSICS is designed to robustly handle the typical difficulties of multi-sensoral/multi-temporal images.
89 |
--------------------------------------------------------------------------------
/docs/usage/local_coreg.rst:
--------------------------------------------------------------------------------
1 | Local image co-registration
2 | ***************************
3 |
4 | The local co-registration module of AROSICS has been designed to detect and correct geometric shifts that are locally
5 | present in your input image. The class :class:`arosics.COREG_LOCAL` calculates a grid of spatial shifts with points spread
6 | over the whole overlap area of the input images. Based on this grid, a correction of local shifts can be performed.
7 |
8 |
9 | Using the Python API
10 | --------------------
11 |
12 | detect and correct local shifts - with input data on disk
13 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
14 |
15 | .. code-block:: python
16 |
17 | >>> from arosics import COREG_LOCAL
18 |
19 | >>> im_reference = '/path/to/your/ref_image.bsq'
20 | >>> im_target = '/path/to/your/tgt_image.bsq'
21 | >>> kwargs = {
22 | >>> 'grid_res' : 200,
23 | >>> 'window_size' : (256, 256),
24 | >>> 'path_out' : 'auto',
25 | >>> 'projectDir' : 'my_project',
26 | >>> 'q' : False,
27 | >>> }
28 |
29 | >>> CRL = COREG_LOCAL(im_reference,im_target,**kwargs)
30 | >>> CRL.correct_shifts()
31 |
32 | Calculating actual data corner coordinates for reference image...
33 | Corner coordinates of reference image:
34 | [[319090.0, 5790510.0], [351800.0, 5899940.0], [409790.0, 5900040.0], [409790.0, 5790250.0], [319090.0, 5790250.0]]
35 | Calculating actual data corner coordinates for image to be shifted...
36 | Corner coordinates of image to be shifted:
37 | [[319460.0, 5790510.0], [352270.0, 5900040.0], [409790.0, 5900040.0], [409790.0, 5790250.0], [319460.0, 5790250.0]]
38 | Matching window position (X,Y): 372220.10753674706/5841066.947109019
39 | Calculating tie point grid (1977 points) in mode 'multiprocessing'...
40 | progress: |==================================================| 100.0% [1977/1977] Complete 9.75 sek
41 | Found 1144 valid GCPs.
42 | Correcting geometric shifts...
43 | Translating progress |==================================================| 100.0% Complete
44 | Warping progress |==================================================| 100.0% Complete
45 | Writing GeoArray of size (10979, 10979) to /home/gfz-fe/scheffler/jupyter/arosics_jupyter/my_project/S2A_OPER_MSI_L1C_TL_SGS__20160608T153121_A005024_T33UUU_B03__shifted_to__S2A_OPER_MSI_L1C_TL_SGS__20160529T153631_A004881_T33UUU_B03.bsq.
46 |
47 |
48 | OrderedDict([('band', None),
49 | ('is shifted', True),
50 | ('is resampled', True),
51 | ('updated map info',
52 | ['UTM',
53 | 1,
54 | 1,
55 | 300000.0,
56 | 5900030.0,
57 | 10.0,
58 | 10.0,
59 | 33,
60 | 'North',
61 | 'WGS-84']),
62 | ('updated geotransform',
63 | [300000.0, 10.0, 0.0, 5900030.0, 0.0, -10.0]),
64 | ('updated projection',
65 | 'PROJCS["WGS 84 / UTM zone 33N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AXIS["Latitude",NORTH],AXIS["Longitude",EAST],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",15],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32633"]]'),
66 | ('arr_shifted', array([[ 0, 0, 0, ..., 1034, 996, 1001],
67 | [ 0, 0, 0, ..., 1046, 1114, 1124],
68 | [ 0, 0, 0, ..., 1021, 1126, 1148],
69 | ...,
70 | [ 0, 0, 0, ..., 760, 769, 805],
71 | [ 0, 0, 0, ..., 762, 755, 765],
72 | [ 0, 0, 0, ..., 0, 0, 0]], dtype=uint16)),
73 | ('GeoArray_shifted',
74 | )])
75 |
76 |
77 | detect and correct local shifts - without any disk access
78 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
79 |
80 | All you have to do is instantiate :class:`arosics.COREG_LOCAL` with two instances of the :class:`geoarray.GeoArray`
81 | class as described above.
82 |
83 |
84 | .. code-block:: python
85 |
86 | >>> from geoarray import GeoArray
87 |
88 | >>> CRL = COREG_LOCAL(GeoArray(ref_ndarray, ref_gt, ref_prj),
89 | >>> GeoArray(tgt_ndarray, tgt_gt, tgt_prj),
90 | >>> **kwargs)
91 | >>> CRL.correct_shifts()
92 |
93 |
94 | visualize tie point grid with INITIAL shifts present in your input target image
95 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
96 |
97 | Use the method :meth:`CRL.view_CoRegPoints()` to visualize the tie point grid with
98 | the calculated absolute lengths of the shift vectors (the unit corresponds to the input projection - UTM in the shown
99 | example, thus the unit is 'meters').
100 |
101 | .. note::
102 |
103 | A calculation of reliable shifts above cloud covered areas is not possible.
104 | In the current version of AROSICS these areas are not masked. A proper masking is planned.
105 |
106 |
107 | .. code-block:: python
108 |
109 | >>> CRL.view_CoRegPoints(figsize=(15,15), backgroundIm='ref')
110 |
111 | Note: array has been downsampled to 1000 x 1000 for faster visualization.
112 |
113 | .. image:: ../images/output_40_1.png
114 |
115 |
116 | The output figure shows the calculated absolute lengths of the shift vectors - in this case with shifts of up to ~25 meters.
117 |
118 |
119 | visualize tie point grid with shifts present AFTER shift correction
120 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
121 |
122 | The remaining shifts after local correction can be calculated and visualized by instantiating
123 | :class:`arosics.COREG_LOCAL` again, this time with the output path of the above instance of :class:`COREG_LOCAL` as the target image.
124 |
125 | .. code-block:: python
126 |
127 | >>> CRL_after_corr = COREG_LOCAL(im_reference, CRL.path_out, **kwargs)
128 | >>> CRL_after_corr.view_CoRegPoints(figsize=(15,15),backgroundIm='ref')
129 |
130 | Calculating actual data corner coordinates for reference image...
131 | Corner coordinates of reference image:
132 | [[319090.0, 5790510.0], [351800.0, 5899940.0], [409790.0, 5900040.0], [409790.0, 5790250.0], [319090.0, 5790250.0]]
133 | Calculating actual data corner coordinates for image to be shifted...
134 | Corner coordinates of image to be shifted:
135 | [[319460.0, 5790540.0], [352270.0, 5900030.0], [409780.0, 5900030.0], [409780.0, 5790260.0], [322970.0, 5790250.0], [319460.0, 5790280.0]]
136 | Matching window position (X,Y): 372216.38593955856/5841068.390957352
137 | Note: array has been downsampled to 1000 x 1000 for faster visualization.
138 | Calculating tie point grid (1977 points) in mode 'multiprocessing'...
139 | progress: |==================================================| 100.0% [1977/1977] Complete 10.78 sek
140 |
141 | .. image:: ../images/output_44_1.png
142 |
143 |
144 | The output figure shows a significant reduction of geometric shifts.
145 |
146 |
147 | show the points table of the calculated tie point grid
148 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
149 |
150 | .. note::
151 |
152 | Point records where no valid match has been found are filled with -9999.
153 |
154 | .. code-block:: python
155 |
156 | >>> CRL.CoRegPoints_table
157 |
158 |
159 | .. image:: ../images/CoregPoints_table.png
160 |
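A minimal sketch for dropping those invalid records, assuming the table behaves like a pandas DataFrame:

.. code-block:: python

    >>> tbl = CRL.CoRegPoints_table

    # keep only rows where no numeric column contains the -9999 fill value
    >>> numeric = tbl.select_dtypes('number')
    >>> tbl_valid = tbl[(numeric != -9999).all(axis=1)]
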
161 |
162 | export tie point grid to an ESRI point shapefile
163 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
164 |
165 | .. code-block:: python
166 |
167 | >>> CRL.tiepoint_grid.to_PointShapefile(path_out='/path/to/your/output_shapefile.shp')
168 |
169 |
170 | ----
171 |
172 |
173 | Using the Shell console
174 | -----------------------
175 |
176 | Follow these instructions to run AROSICS from a shell console. For example, the simplest call for a local
177 | co-registration would look like this:
178 |
179 | .. code-block:: bash
180 |
181 | $ arosics local /path/to/your/ref_image.bsq /path/to/your/tgt_image.bsq 50
182 |
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
4 | #
5 | # Copyright (C) 2017-2024
6 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
7 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
8 | # Germany (https://www.gfz-potsdam.de/)
9 | #
10 | # This software was developed within the context of the GeoMultiSens project funded
11 | # by the German Federal Ministry of Education and Research
12 | # (project grant code: 01 IS 14 010 A-C).
13 | #
14 | # Licensed under the Apache License, Version 2.0 (the "License");
15 | # you may not use this file except in compliance with the License.
16 | # You may obtain a copy of the License at
17 | #
18 | # https://www.apache.org/licenses/LICENSE-2.0
19 | #
20 | # Unless required by applicable law or agreed to in writing, software
21 | # distributed under the License is distributed on an "AS IS" BASIS,
22 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | # See the License for the specific language governing permissions and
24 | # limitations under the License.
25 |
26 | [build-system]
27 | requires = [
28 | "setuptools>=61.2",
29 | "setuptools-git",
30 | ]
31 | build-backend = "setuptools.build_meta"
32 |
33 | [project]
34 | name = "arosics"
35 | description = "An Automated and Robust Open-Source Image Co-Registration Software for Multi-Sensor Satellite Data"
36 | authors = [{name = "Daniel Scheffler", email = "daniel.scheffler@gfz.de"}]
37 | license = {text = "Apache-2.0"}
38 | classifiers = [
39 | "Development Status :: 5 - Production/Stable",
40 | "Intended Audience :: Science/Research",
41 | "Topic :: Scientific/Engineering",
42 | "License :: OSI Approved :: Apache Software License",
43 | "Natural Language :: English",
44 | "Programming Language :: Python :: 3",
45 | "Programming Language :: Python :: 3.8",
46 | "Programming Language :: Python :: 3.9",
47 | "Programming Language :: Python :: 3.10",
48 | "Programming Language :: Python :: 3.11",
49 | "Programming Language :: Python :: 3.12",
50 | ]
51 | keywords = [
52 | "arosics",
53 | "image co-registration",
54 | "geometric pre-processing",
55 | "remote sensing",
56 | "sensor fusion",
57 | ]
58 | requires-python = ">=3.8"
59 | dependencies = [
60 | "cartopy",
61 | "cmocean",
62 | "folium>=0.6.0,!=0.12.0",
63 | "gdal",
64 | "geoarray>=0.15.0",
65 | "geojson",
66 | "geopandas",
67 | "joblib>=1.3.0",
68 | "matplotlib",
69 | "numpy",
70 | "pandas",
71 | "plotly",
72 | "pykrige",
73 | "pyproj>2.2.0",
74 | "py_tools_ds>=0.18.0",
75 | "scikit-image>=0.21.0",
76 | "scikit-learn",
77 | "scipy>=1.7.0",
78 | "shapely",
79 | ]
80 | dynamic = ["version"]
81 |
82 | [project.readme]
83 | file = "README.rst"
84 | content-type = "text/x-rst"
85 |
86 | [project.urls]
87 | "Source code" = "https://git.gfz-potsdam.de/danschef/arosics"
88 | "Issue Tracker" = "https://git.gfz-potsdam.de/danschef/arosics/-/issues"
89 | "Documentation" = "https://danschef.git-pages.gfz-potsdam.de/arosics/doc/"
90 | "Change log" = "https://git.gfz-potsdam.de/danschef/arosics/-/blob/main/HISTORY.rst"
91 | "Algorithm paper" = "https://www.mdpi.com/2072-4292/9/7/676"
92 | "Zenodo" = "https://zenodo.org/record/5093940"
93 |
94 | [project.optional-dependencies]
95 | interactive_plotting = [
96 | "holoviews>1.12.7",
97 | "ipython",
98 | "nbformat",
99 | ]
100 | doc = [
101 | "sphinx-argparse",
102 | "sphinx_rtd_theme",
103 | "sphinx-autodoc-typehints",
104 | ]
105 | test = [
106 | "pytest",
107 | "pytest-cov",
108 | "pytest-reporter-html1",
109 | "urlchecker",
110 | "arosics[interactive_plotting]",
111 | ]
112 | lint = [
113 | "flake8",
114 | "pycodestyle",
115 | "pydocstyle",
116 | ]
117 | dev = [
118 | "arosics[test]",
119 | "arosics[doc]",
120 | "arosics[lint]",
121 | ]
122 |
123 | [project.scripts]
124 | arosics = "arosics.arosics_cli:main"
125 |
126 | [tool.setuptools]
127 | zip-safe = false
128 | include-package-data = true
129 | license-files = ["LICENSE"]
130 |
131 | [tool.setuptools.packages.find]
132 | namespaces = false
133 |
134 | [tool.setuptools.dynamic]
135 | version = {attr = "arosics.version.__version__"}
136 |
137 | [tool.distutils.bdist_wheel]
138 | universal = 1
139 |
--------------------------------------------------------------------------------
/tests/CI_docker/build_arosics_testsuite_image.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 | pkgname="arosics"
4 | repourl="https://git.gfz-potsdam.de/danschef/arosics"
5 |
6 | context_dir="./context"
7 | dockerfile="${pkgname}_ci.docker"
8 | python_script='
9 | version = {}
10 | with open("../../'${pkgname}'/version.py") as version_file:
11 | exec(version_file.read(), version)
12 | print(version["__version__"])
13 | '
14 | version=`python -c "$python_script"`
15 | tag="ds__${pkgname}_ci:$version"
16 | gitlab_runner="${pkgname}_gitlab_CI_runner"
17 | runnername_remote="${pkgname}_ci_runner__v${version}__${HOSTNAME}"
18 | taglist="${pkgname}_ci_client"
19 |
20 | echo "#### Build runner docker image"
21 | docker rmi ${tag}
22 | docker build ${context_dir} \
23 | --no-cache \
24 | -f ${context_dir}/${dockerfile} \
25 | -m 20G \
26 | -t ${tag}
27 |
28 | # create the gitlab-runner docker container for the current project
29 | # NOTE: The 'gitlab-runner' and 'gitlab-ci-multi-runner' services will run within this container.
30 | # The runner uses a 'config.toml' configuration file at /etc/gitlab-runner within the container which can be
31 | # modified through additional parameters of the 'gitlab-runner register' command.
32 | echo "#### Create gitlab-runner (daemon) container with tag; ${tag}"
33 | docker stop ${gitlab_runner}
34 | docker rm ${gitlab_runner}
35 | docker run \
36 | -d \
37 | --name ${gitlab_runner} \
38 | --restart always \
39 | -v /var/run/docker.sock:/var/run/docker.sock \
40 | gitlab/gitlab-runner:latest
41 |
42 | # register the runner at the corresponding GitLab repository via a registration-token
43 | echo "
44 | --------------------------------------------------------------------------
45 | To register the runner at GitLab, go to ${repourl}/-/runners,
46 | click on 'New project runner' and use the following settings:
47 |
48 | Tags: ${taglist}
49 | Run untagged jobs: Yes
50 | Runner description: ${runnername_remote}
51 | Paused: No
52 | Protected: No
53 | Lock to current projects: Yes
54 | Maximum job timeout: 7200
55 |
56 | Then click 'Create runner'!
57 | --------------------------------------------------------------------------"
58 | read -p "Please enter the GitLab runner authentication token (should start with 'glrt-'): " token
59 | # NOTE: In case of locally stored images (like here), the docker pull policy 'never' must be used
60 | # (see https://docs.gitlab.com/runner/executors/docker.html#how-pull-policies-work).
61 | docker exec -it ${gitlab_runner} /bin/bash -c "\
62 | gitlab-ci-multi-runner register \
63 | --non-interactive \
64 | --executor 'docker' \
65 | --docker-image '${tag}' \
66 | --url 'https://git.gfz-potsdam.de' \
67 | --token '${token}' \
68 | --description '${runnername_remote}' \
69 | --docker-pull-policy='never'
70 | "
71 | ls
72 |
--------------------------------------------------------------------------------
/tests/CI_docker/context/arosics_ci.docker:
--------------------------------------------------------------------------------
1 | FROM ci_base_ubuntu:0.5
2 |
3 | # use bash shell instead of sh shell for all docker commands
4 | SHELL ["/bin/bash", "-c"]
5 |
6 | # copy some needed stuff to /root
7 | COPY *.yml /root/
8 |
9 | # update base environment
10 | RUN mamba update -y -n base mamba conda && \
11 | conda clean -afy
12 |
13 | # create ci_env environment
14 | RUN mamba env create -n ci_env -f /root/environment_arosics.yml && \
15 | conda clean -afy
16 |
--------------------------------------------------------------------------------
/tests/CI_docker/context/environment_arosics.yml:
--------------------------------------------------------------------------------
1 | name: arosics
2 |
3 | channels: &id1
4 | - conda-forge
5 |
6 | dependencies:
7 | - python>=3.8
8 | - pip
9 | - cartopy
10 | - cmocean
11 | - folium>=0.6.0,!=0.12.0
12 | - gdal
13 | - geoarray>=0.15.0
14 | - geojson
15 | - geopandas
16 | - holoviews>1.12.7
17 | - ipython # needed to test interactive plotting
18 | - joblib>=1.3.0
19 | - matplotlib
20 | - nbformat # optional dependency of plotly, needed to test interactive plotting
21 | - numpy
22 | - pandas
23 | - plotly
24 | - pykrige
25 | - pyproj>2.2.0
26 | - py-tools-ds>=0.18.0
27 | - scikit-image>=0.21.0
28 | - scikit-learn
29 | - scipy>=1.7.0
30 | - shapely
31 |
32 | # doc/lint/test requirements
33 | - flake8
34 | - pycodestyle
35 | - pydocstyle
36 | - pylint
37 | - pytest
38 | - pytest-cov
39 | - sphinx-argparse
40 | - sphinx-autodoc-typehints
41 | - sphinx_rtd_theme
42 | - urlchecker
43 |
44 | - pip:
45 | - pytest-reporter-html1
46 |
--------------------------------------------------------------------------------
/tests/__init__.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 |
3 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
4 | #
5 | # Copyright (C) 2017-2024
6 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
7 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
8 | # Germany (https://www.gfz-potsdam.de/)
9 | #
10 | # This software was developed within the context of the GeoMultiSens project funded
11 | # by the German Federal Ministry of Education and Research
12 | # (project grant code: 01 IS 14 010 A-C).
13 | #
14 | # Licensed under the Apache License, Version 2.0 (the "License");
15 | # you may not use this file except in compliance with the License.
16 | # You may obtain a copy of the License at
17 | #
18 | # https://www.apache.org/licenses/LICENSE-2.0
19 | #
20 | # Unless required by applicable law or agreed to in writing, software
21 | # distributed under the License is distributed on an "AS IS" BASIS,
22 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
23 | # See the License for the specific language governing permissions and
24 | # limitations under the License.
25 |
26 | """Unit test package for arosics."""
27 |
28 | import matplotlib as _plt
29 |
30 |
31 | def setUpModule():
32 | _plt.rcParams.update({'figure.max_open_warning': 0})
33 |
34 |
35 | def tearDownModule() -> None:
36 | _plt.rcParams.update({'figure.max_open_warning': 20})
37 |
--------------------------------------------------------------------------------
/tests/cases.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
5 | #
6 | # Copyright (C) 2017-2024
7 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
8 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
9 | # Germany (https://www.gfz-potsdam.de/)
10 | #
11 | # This software was developed within the context of the GeoMultiSens project funded
12 | # by the German Federal Ministry of Education and Research
13 | # (project grant code: 01 IS 14 010 A-C).
14 | #
15 | # Licensed under the Apache License, Version 2.0 (the "License");
16 | # you may not use this file except in compliance with the License.
17 | # You may obtain a copy of the License at
18 | #
19 | # https://www.apache.org/licenses/LICENSE-2.0
20 | #
21 | # Unless required by applicable law or agreed to in writing, software
22 | # distributed under the License is distributed on an "AS IS" BASIS,
23 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
24 | # See the License for the specific language governing permissions and
25 | # limitations under the License.
26 |
27 | import os
28 |
29 | # custom
30 | import arosics
31 |
32 | tests_path = os.path.abspath(os.path.join(arosics.__file__, "..", "..", "tests"))
33 |
34 | # define test data paths
35 | test_cases = dict(
36 | INTER1=dict(
37 | ref_path=os.path.join(tests_path, 'data', 'testcase_inter1_S2A_S2A', 'ref_S2A_20160608T153121_T33UUU_sub.tif'),
38 | tgt_path=os.path.join(tests_path, 'data', 'testcase_inter1_S2A_S2A', 'tgt_S2A_20160529T153631_T33UUU_sub.tif'),
39 | kwargs_global=dict(
40 | path_out=os.path.join(tests_path, 'output', 'testcase_inter1_S2A_S2A/'
41 | 'tgt_S2A_20160529T153631_T33UUU_sub_CR_global.bsq'),
42 | footprint_poly_ref='POLYGON ((340870 5862000, 354000 5862000, 354000 5830000, 331320 5830000, '
43 | '340870 5862000))',
44 | footprint_poly_tgt='POLYGON ((341890 5866490, 356180 5866490, 356180 5834970, 335440 5834970, '
45 | '335490 5845270, 341890 5866490))',
46 | progress=False,
47 | v=False),
48 | wp_inside=(344720, 5848485), # inside of overlap
49 | wp_covering_nodata=(339611, 5856426), # close to the image edge of the input images -> win>64px covers nodata
50 | wp_close_to_edge=(353810, 5840516), # close to the image edge of the input images -> win>64px covers nodata
51 | wp_cloudy=(353308, 5859404), # at a cloudy position of the target image
52 | wp_outside=(349533, 5818862), # outside of overlap
53 | kwargs_local=dict(
54 | grid_res=100,
55 | path_out=os.path.join(tests_path, 'output', 'testcase_inter1_S2A_S2A',
56 | 'tgt_S2A_20160529T153631_T33UUU_sub_CR_local.bsq'),
57 | progress=False)
58 | )
59 | )
60 |
--------------------------------------------------------------------------------
/tests/data/testcase_inter1_S2A_S2A/ref_S2A_20160608T153121_T33UUU_sub.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/tests/data/testcase_inter1_S2A_S2A/ref_S2A_20160608T153121_T33UUU_sub.tif
--------------------------------------------------------------------------------
/tests/data/testcase_inter1_S2A_S2A/tgt_S2A_20160529T153631_T33UUU_sub.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/tests/data/testcase_inter1_S2A_S2A/tgt_S2A_20160529T153631_T33UUU_sub.tif
--------------------------------------------------------------------------------
/tests/linting/.gitkeep:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/GFZ/arosics/14810ecb12fd71a2d4b086741f3801d370a3d767/tests/linting/.gitkeep
--------------------------------------------------------------------------------
/tests/test_COREG.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
5 | #
6 | # Copyright (C) 2017-2024
7 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
8 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
9 | # Germany (https://www.gfz-potsdam.de/)
10 | #
11 | # This software was developed within the context of the GeoMultiSens project funded
12 | # by the German Federal Ministry of Education and Research
13 | # (project grant code: 01 IS 14 010 A-C).
14 | #
15 | # Licensed under the Apache License, Version 2.0 (the "License");
16 | # you may not use this file except in compliance with the License.
17 | # You may obtain a copy of the License at
18 | #
19 | # https://www.apache.org/licenses/LICENSE-2.0
20 | #
21 | # Unless required by applicable law or agreed to in writing, software
22 | # distributed under the License is distributed on an "AS IS" BASIS,
23 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
24 | # See the License for the specific language governing permissions and
25 | # limitations under the License.
26 |
27 | """Tests for the global co-registration module of AROSICS."""
28 |
29 | import unittest
30 | import shutil
31 | import os
32 | import numpy as np
33 | import warnings
34 |
35 | # custom
36 | from .cases import test_cases
37 | import pytest
38 | from arosics import COREG
39 | from geoarray import GeoArray
40 | from osgeo import gdal # noqa
41 | from py_tools_ds.geo.projection import EPSG2WKT
42 |
43 |
44 | class COREG_GLOBAL_init(unittest.TestCase):
45 | """Test case on object initialization of COREG_LOCAL."""
46 |
47 | def setUp(self):
48 | self.ref_path = test_cases['INTER1']['ref_path']
49 | self.tgt_path = test_cases['INTER1']['tgt_path']
50 | self.coreg_kwargs = test_cases['INTER1']['kwargs_global']
51 | self.coreg_kwargs['wp'] = test_cases['INTER1']['wp_inside']
52 |
53 | def test_coreg_init_from_disk(self):
54 | self.CRL = COREG(self.ref_path, self.tgt_path, **self.coreg_kwargs)
55 |
56 | def test_coreg_init_from_inMem_GeoArray(self):
57 | # get GeoArray instances
58 | self.ref_gA = GeoArray(self.ref_path)
59 | self.tgt_gA = GeoArray(self.tgt_path)
60 |
61 | # assure the raster data are in-memory
62 | self.ref_gA.to_mem()
63 | self.tgt_gA.to_mem()
64 |
65 | # get an instance of the COREG class
66 | self.CRL = COREG(self.ref_gA, self.tgt_gA, **self.coreg_kwargs)
67 |
68 | def test_empty_image(self):
69 | # get GeoArray instances
70 | self.ref_gA = GeoArray(self.ref_path)
71 | self.tgt_gA = GeoArray(self.tgt_path)
72 |
73 | # assure the raster data are in-memory
74 | self.ref_gA.to_mem()
75 | self.tgt_gA.to_mem()
76 |
77 | # fill the reference image with nodata
78 | self.ref_gA[:] = self.ref_gA.nodata
79 |
80 | # instantiating COREG with an all-nodata reference image should raise a RuntimeError
81 | with pytest.raises(RuntimeError, match='.*only contains nodata values.'):
82 | COREG(self.ref_gA, self.tgt_gA, **self.coreg_kwargs)
83 |
84 | def test_init_warnings(self):
85 | with pytest.warns(UserWarning, match='.*window size.*rather small value.*'):
86 | COREG(self.ref_path, self.tgt_path, **dict(ws=(63, 63), **self.coreg_kwargs))
87 | # TODO: test the other warnings
88 |
89 |
90 | class CompleteWorkflow_INTER1_S2A_S2A(unittest.TestCase):
91 | """Test case for the complete workflow of global co-registration based on two Sentinel-2 datasets, one with
92 | ~25% cloud cover, the other one without any clouds. The subsets cover the S2A tiles only partly (nodata areas
93 | are present).
94 | """
95 |
96 | def setUp(self):
97 | self.ref_path = test_cases['INTER1']['ref_path']
98 | self.tgt_path = test_cases['INTER1']['tgt_path']
99 | self.coreg_kwargs = test_cases['INTER1']['kwargs_global']
100 |
101 | def tearDown(self):
102 | """Delete output."""
103 | dir_out = os.path.dirname(self.coreg_kwargs['path_out'])
104 | if os.path.isdir(dir_out):
105 | shutil.rmtree(dir_out)
106 |
107 | @staticmethod
108 | def run_shift_detection_correction(ref, tgt, **params):
109 | # get an instance of the COREG class
110 | CR = COREG(ref, tgt, **params)
111 |
112 | # calculate global X/Y shift
113 | CR.calculate_spatial_shifts()
114 |
115 | # test shift correction and output writer
116 | CR.correct_shifts()
117 |
118 | assert os.path.exists(params['path_out']), 'Output of global co-registration has not been written.'
119 |
120 | return CR
121 |
122 | def test_shift_calculation_with_default_params(self):
123 | """Test with default parameters - should compute X/Y shifts properly and write the de-shifted target image."""
124 |
125 | CR = self.run_shift_detection_correction(self.ref_path, self.tgt_path,
126 | **dict(self.coreg_kwargs,
127 | footprint_poly_ref=None,
128 | footprint_poly_tgt=None))
129 | assert CR.success
130 |
131 | def test_shift_calculation_with_image_coords_only(self):
132 | """Test with default parameters - should compute X/Y shifts properly and write the de-shifted target image."""
133 | # FIXME fails when executed alone
134 |
135 | # overwrite gt and prj
136 | ref = GeoArray(self.ref_path)
137 | ref.to_mem()
138 | ref.filePath = None
139 | ref.gt = [0, 1, 0, 0, 0, -1]
140 | ref.prj = ''
141 | tgt = GeoArray(self.tgt_path)
142 | tgt.to_mem()
143 | tgt.filePath = None
144 | tgt.gt = [0, 1, 0, 0, 0, -1]
145 | tgt.prj = ''
146 |
147 | CR = self.run_shift_detection_correction(ref, tgt,
148 | **dict(self.coreg_kwargs,
149 | wp=(1200, -1600),
150 | footprint_poly_ref=None,
151 | footprint_poly_tgt=None))
152 | assert CR.success
153 |
154 | def test_shift_calculation_with_float_coords(self):
155 | """Test with default parameters - should compute X/Y shifts properly and write the de-shifted target image."""
156 |
157 | # overwrite gt and prj
158 | ref = GeoArray(self.ref_path)
159 | ref.to_mem()
160 | ref.filePath = None
161 | ref.gt = [330000.00000001, 10.1, 0.0, 5862000.0000001, 0.0, -10.1]
162 | tgt = GeoArray(self.tgt_path)
163 | tgt.to_mem()
164 | tgt.filePath = None
165 | tgt.gt = [335440.0000001, 10.1, 0.0, 5866490.0000001, 0.0, -10.1]
166 |
167 | CR = self.run_shift_detection_correction(ref, tgt,
168 | **dict(self.coreg_kwargs,
169 | wp=(341500.0, 5861440.0),
170 | footprint_poly_ref=None,
171 | footprint_poly_tgt=None))
172 | assert CR.success
173 |
174 | def test_shift_calculation_nonquadratic_pixels(self):
175 | """Test with default parameters - should compute X/Y shifts properly and write the de-shifted target image."""
176 |
177 | # overwrite gt and prj
178 | ref = GeoArray(self.ref_path)
179 | ref.to_mem()
180 | ref.filePath = None
181 | ref.gt = [330000.00000001, 5.8932, 0.0, 5862000.0000001, 0.0, -10.1]
182 | tgt = GeoArray(self.tgt_path)
183 | tgt.to_mem()
184 | tgt.filePath = None
185 | tgt.gt = [335440.0000001, 5.8933, 0.0, 5866490.0000001, 0.0, -10.1]
186 |
187 | CR = self.run_shift_detection_correction(ref, tgt,
188 | **dict(self.coreg_kwargs,
189 | wp=(341500.0, 5861440.0),
190 | footprint_poly_ref=None,
191 | footprint_poly_tgt=None))
192 | assert CR.success
193 |
194 | def test_shift_calculation_with_metaRotation(self):
195 | """Test with default parameters - should compute X/Y shifts properly and write the de-shifted target image."""
196 |
197 | # overwrite gt and prj
198 | ref = GeoArray(self.ref_path)
199 | ref.to_mem()
200 | ref.filePath = None
201 | ref.gt = [330000, 10, 0.0, 5862000, 0.0, -10]
202 | tgt = GeoArray(self.tgt_path)
203 | tgt.to_mem()
204 | tgt.filePath = None
205 | # tgt.gt = [335440, 5.8932, 0.0, 5866490, 0.0, -10.1]
206 | tgt.gt = [335440, 10, 0.00001, 5866490, 0.00001, -10]
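  | # the non-zero rotation terms (geotransform indices 2 and 4) emulate a metadata rotation,
  | # which triggers the resampling warning asserted below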
207 |
208 | with pytest.warns(UserWarning, match='.*target image needs to be resampled.*'):
209 | CR = self.run_shift_detection_correction(ref, tgt,
210 | **dict(self.coreg_kwargs,
211 | # ws=(512, 512),
212 | wp=(341500.0, 5861440.0),
213 | footprint_poly_ref=None,
214 | footprint_poly_tgt=None,
215 | max_shift=35))
216 | CR.show_matchWin(interactive=False, after_correction=None)
217 | assert CR.success
218 |
219 | def test_shift_calculation_with_mask(self):
220 | """Test COREG if bad data mask is given."""
221 | ref = GeoArray(self.ref_path)
222 | tgt = GeoArray(self.tgt_path)
223 |
224 | mask = np.zeros((tgt.rows, tgt.cols), dtype=bool)
225 | mask[1000:1100, 1000:1100] = True
226 | gA_mask = GeoArray(mask, tgt.gt, tgt.prj)
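  | # the mask marks a 100 x 100 pixel block as bad data; wrapping it into a GeoArray with the
  | # target's geotransform and projection georeferences it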
227 |
228 | CR = self.run_shift_detection_correction(ref, tgt,
229 | **dict(self.coreg_kwargs,
230 | mask_baddata_tgt=gA_mask))
231 | CR.show_matchWin(interactive=False, after_correction=None)
232 | assert CR.success
233 |
234 | def test_shift_calculation_inmem_gAs_path_out_auto(self):
235 | """Test input parameter path_out='auto' in case input reference/ target image are in-memory GeoArrays."""
236 | ref = GeoArray(np.random.randint(1, 100, (1000, 1000)))
237 | tgt = GeoArray(np.random.randint(1, 100, (1000, 1000)))
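  | # in-memory GeoArrays have no file path, so path_out='auto' cannot be resolved to an output
  | # path here and a ValueError is expected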
238 |
239 | with pytest.raises(ValueError):
240 | self.run_shift_detection_correction(ref, tgt,
241 | **dict(self.coreg_kwargs,
242 | path_out='auto',
243 | fmt_out='ENVI',
244 | v=True))
245 |
246 | # @pytest.mark.skip
247 | def test_shift_calculation_verboseMode(self):
248 | """Test the verbose mode - runs the functions of the plotting submodule."""
249 | with warnings.catch_warnings():
250 | warnings.filterwarnings(
251 | 'ignore', category=UserWarning, message='Matplotlib is currently using agg, '
252 | 'which is a non-GUI backend, so cannot show the figure.')
253 | CR = self.run_shift_detection_correction(self.ref_path, self.tgt_path,
254 | **dict(self.coreg_kwargs, v=True))
255 | assert CR.success
256 |
257 | def test_shift_calculation_windowCoveringNodata(self):
258 | """Test shift detection if the matching window (defined by 'wp' and 'ws') covers the nodata area.
259 |
260 | Detected subpixel shifts (X/Y): 0.280572488796/-0.11016529071
261 | Calculated map shifts (X,Y): -7.19427511207/-18.8983470928
262 | """
263 | # TODO compare to expected results
264 |
265 | CR = self.run_shift_detection_correction(self.ref_path, self.tgt_path,
266 | **dict(self.coreg_kwargs,
267 | wp=test_cases['INTER1']['wp_covering_nodata'],
268 | ws=(256, 256)))
269 | assert CR.success
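  | # A possible way to implement the TODO above (sketch only; the attribute names x_shift_px /
  | # y_shift_px are assumptions and may differ in the actual COREG API):
  | # assert CR.x_shift_px == pytest.approx(0.2806, abs=0.05)
  | # assert CR.y_shift_px == pytest.approx(-0.1102, abs=0.05)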
270 |
271 | def test_shift_calculation_windowAtImageEdge(self):
272 | """Test shift detection if the matching window is close to an image edge without covering nodata pixels.
273 |
274 | Detected subpixel shifts (X/Y): 0.34361492307/-0.320197995758
275 | Calculated map shifts (X,Y): -6.56385076931/-16.7980200425
276 | """
277 | # TODO compare to expected results
278 |
279 | CR = self.run_shift_detection_correction(self.ref_path, self.tgt_path,
280 | **dict(self.coreg_kwargs,
281 | wp=test_cases['INTER1']['wp_close_to_edge'], ws=(256, 256)))
282 | assert CR.success
283 |
284 | def test_shift_calculation_noWGS84(self):
285 | """Test if a RunTimeError is raised in case the input projection datum is not WGS84."""
286 | ref = GeoArray(self.ref_path).to_mem()
287 | tgt = GeoArray(self.tgt_path).to_mem()
288 |
289 | # force to overwrite projection
290 | ref.filePath = None
291 | tgt.filePath = None
292 | ref.prj = EPSG2WKT(3035) # ETRS89_LAEA_Europe
293 | tgt.prj = EPSG2WKT(3035) # ETRS89_LAEA_Europe
294 |
295 | CR = self.run_shift_detection_correction(ref, tgt, **dict(self.coreg_kwargs))
296 | assert CR.success
297 |
298 | def test_shift_calculation_different_geographic_datum(self):
299 | """Test if a RunTimeError is raised in case of a different geographic datum."""
300 | ref = GeoArray(self.ref_path).to_mem()
301 | tgt = GeoArray(self.tgt_path).to_mem()
302 |
303 | # force to overwrite projection
304 | ref.filePath = None
305 | ref.prj = EPSG2WKT(3035) # ETRS89_LAEA_Europe
306 |
307 | with pytest.raises(RuntimeError):
308 | self.run_shift_detection_correction(ref, tgt, **dict(self.coreg_kwargs))
309 |
310 | def test_shift_calculation_windowOutside(self):
311 | """Test if shift computation raises a ValueError if the given window position is outside the image overlap."""
312 | with pytest.raises(ValueError):
313 | self.run_shift_detection_correction(self.ref_path, self.tgt_path,
314 | **dict(self.coreg_kwargs,
315 | wp=test_cases['INTER1']['wp_outside']))
316 |
317 | def test_shift_calculation_windowAtClouds(self):
318 | """Test if shift computation raises a RunTimeError if the matching window is centered at a cloudy position."""
319 | with pytest.raises(RuntimeError):
320 | self.run_shift_detection_correction(self.ref_path, self.tgt_path,
321 | **dict(self.coreg_kwargs,
322 | wp=test_cases['INTER1']['wp_cloudy'], ws=(256, 256)))
323 |
324 | def test_shift_calculation_differentInputGrids(self):
325 | """"""
326 | self.skipTest('Not yet implemented.')
327 |
328 | def test_shift_calculation_SSIMdecreases(self):
329 | """"""
330 | self.skipTest('Not yet implemented.')
331 |
332 | # @pytest.mark.skip
333 | def test_plotting_after_shift_calculation(self):
334 | """Test the plotting functionality."""
335 | # NOTE: mocking matplotlib's show() is not needed as long as the non-GUI 'agg' backend is used (see warning filter below)
336 | CR = self.run_shift_detection_correction(self.ref_path, self.tgt_path, **self.coreg_kwargs)
337 | assert CR.success
338 |
339 | # test all the visualization functions
340 | with warnings.catch_warnings():
341 | warnings.filterwarnings(
342 | 'ignore', category=UserWarning, message='Matplotlib is currently using agg, '
343 | 'which is a non-GUI backend, so cannot show the figure.')
344 | CR.show_cross_power_spectrum()
345 | CR.show_matchWin(interactive=False, after_correction=None)
346 | CR.show_matchWin(interactive=False, after_correction=True)
347 | CR.show_matchWin(interactive=False, after_correction=False)
348 | try:
349 | # __IPYTHON__ # noqa
350 | CR.show_cross_power_spectrum(interactive=True)
351 | CR.show_matchWin(interactive=True, after_correction=None) # only works if test is started with ipython
352 | CR.show_matchWin(interactive=True, after_correction=True)
353 | CR.show_matchWin(interactive=True, after_correction=False)
354 | except NameError:
355 | pass
356 | CR.show_image_footprints()
357 |
358 | def test_correct_shifts_without_resampling(self):
359 | kw = self.coreg_kwargs.copy()
360 | kw['align_grids'] = False # =default
361 | kw['progress'] = True
362 |
363 | CR = self.run_shift_detection_correction(self.ref_path, self.tgt_path, **kw)
364 | assert CR.success
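  | # without grid alignment, the shift is applied by updating the geotransform only,
  | # so the pixel values remain untouched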
365 | assert CR.deshift_results['is shifted']
366 | assert not CR.deshift_results['is resampled']
367 | assert np.array_equal(CR.shift[:], CR.deshift_results['arr_shifted'])
368 | assert not np.array_equal(np.array(CR.shift.gt), np.array(CR.deshift_results['updated geotransform']))
369 |
370 | def test_correct_shifts_with_resampling(self):
371 | kw = self.coreg_kwargs.copy()
372 | kw['align_grids'] = True
373 | kw['progress'] = True
374 |
375 | CR = self.run_shift_detection_correction(self.ref_path, self.tgt_path, **kw)
376 | assert CR.success
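  | # with align_grids=True, the shift is applied by resampling, so the pixel values change
  | # while the output geotransform stays on the original grid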
377 | assert CR.deshift_results['is shifted']
378 | assert CR.deshift_results['is resampled']
379 | assert not np.array_equal(CR.shift[:], CR.deshift_results['arr_shifted'])
380 | assert np.array_equal(np.array(CR.shift.gt), np.array(CR.deshift_results['updated geotransform']))
381 |
382 | def test_correct_shifts_gdal_creation_options(self):
383 | """Test if the out_crea_options parameter works."""
384 | kw = self.coreg_kwargs.copy()
385 | kw['fmt_out'] = "GTiff"
386 | kw['out_crea_options'] = ["COMPRESS=DEFLATE", "BIGTIFF=YES", "ZLEVEL=9", "BLOCKXSIZE=512", "BLOCKYSIZE=512"]
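  | # GDAL creation options for the GTiff driver (DEFLATE compression, BigTIFF, 512 x 512 blocks);
  | # gdal.Info is used below to verify that they were actually applied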
387 |
388 | # in case the output data is not resampled
389 | self.run_shift_detection_correction(self.ref_path, self.tgt_path, **kw)
390 | assert 'COMPRESSION=DEFLATE' in gdal.Info(kw['path_out'])
391 |
392 | # in case the output data is resampled
393 | kw['align_grids'] = True
394 | self.run_shift_detection_correction(self.ref_path, self.tgt_path, **kw)
395 | assert 'COMPRESSION=DEFLATE' in gdal.Info(kw['path_out'])
396 |
397 |
398 | if __name__ == '__main__':
399 | pytest.main()
400 |
--------------------------------------------------------------------------------
/tests/test_COREG_LOCAL.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
5 | #
6 | # Copyright (C) 2017-2024
7 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
8 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
9 | # Germany (https://www.gfz-potsdam.de/)
10 | #
11 | # This software was developed within the context of the GeoMultiSens project funded
12 | # by the German Federal Ministry of Education and Research
13 | # (project grant code: 01 IS 14 010 A-C).
14 | #
15 | # Licensed under the Apache License, Version 2.0 (the "License");
16 | # you may not use this file except in compliance with the License.
17 | # You may obtain a copy of the License at
18 | #
19 | # https://www.apache.org/licenses/LICENSE-2.0
20 | #
21 | # Unless required by applicable law or agreed to in writing, software
22 | # distributed under the License is distributed on an "AS IS" BASIS,
23 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
24 | # See the License for the specific language governing permissions and
25 | # limitations under the License.
26 |
27 | """Tests for the local co-registration module of AROSICS."""
28 |
29 | import unittest
30 | from unittest.mock import patch
31 | import shutil
32 | import os
33 | import warnings
34 | from multiprocessing import cpu_count
35 |
36 | # custom
37 | import pytest
38 |
39 | from .cases import test_cases
40 | from arosics import COREG_LOCAL
41 | from geoarray import GeoArray
42 |
43 |
44 | class COREG_LOCAL_init(unittest.TestCase):
45 | """Test case on object initialization of COREG_LOCAL."""
46 |
47 | def setUp(self):
48 | self.ref_path = test_cases['INTER1']['ref_path']
49 | self.tgt_path = test_cases['INTER1']['tgt_path']
50 | self.coreg_kwargs = test_cases['INTER1']['kwargs_local']
51 |
52 | def test_coreg_init_from_disk(self):
53 | self.CRL = COREG_LOCAL(self.ref_path, self.tgt_path, **self.coreg_kwargs)
54 |
55 | def test_coreg_init_from_inMem_GeoArray(self):
56 | # get GeoArray instances
57 | self.ref_gA = GeoArray(self.ref_path)
58 | self.tgt_gA = GeoArray(self.tgt_path)
59 |
60 | # ensure the raster data are in-memory
61 | self.ref_gA.to_mem()
62 | self.tgt_gA.to_mem()
63 |
64 | # get instance of COREG_LOCAL object
65 | self.CRL = COREG_LOCAL(self.ref_gA, self.tgt_gA, **self.coreg_kwargs)
66 |
67 | def test_init_warnings(self):
68 | with pytest.warns(UserWarning, match='.*window size.*rather small value.*'):
69 | COREG_LOCAL(self.ref_path, self.tgt_path, **dict(window_size=(63, 63), **self.coreg_kwargs))
70 | # TODO: test the other warnings
71 |
72 |
73 | class CompleteWorkflow_INTER1_S2A_S2A(unittest.TestCase):
74 | """Test case for the complete workflow of local co-registration based on two Sentinel-2 datasets, one with
75 | ~25% cloud cover, the other one without any clouds. The subsets cover the S2A tiles only partly (nodata areas
76 | are present).
77 | """
78 |
79 | def setUp(self):
80 | self.ref_path = test_cases['INTER1']['ref_path']
81 | self.tgt_path = test_cases['INTER1']['tgt_path']
82 | self.coreg_kwargs = test_cases['INTER1']['kwargs_local']
83 |
84 | def tearDown(self):
85 | """Delete output."""
86 | dir_out = os.path.dirname(self.coreg_kwargs['path_out'])
87 | if os.path.isdir(dir_out):
88 | shutil.rmtree(dir_out)
89 |
90 | def test_calculation_of_tie_point_grid(self):
91 | # get instance of COREG_LOCAL object
92 | CRL = COREG_LOCAL(self.ref_path, self.tgt_path, **self.coreg_kwargs)
93 |
94 | # calculate tie point grid
95 | CRL.calculate_spatial_shifts()
96 |
97 | # test tie point grid visualization
98 | with warnings.catch_warnings():
99 | warnings.filterwarnings(
100 | 'ignore', category=UserWarning, message='Matplotlib is currently using agg, '
101 | 'which is a non-GUI backend, so cannot show the figure.')
102 | CRL.view_CoRegPoints(hide_filtered=True)
103 | CRL.view_CoRegPoints(hide_filtered=False)
104 | CRL.view_CoRegPoints(shapes2plot='vectors')
105 | with pytest.warns(UserWarning, match='.*still under construction.*'):
106 | CRL.view_CoRegPoints_folium()
107 |
108 | # test shift correction and output writer
109 | CRL.correct_shifts()
110 |
111 | assert os.path.exists(self.coreg_kwargs['path_out']), 'Output of local co-registration has not been written.'
112 |
113 | def test_calculation_of_tie_point_grid_float_coords(self):
114 | # NOTE: This does not cover the case of unequally sized output windows from get_image_windows_to_match().
115 |
116 | # overwrite gt and prj
117 | ref = GeoArray(self.ref_path)
118 | ref.to_mem()
119 | ref.filePath = None
120 | tgt = GeoArray(self.tgt_path)
121 | tgt.to_mem()
122 | tgt.filePath = None
123 |
124 | ref.gt = [330000.19999996503, 10.00000001, 0.0, 5862000.7999997628, 0.0, -10.00000001]
125 | # ref.gt = [330000.1, 10.1, 0.0, 5862000.1, 0.0, -10.1]
126 | tgt.gt = [335440.19999996503, 10.00000001, 0.0, 5866490.7999997628, 0.0, -10.00000001]
127 | # tgt.gt = [330000.1, 10.1, 0.0, 5862000.1, 0.0, -10.1]
128 |
129 | # get instance of COREG_LOCAL object
130 | CRL = COREG_LOCAL(ref, tgt, **dict(CPUs=cpu_count(),
131 | **self.coreg_kwargs))
132 | CRL.calculate_spatial_shifts()
133 | # CRL.view_CoRegPoints()
134 |
135 | def test_calculation_of_tie_point_grid_noepsg(self):
136 | """Test local coregistration with a proj. other than LonLat and UTM and a WKT which has no EPSG code (FORCE)."""
137 | wkt_noepsg = \
138 | """
139 | PROJCRS["BU MEaSUREs Lambert Azimuthal Equal Area - SA - V01",
140 | BASEGEOGCRS["WGS 84",
141 | DATUM["World Geodetic System 1984",
142 | ELLIPSOID["WGS 84",6378137,298.257223563,
143 | LENGTHUNIT["metre",1]]],
144 | PRIMEM["Greenwich",0,
145 | ANGLEUNIT["degree",0.0174532925199433]],
146 | ID["EPSG",4326]],
147 | CONVERSION["unnamed",
148 | METHOD["Lambert Azimuthal Equal Area",
149 | ID["EPSG",9820]],
150 | PARAMETER["Latitude of natural origin",-15,
151 | ANGLEUNIT["degree",0.0174532925199433],
152 | ID["EPSG",8801]],
153 | PARAMETER["Longitude of natural origin",-60,
154 | ANGLEUNIT["degree",0.0174532925199433],
155 | ID["EPSG",8802]],
156 | PARAMETER["False easting",0,
157 | LENGTHUNIT["metre",1],
158 | ID["EPSG",8806]],
159 | PARAMETER["False northing",0,
160 | LENGTHUNIT["metre",1],
161 | ID["EPSG",8807]]],
162 | CS[Cartesian,2],
163 | AXIS["easting",east,
164 | ORDER[1],
165 | LENGTHUNIT["metre",1]],
166 | AXIS["northing",north,
167 | ORDER[2],
168 | LENGTHUNIT["metre",1]]]
169 | """
170 | wkt_noepsg = ' '.join(wkt_noepsg.split())
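  | # collapse all whitespace so that the multi-line WKT literal becomes a single-line string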
171 |
172 | # overwrite prj
173 | ref = GeoArray(self.ref_path)
174 | ref.to_mem()
175 | ref.filePath = None
176 | tgt = GeoArray(self.tgt_path)
177 | tgt.to_mem()
178 | tgt.filePath = None
179 |
180 | ref.prj = wkt_noepsg
181 | tgt.prj = wkt_noepsg
182 |
183 | # get instance of COREG_LOCAL object
184 | CRL = COREG_LOCAL(ref, tgt, **dict(CPUs=cpu_count(),
185 | **self.coreg_kwargs))
186 | CRL.calculate_spatial_shifts()
187 | # CRL.view_CoRegPoints()
188 |
189 | def test_calculation_of_tie_point_grid_with_metaRotation(self):
190 | """Test with default parameters - should compute X/Y shifts properly and write the de-shifted target image."""
191 | # overwrite gt and prj
192 | ref = GeoArray(self.ref_path)
193 | ref.to_mem()
194 | ref.filePath = None
195 | ref.gt = [330000, 10, 0.00001, 5862000, 0.00001, -10]
196 | tgt = GeoArray(self.tgt_path)
197 | tgt.to_mem()
198 | tgt.filePath = None
199 | # tgt.gt = [335440, 5.8932, 0.0, 5866490, 0.0, -10.1]
200 | tgt.gt = [335440, 10, 0.00001, 5866490, 0.00001, -10]
201 |
202 | # get instance of COREG_LOCAL object
203 | with pytest.warns(UserWarning, match='.*reference image needs to be resampled.*'):
204 | CRL = COREG_LOCAL(ref, tgt, **dict(CPUs=cpu_count(),
205 | **self.coreg_kwargs))
206 | CRL.calculate_spatial_shifts()
207 | # CRL.view_CoRegPoints()
208 |
209 | assert CRL.success
210 |
211 | def test_calculation_of_tie_point_grid_with_mask(self):
212 | """Test COREG_LOCAL if bad data mask is given."""
213 | import numpy as np
214 | ref = GeoArray(self.ref_path)
215 | tgt = GeoArray(self.tgt_path)
216 |
217 | mask = np.zeros((tgt.rows, tgt.cols), dtype=np.uint8)
218 | mask[1000:2000, 1000:2000] = True
219 | gA_mask = GeoArray(mask, tgt.gt, tgt.prj)
220 |
221 | # get instance of COREG_LOCAL object
222 | CRL = COREG_LOCAL(ref, tgt, **dict(mask_baddata_tgt=gA_mask,
223 | **self.coreg_kwargs
224 | ))
225 | CRL.calculate_spatial_shifts()
226 | CRL.view_CoRegPoints()
227 |
228 | assert CRL.success
229 |
230 | def test_warnings_summary(self):
231 | """Test if the warnings summary is properly displayed."""
232 | with (patch.dict('os.environ', dict(AROSICS_CI_TEST='True')),
233 | pytest.warns(UserWarning, match='.*~100% of all tie point candidates.*Test warning!.*')):
234 | COREG_LOCAL(self.ref_path, self.tgt_path, **self.coreg_kwargs).calculate_spatial_shifts()
235 |
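  | # without the AROSICS_CI_TEST flag, the run below should not emit any warning;
  | # simplefilter('error') turns any warning into a test failure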
236 | with warnings.catch_warnings(category=UserWarning):
237 | warnings.simplefilter("error")
238 | COREG_LOCAL(self.ref_path, self.tgt_path, **self.coreg_kwargs).calculate_spatial_shifts()
239 |
240 |
241 | if __name__ == '__main__':
242 | pytest.main()
243 |
--------------------------------------------------------------------------------
/tests/test_tie_point_grid.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python
2 | # -*- coding: utf-8 -*-
3 |
4 | # AROSICS - Automated and Robust Open-Source Image Co-Registration Software
5 | #
6 | # Copyright (C) 2017-2024
7 | # - Daniel Scheffler (GFZ Potsdam, daniel.scheffler@gfz.de)
8 | # - Helmholtz Centre Potsdam - GFZ German Research Centre for Geosciences Potsdam,
9 | # Germany (https://www.gfz-potsdam.de/)
10 | #
11 | # This software was developed within the context of the GeoMultiSens project funded
12 | # by the German Federal Ministry of Education and Research
13 | # (project grant code: 01 IS 14 010 A-C).
14 | #
15 | # Licensed under the Apache License, Version 2.0 (the "License");
16 | # you may not use this file except in compliance with the License.
17 | # You may obtain a copy of the License at
18 | #
19 | # https://www.apache.org/licenses/LICENSE-2.0
20 | #
21 | # Unless required by applicable law or agreed to in writing, software
22 | # distributed under the License is distributed on an "AS IS" BASIS,
23 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
24 | # See the License for the specific language governing permissions and
25 | # limitations under the License.
26 |
27 | """Tests for the module arosics.Tie_Point_Grid."""
28 |
29 | import unittest
30 | import tempfile
31 | import os
32 | from importlib.util import find_spec
33 | import shutil
34 | import warnings
35 | import struct
36 |
37 | # custom
38 | import pytest
39 | import numpy as np
40 |
41 | from .cases import test_cases
42 | from arosics import COREG_LOCAL, Tie_Point_Grid
43 |
44 |
45 | class Test_Tie_Point_Grid(unittest.TestCase):
46 |
47 | @classmethod
48 | def setUp(cls):
49 | CRL = COREG_LOCAL(test_cases['INTER1']['ref_path'], test_cases['INTER1']['tgt_path'],
50 | **test_cases['INTER1']['kwargs_local'])
51 | cls.TPG = Tie_Point_Grid(CRL.COREG_obj, CRL.grid_res,
52 | max_points=100, # limit to 100 to reduce computational load
53 | outFillVal=CRL.outFillVal,
54 | resamp_alg_calc=CRL.rspAlg_calc,
55 | tieP_filter_level=CRL.tieP_filter_level,
56 | tieP_random_state=CRL.tieP_random_state,
57 | outlDetect_settings=dict(
58 | min_reliability=CRL.min_reliability,
59 | rs_max_outlier=CRL.rs_max_outlier,
60 | rs_tolerance=CRL.rs_tolerance),
61 | dir_out=CRL.projectDir,
62 | CPUs=CRL.CPUs,
63 | progress=CRL.progress,
64 | v=CRL.v,
65 | q=CRL.q)
66 |
67 | def tearDown(self):
68 | if os.path.isdir(self.TPG.dir_out):
69 | shutil.rmtree(self.TPG.dir_out)
70 |
71 | def test_mean_shifts(self):
72 | assert isinstance(self.TPG.mean_x_shift_px, float)
73 | assert isinstance(self.TPG.mean_y_shift_px, float)
74 | assert isinstance(self.TPG.mean_x_shift_map, float)
75 | assert isinstance(self.TPG.mean_y_shift_map, float)
76 |
77 | def test_get_CoRegPoints_table(self):
78 | self.TPG.get_CoRegPoints_table()
79 |
80 | def test_calc_rmse(self):
81 | self.TPG.calc_rmse(include_outliers=False)
82 | self.TPG.calc_rmse(include_outliers=True)
83 |
84 | def test_calc_overall_ssim(self):
85 | self.TPG.calc_overall_ssim(include_outliers=False, after_correction=True)
86 | self.TPG.calc_overall_ssim(include_outliers=True, after_correction=False)
87 |
88 | def test_calc_overall_stats(self):
89 | stats_noOL = self.TPG.calc_overall_stats(include_outliers=False)
90 | stats_OL = self.TPG.calc_overall_stats(include_outliers=True)
91 |
92 | assert stats_noOL
93 | assert stats_OL
94 | assert isinstance(stats_noOL, dict)
95 | assert isinstance(stats_OL, dict)
96 | assert stats_noOL != stats_OL
97 |
98 | def test_plot_shift_distribution(self):
99 | with warnings.catch_warnings():
100 | warnings.filterwarnings(
101 | 'ignore', category=UserWarning, message='Matplotlib is currently using agg, '
102 | 'which is a non-GUI backend, so cannot show the figure.')
103 | self.TPG.plot_shift_distribution()
104 |
105 | def test_dump_CoRegPoints_table(self):
106 | with tempfile.TemporaryDirectory() as tmpdir:
107 | outpath = os.path.join(tmpdir, 'CoRegPoints_table.pkl')
108 | self.TPG.dump_CoRegPoints_table(outpath)
109 | assert os.path.isfile(outpath)
110 |
111 | def test_to_GCPList(self):
112 | self.TPG.to_GCPList()
113 |
114 | def test_to_PointShapefile(self):
115 | with (pytest.warns(UserWarning, match='.*Column names longer than 10 characters will be truncated.*'),
116 | warnings.catch_warnings()
117 | ):
118 | warnings.filterwarnings("ignore", message=".*recognized as too large to be valid.*")
119 |
120 | tbl = self.TPG.CoRegPoints_table
121 |
122 | n_all_points = len(tbl)
123 | n_nodata = sum(tbl['ABS_SHIFT'] == self.TPG.outFillVal)
124 | n_outliers = sum(tbl['OUTLIER'].__eq__(True))
125 |
126 | def _get_n_records(filepath: str):
127 | with open(filepath, 'rb') as inF:
128 | header = inF.read(32)
129 | return struct.unpack('