├── .github
│   ├── ISSUE_TEMPLATE
│   │   └── bug_report.md
│   └── workflows
│       ├── conda-install.yml
│       └── python-publish.yml
├── .gitignore
├── .travis.yml
├── CONTRIBUTING.md
├── LICENSE.txt
├── MANIFEST.in
├── README.md
├── appveyor.yml
├── datacube_prepare.ipynb
├── docs
│   ├── Makefile
│   ├── make.bat
│   └── source
│       ├── about
│       │   ├── changelog.rst
│       │   ├── projects.rst
│       │   ├── publications.rst
│       │   └── references.rst
│       ├── api
│       │   ├── ancillary.rst
│       │   ├── auxdata.rst
│       │   ├── datacube.rst
│       │   ├── drivers.rst
│       │   ├── examine.rst
│       │   ├── figures
│       │   │   ├── S1_bnr.png
│       │   │   ├── gamma_geocode.graphml
│       │   │   ├── gamma_geocode.svg
│       │   │   ├── snap_erode_edges.png
│       │   │   ├── snap_geocode.graphml
│       │   │   ├── snap_geocode.svg
│       │   │   └── workflow_readme.txt
│       │   ├── gamma.rst
│       │   ├── sentinel-1.rst
│       │   └── snap.rst
│       ├── conf.py
│       ├── general
│       │   ├── DEM.rst
│       │   ├── OSV.rst
│       │   ├── configuration.rst
│       │   ├── filenaming.rst
│       │   ├── installation.rst
│       │   ├── logging.rst
│       │   ├── processing.rst
│       │   └── snap.rst
│       ├── index.rst
│       └── references.bib
├── environment-dev.yml
├── environment-doc.yml
├── environment.yml
├── pyproject.toml
├── pyroSAR
│   ├── ERS
│   │   ├── __init__.py
│   │   ├── auxil.py
│   │   ├── data
│   │   │   └── erspasses.db
│   │   └── mapping.py
│   ├── S1
│   │   ├── __init__.py
│   │   ├── auxil.py
│   │   ├── linesimplify.py
│   │   └── polysimplify.py
│   ├── __init__.py
│   ├── ancillary.py
│   ├── auxdata.py
│   ├── config.py
│   ├── datacube_util.py
│   ├── drivers.py
│   ├── examine.py
│   ├── gamma
│   │   ├── __init__.py
│   │   ├── api.py
│   │   ├── auxil.py
│   │   ├── dem.py
│   │   ├── error.py
│   │   ├── parser.py
│   │   ├── parser_demo.py
│   │   └── util.py
│   ├── install
│   │   ├── download_egm96_15.gtx.sh
│   │   ├── download_testdata.sh
│   │   └── install_deps.sh
│   ├── patterns.py
│   ├── snap
│   │   ├── __init__.py
│   │   ├── auxil.py
│   │   ├── data
│   │   │   ├── collect_suffices.py
│   │   │   ├── snap.auxdata.properties
│   │   │   └── snap.suffices.properties
│   │   ├── recipes
│   │   │   ├── base.xml
│   │   │   └── blank.xml
│   │   └── util.py
│   └── xml_util.py
├── readthedocs.yml
├── requirements-dev.txt
├── requirements.txt
└── tests
    ├── conftest.py
    ├── data
    │   ├── 0000022708_001001_ALOS2015976960-140909.zip
    │   ├── ASA_IMS_1PNESA20040703_205338_000000182028_00172_12250_00001672562030318361237.N1
    │   ├── S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_6D00.zip
    │   ├── S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_FEC3.zip
    │   ├── S1A_IW_GRDH_1SDV_20150222T170725_20150222T170750_004739_005DD8_CEAB.zip
    │   ├── S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip
    │   ├── S1A_IW_GRDH_1SDV_20210119T031653_20210119T031718_036201_043ED0_8255.zip
    │   ├── S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif
    │   ├── S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif.aux.xml
    │   ├── SAR_IMP_1PXESA19960808_205906_00000017G158_00458_26498_2615.E1
    │   ├── SAR_IMS_1PXESA19951220_024320_00000015G152_00132_23166_0252.E1.zip
    │   ├── archive_outdated.csv
    │   ├── archive_outdated_bbox.db
    │   ├── dem.par
    │   └── mli.par
    ├── installtest_gdal_geos.py
    ├── installtest_ogr_sqlite.py
    ├── installtest_spatialite.py
    ├── test_ancillary.py
    ├── test_auxdata.py
    ├── test_config.py
    ├── test_drivers.py
    ├── test_examine.py
    ├── test_gamma.py
    ├── test_gamma_args.py
    ├── test_license.py
    ├── test_osv.py
    ├── test_snap.py
    ├── test_snap_exe.py
    └── test_xml_util.py
/.github/ISSUE_TEMPLATE/bug_report.md:
--------------------------------------------------------------------------------
1 | ---
2 | name: Bug report
3 | about: Create a report to help us improve
4 | title: ''
5 | labels: bug
6 | assignees: ''
7 |
8 | ---
9 |
10 | - which operating system are you using?
11 | e.g. Windows 10, Ubuntu 18.04, etc.
12 | - which environment is pyroSAR running in?
13 | e.g. system-wide Python installation, Anaconda environment, virtual environment, etc.
14 | - which version of pyroSAR are you using?
15 | one installed via conda, pip or a clone of the GitHub repository?
16 | - which function of pyroSAR did you call with which parameters?
17 | - if applicable, which version of SNAP or GAMMA are you using in pyroSAR?
18 | - the full error message
19 |
--------------------------------------------------------------------------------
/.github/workflows/conda-install.yml:
--------------------------------------------------------------------------------
1 | name: conda build
2 |
3 | on: [ push ]
4 |
5 | jobs:
6 | build-linux:
7 | runs-on: ubuntu-latest
8 | defaults:
9 | run:
10 | shell: micromamba-shell {0}
11 | services:
12 | postgres:
13 | image: postgis/postgis:16-3.4
14 | env:
15 | POSTGRES_PASSWORD: Password12!
16 | ports:
17 | - 5432:5432
18 | steps:
19 | - uses: actions/checkout@v3
20 | - name: Set up Python 3.10
21 | uses: mamba-org/setup-micromamba@v2
22 | with:
23 | environment-file: environment.yml
24 | cache-environment: true
25 | init-shell: bash
26 | generate-run-shell: true
27 | post-cleanup: 'all'
28 | - name: Install ESA SNAP
29 | run: |
30 | wget -nv https://download.esa.int/step/snap/12.0/installers/esa-snap_all_linux-12.0.0.sh
31 | bash esa-snap_all_linux-12.0.0.sh -q -dir $GITHUB_ACTION_PATH/esa-snap
32 | - name: Set paths and variables
33 | run: |
34 | echo "$CONDA/bin" >> $GITHUB_PATH
35 | echo "$GITHUB_ACTION_PATH/esa-snap/bin" >> $GITHUB_PATH
36 | echo "PROJ_DATA=$CONDA/share/proj" >> $GITHUB_ENV
37 | - name: Install python packages
38 | run: |
39 | micromamba install -y python=3.10 flake8 -n ps_test
40 | micromamba env update --file environment-dev.yml -n ps_test
41 | - name: Lint with flake8
42 | run: |
43 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
44 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics
45 | - name: Install pyroSAR
46 | run: |
47 | pip install .
48 | - name: Test with pytest
49 | run: |
50 | coverage run -m pytest
51 | coverage xml
52 | env:
53 | PGUSER: postgres
54 | PGPASSWORD: Password12!
55 | - name: Publish to coveralls.io
56 | uses: coverallsapp/github-action@v2
57 | with:
58 | github-token: ${{ github.token }}
59 | format: cobertura
60 |
61 | build-windows:
62 | runs-on: windows-latest
63 | defaults:
64 | run:
65 | shell: cmd /C CALL {0}
66 | steps:
67 | - uses: actions/checkout@v3
68 | - uses: conda-incubator/setup-miniconda@v3
69 | with:
70 | activate-environment: ps_test_dev
71 | auto-update-conda: true
72 | python-version: '3.12'
73 | environment-file: environment.yml
74 | auto-activate-base: false
75 | - uses: nyurik/action-setup-postgis@v2
76 | with:
77 | cached-dir: C:\downloads
78 | - name: Install ESA SNAP
79 | run: |
80 | curl -O https://download.esa.int/step/snap/12.0/installers/esa-snap_all_windows-12.0.0.exe
81 | start /wait esa-snap_all_windows-12.0.0.exe -q -dir C:\esa-snap
82 | - name: Set paths and variables
83 | run: |
84 | echo %CONDA%\Scripts>> %GITHUB_PATH%
85 | echo C:\esa-snap\bin>> %GITHUB_PATH%
86 | echo PROJ_DATA=%CONDA%\share\proj>> %GITHUB_ENV%
87 | - name: Install python packages
88 | run: |
89 | conda env update --file environment-dev.yml --name ps_test_dev
90 | - name: Install pyroSAR
91 | run: |
92 | pip install .
93 | - name: Test with pytest
94 | run: |
95 | pytest
96 | env:
97 | PGUSER: postgres
98 | PGPASSWORD: postgres
99 |
--------------------------------------------------------------------------------
/.github/workflows/python-publish.yml:
--------------------------------------------------------------------------------
1 | # This workflow will upload a Python Package using Twine when a release is created
2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries
3 |
4 | # This workflow uses actions that are not certified by GitHub.
5 | # They are provided by a third-party and are governed by
6 | # separate terms of service, privacy policy, and support
7 | # documentation.
8 |
9 | name: Upload Python Package
10 |
11 | on:
12 | release:
13 | types: [ published ]
14 |
15 | permissions:
16 | contents: read
17 |
18 | jobs:
19 | deploy:
20 |
21 | runs-on: ubuntu-latest
22 |
23 | steps:
24 | - uses: actions/checkout@v3
25 | - name: Set up Python
26 | uses: actions/setup-python@v3
27 | with:
28 | python-version: '3.x'
29 | - name: Install dependencies
30 | run: |
31 | python -m pip install --upgrade pip
32 | pip install build
33 | - name: Build package
34 | run: python -m build
35 | - name: Publish package
36 | uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
37 | with:
38 | user: __token__
39 | password: ${{ secrets.PYPI_API_TOKEN }}
40 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | share/python-wheels/
24 | *.egg-info/
25 | .installed.cfg
26 | *.egg
27 | MANIFEST
28 |
29 | # PyInstaller
30 | # Usually these files are written by a python script from a template
31 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
32 | *.manifest
33 | *.spec
34 |
35 | # Installer logs
36 | pip-log.txt
37 | pip-delete-this-directory.txt
38 |
39 | # Unit test / coverage reports
40 | htmlcov/
41 | .tox/
42 | .nox/
43 | .coverage
44 | .coverage.*
45 | .cache
46 | nosetests.xml
47 | coverage.xml
48 | *.cover
49 | .hypothesis/
50 | .pytest_cache/
51 |
52 | # Translations
53 | *.mo
54 | *.pot
55 |
56 | # Django stuff:
57 | *.log
58 | local_settings.py
59 | db.sqlite3
60 |
61 | # Flask stuff:
62 | instance/
63 | .webassets-cache
64 |
65 | # Scrapy stuff:
66 | .scrapy
67 |
68 | # Sphinx documentation
69 | docs/_build/
70 |
71 | # PyBuilder
72 | target/
73 |
74 | # Jupyter Notebook
75 | .ipynb_checkpoints
76 |
77 | # IPython
78 | profile_default/
79 | ipython_config.py
80 |
81 | # pyenv
82 | .python-version
83 |
84 | # celery beat schedule file
85 | celerybeat-schedule
86 |
87 | # SageMath parsed files
88 | *.sage.py
89 |
90 | # Environments
91 | .env
92 | .venv
93 | env/
94 | venv/
95 | ENV/
96 | env.bak/
97 | venv.bak/
98 |
99 | # Spyder project settings
100 | .spyderproject
101 | .spyproject
102 |
103 | # Rope project settings
104 | .ropeproject
105 |
106 | # mkdocs documentation
107 | /site
108 |
109 | # mypy
110 | .mypy_cache/
111 | .dmypy.json
112 | dmypy.json
113 |
114 | # Pyre type checker
115 | .pyre/
116 |
117 | .idea/
118 | out/
119 | dev_*
120 |
121 | # OSX tempfiles
122 | .DS_Store
--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
1 | dist: jammy
2 | language: python
3 | sudo: required
4 | cache:
5 | directories:
6 | - ~/.cache/pip
7 |
8 | env:
9 | global:
10 | - PIP_WHEEL_DIR=$HOME/.cache/pip/wheels
11 | - PIP_FIND_LINKS=file://$HOME/.cache/pip/wheels
12 | - TESTDATA_DIR=$HOME/testdata
13 | - PGUSER=travis
14 | - PGPASSWORD=Password12!
15 | - SNAP_VERSION=10
16 |
17 | addons:
18 | postgresql: '14'
19 | apt:
20 | sources:
21 | - sourceline: 'ppa:ubuntugis/ppa'
22 | packages:
23 | - libgdal-dev
24 | - gdal-bin
25 | - libsqlite3-mod-spatialite
26 | - libproj-dev
27 | - python3-dev
28 | - postgresql-14-postgis-3
29 |
30 | services:
31 | - postgresql
32 |
33 | python:
34 | - '3.10'
35 |
36 | before_install:
37 | - export SNAP_INSTALLER=esa-snap_sentinel_linux-"$SNAP_VERSION".0.0.sh
38 | - wget -O $SNAP_INSTALLER https://download.esa.int/step/snap/"$SNAP_VERSION"_0/installers/"$SNAP_INSTALLER"
39 | - bash $SNAP_INSTALLER -q
40 | - export PATH=$PATH:/opt/snap/bin
41 |
42 | install:
43 | - mkdir -p ~/.cache/pip/wheels # remove warning "Url 'file:///home/travis/.cache/pip/wheels' is ignored: it is neither a file nor a directory."
44 | - pip install --ignore-installed setuptools pip six certifi # install packages inside the venv if the system version is too old
45 | - pip install numpy
46 | - pip install GDAL==$(gdal-config --version) --global-option=build_ext --global-option="$(gdal-config --cflags)"
47 | - pip install coveralls coverage
48 | - pip install .[test]
49 |
50 | #before_script:
51 | # - travis_wait 40 . ./pyroSAR/install/download_testdata.sh
52 |
53 | before_script:
54 | - psql -U $PGUSER -c 'create database travis_ci_test'
55 | - psql -U $PGUSER -c "create extension if not exists postgis"
56 | - psql -U $PGUSER -c "alter user ${PGUSER} password '${PGPASSWORD}'"
57 |
58 | script:
59 | - coverage run -m pytest
60 |
61 | after_success:
62 | - coveralls
63 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | # Contributing to pyroSAR
2 |
3 | First off, thanks for considering a contribution to pyroSAR. Any contribution, be it a feature suggestion, a pull
4 | request or a simple bug report, is valuable to the project and very welcome.
5 | This document is intended as a guideline on best practices.
6 |
7 | ## How to open an issue
8 | The easiest way to contribute to pyroSAR is by opening an issue. This is intended for reporting software bugs and
9 | suggesting new features. Before you do, please read through the list of
10 | [open issues](https://github.com/johntruckenbrodt/pyroSAR/issues) to see whether this issue has already been raised.
11 | This way, duplicates can be reduced and it is easier for the developers to address them.
12 | If you are not sure whether your issue is a duplicate of an existing one, just open a new issue. It is easier to link
13 | two similar issues than to separate two different ones merged into one.
14 | For reporting bugs please fill out the template, which appears once you open a new issue. For suggesting new features you
15 | can just delete the template text.
16 | The following questions need to be answered so that it is possible for the developers to start fixing the software:
17 | - which operating system are you using?
18 | e.g. Windows 10, Ubuntu 18.04, etc.
19 | - which environment is pyroSAR running in?
20 | e.g. system-wide Python installation, Anaconda environment, virtual environment, etc.
21 | - which version of pyroSAR are you using?
22 | one installed via pip or a clone of the GitHub repository?
23 | - which function of pyroSAR did you call with which parameters?
24 | - if applicable, which version of SNAP or GAMMA are you using in pyroSAR?
25 | - the full error message
26 |
27 | This way the error is reproducible and can quickly be fixed.
28 |
29 | ## Checking pyroSAR's version
30 | The version in use can be obtained like this:
31 | ```python
32 | import pyroSAR
33 | print(pyroSAR.__version__)
34 | ```
35 | Depending on how you installed pyroSAR, the version might look different.
36 | If installed via pip with `pip install pyroSAR`, the package is downloaded from
37 | [PyPI](https://pypi.org/project/pyroSAR/),
38 | where only the main releases are stored and versions are named e.g. `0.9.1`.
39 | These can also be found on GitHub [here](https://github.com/johntruckenbrodt/pyroSAR/releases).
40 | If you have installed pyroSAR directly from GitHub like so:
41 | ```shell
42 | python3 -m pip install git+https://github.com/johntruckenbrodt/pyroSAR
43 | ```
44 | or have directly cloned a branch from GitHub, your version might look like this:
45 | `0.9.2.dev103+g57eeb30`, which follows this naming pattern:
46 | `{next_version}.dev{distance}+{scm letter}{revision hash}`.
47 | In this case we can see that git is used as SCM and the installed revision was
48 | [57eeb30](https://github.com/johntruckenbrodt/pyroSAR/commit/57eeb30970dc6adfee62ca12fd8c8818ecaf3a14),
49 | which, at the time of checking the version, was 103 commits ahead of the latest release.
50 | See [here](https://www.diycode.cc/projects/pypa/setuptools_scm) for more details.
51 |
--------------------------------------------------------------------------------
/LICENSE.txt:
--------------------------------------------------------------------------------
1 | # Copyright (c) 2014-2025, the pyroSAR Developers.
2 |
3 | Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
4 | documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
5 | rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
6 | persons to whom the Software is furnished to do so, subject to the following conditions:
7 |
8 | The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
9 |
10 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
11 | WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
12 | COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
13 | OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
14 |
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | recursive-include pyroSAR/snap *.xml
2 | recursive-include pyroSAR/snap/data *
3 | recursive-include pyroSAR/ERS/data *
4 | recursive-exclude tests *
5 | recursive-exclude .github *
6 | exclude .travis.yml appveyor.yml
7 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
5 | A Python Framework for Large-Scale SAR Satellite Data Processing
6 |
20 | The pyroSAR package aims at providing a complete solution for the scalable organization and processing of SAR satellite data:
21 | * Reading of data from various past and present satellite missions
22 | * Handling of acquisition metadata
23 | * User-friendly access to processing utilities in [SNAP](https://step.esa.int/main/toolboxes/snap/)
24 | and [GAMMA Remote Sensing](https://www.gamma-rs.ch/) software
25 | * Formatting of the preprocessed data for further analysis
26 | * Export to Data Cube solutions
27 |
28 | Head on over to [readthedocs](https://pyrosar.readthedocs.io/en/latest/?badge=latest) for installation instructions,
29 | examples and API reference.
30 |
--------------------------------------------------------------------------------
/appveyor.yml:
--------------------------------------------------------------------------------
1 | # thanks a lot to the Nansat project (https://github.com/nansencenter/nansat) from which this file was adapted
2 | environment:
3 | matrix:
4 | - TARGET_ARCH: x64
5 | CONDA_PY: 36
6 | CONDA_INSTALL_LOCN: C:\Miniconda3-x64
7 | GDAL_DATA: C:\Miniconda3-x64\Library\share\gdal
8 | PROJECT_DIR: C:\projects\pyrosar
9 | SNAP_INSTALL: C:\projects\snap
10 | PGUSER: postgres
11 | PGPASSWORD: Password12!
12 | SNAP_VERSION: 10
13 | SNAP_INSTALLER: esa-snap_sentinel_windows-%SNAP_VERSION%.0.0.exe
14 |
15 | platform:
16 | - x64
17 |
18 | services:
19 | - postgresql96
20 |
21 | install:
22 | # Cygwin's git breaks conda-build. (See https://github.com/conda-forge/conda-smithy-feedstock/pull/2.)
23 | - rmdir C:\cygwin /s /q
24 |
25 | # install PostGIS
26 | - appveyor DownloadFile https://download.osgeo.org/postgis/windows/pg96/archive/postgis-bundle-pg96-3.2.0x64.zip
27 | - 7z x .\postgis-bundle-pg96-3.2.0x64.zip
28 | - xcopy /e /y /q .\postgis-bundle-pg96-3.2.0x64 C:\Progra~1\PostgreSQL\9.6
29 |
30 | # activate conda
31 | - call %CONDA_INSTALL_LOCN%\Scripts\activate.bat
32 |
33 | # If there is a newer build queued for the same PR, cancel this one.
34 | - appveyor DownloadFile https://raw.githubusercontent.com/conda-forge/conda-forge-build-setup-feedstock/master/recipe/ff_ci_pr_build.py
35 | - python ff_ci_pr_build.py -v --ci "appveyor" "%APPVEYOR_ACCOUNT_NAME%/%APPVEYOR_PROJECT_SLUG%" "%APPVEYOR_BUILD_NUMBER%" "%APPVEYOR_PULL_REQUEST_NUMBER%"
36 | - del ff_ci_pr_build.py
37 |
38 | # update conda
39 | - conda update --yes --quiet conda
40 |
41 | - set PYTHONUNBUFFERED=1
42 |
43 |
44 | # Add our channels.
45 | - conda config --set show_channel_urls true
46 | - conda config --remove channels defaults
47 | - conda config --add channels defaults
48 | - conda config --add channels conda-forge
49 |
50 | # install ESA SNAP
51 | - appveyor DownloadFile https://download.esa.int/step/snap/%SNAP_VERSION%_0/installers/%SNAP_INSTALLER%
52 | - start %SNAP_INSTALLER% -q -dir %SNAP_INSTALL%
53 |
54 | - set PATH=%PATH%;%SNAP_INSTALL%\bin
55 |
56 | - echo %PATH%
57 |
58 | # Configure the VM.
59 | - conda env create --file environment-dev.yml
60 | - conda activate ps_test_dev
61 | - pip install .
62 |
63 | # Skip .NET project specific build phase.
64 | build: false
65 |
66 | test_script:
67 | - coverage run -m pytest
68 |
--------------------------------------------------------------------------------
/datacube_prepare.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "This is a quick notebook to demonstrate the pyroSAR functionality for importing processed SAR scenes into an Open Data Cube"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": null,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "from pyroSAR.datacube_util import Product, Dataset\n",
17 | "from pyroSAR.ancillary import groupby, find_datasets"
18 | ]
19 | },
20 | {
21 | "cell_type": "code",
22 | "execution_count": null,
23 | "metadata": {},
24 | "outputs": [],
25 | "source": [
26 | "# define a directory containing processed SAR scenes\n",
27 | "dir = '/path/to/some/data'\n",
28 | "\n",
29 | "# define a name for the product YML; this is used for creating a new product in the datacube\n",
30 | "yml_product = './product_def.yml'\n",
31 | "\n",
32 | "# define a directory for storing the indexing YMLs; these are used to index the dataset in the datacube\n",
33 | "yml_index_outdir = './yml_indexing'\n",
34 | "\n",
35 | "# define a name for the ingestion YML; this is used to ingest the indexed datasets into the datacube\n",
36 | "yml_ingest = './ingestion.yml'\n",
37 | "\n",
38 | "# product description\n",
39 | "product_name_indexed = 'S1_GRD_index'\n",
40 | "product_name_ingested = 'S1_GRD_ingest'\n",
41 | "product_type = 'gamma0'\n",
42 | "description = 'this is just some test'\n",
43 | "\n",
44 | "# define the units of the dataset measurements (i.e. polarizations)\n",
45 | "units = 'backscatter'\n",
46 | "# alternatively this could be a dictionary:\n",
47 | "# units = {'VV': 'backscatter VV', 'VH': 'backscatter VH'}\n",
48 | "\n",
49 | "ingest_location = './ingest'"
50 | ]
51 | },
52 | {
53 | "cell_type": "code",
54 | "execution_count": null,
55 | "metadata": {},
56 | "outputs": [],
57 | "source": [
58 | "# find pyroSAR files by metadata attributes\n",
59 | "files = find_datasets(dir, recursive=True, sensor=('S1A', 'S1B'), acquisition_mode='IW')\n",
60 | "\n",
61 | "# group the found files by their file basenames\n",
62 | "# files with the same basename are considered to belong to the same dataset\n",
63 | "grouped = groupby(files, 'outname_base')"
64 | ]
65 | },
66 | {
67 | "cell_type": "code",
68 | "execution_count": null,
69 | "metadata": {},
70 | "outputs": [],
71 | "source": [
72 | "print(len(files))\n",
73 | "print(len(grouped))"
74 | ]
75 | },
76 | {
77 | "cell_type": "markdown",
78 | "metadata": {},
79 | "source": [
80 | "In the next step we create a new product, add the grouped datasets to it and create YML files for indexing the datasets in the cube."
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": null,
86 | "metadata": {},
87 | "outputs": [],
88 | "source": [
89 | "# create a new product and add the collected datasets to it\n",
90 | "# alternatively, an existing product can be used by providing the corresponding product YML file\n",
91 | "with Product(name=product_name_indexed,\n",
92 | " product_type=product_type,\n",
93 | " description=description) as prod:\n",
94 | "\n",
95 | " for dataset in grouped:\n",
96 | " with Dataset(dataset, units=units) as ds:\n",
97 | "\n",
98 | " # add the datasets to the product\n",
99 | " # this will generalize the metadata from those datasets to measurement descriptions,\n",
100 | " # which define the product definition\n",
101 | " prod.add(ds)\n",
102 | "\n",
103 | " # parse datacube indexing YMLs from product and dataset metadata\n",
104 | " prod.export_indexing_yml(ds, yml_index_outdir)\n",
105 | "\n",
106 | " # write the product YML\n",
107 | " prod.write(yml_product)\n",
108 | " \n",
109 | " # print the product metadata, which is written to the product YML\n",
110 | " print(prod)"
111 | ]
112 | },
113 | {
114 | "cell_type": "markdown",
115 | "metadata": {},
116 | "source": [
117 | "Now that we have a YML file for creating a new product and individual YML files for indexing the datasets, we can create a last YML file, which will ingest the indexed datasets into the cube. For this a new product is created and the files are converted to NetCDF, which are optimised for useage in the cube. The location of those NetCDF files also needs to be defined."
118 | ]
119 | },
120 | {
121 | "cell_type": "code",
122 | "execution_count": null,
123 | "metadata": {},
124 | "outputs": [],
125 | "source": [
126 | "with Product(yml_product) as prod:\n",
127 | " prod.export_ingestion_yml(yml_ingest, product_name_ingested, ingest_location, \n",
128 | " chunking={'x': 512, 'y': 512, 'time': 1})"
129 | ]
130 | },
131 | {
132 | "cell_type": "code",
133 | "execution_count": null,
134 | "metadata": {},
135 | "outputs": [],
136 | "source": []
137 | }
138 | ],
139 | "metadata": {
140 | "kernelspec": {
141 | "display_name": "cubeenv",
142 | "language": "python",
143 | "name": "cubeenv"
144 | },
145 | "language_info": {
146 | "codemirror_mode": {
147 | "name": "ipython",
148 | "version": 3
149 | },
150 | "file_extension": ".py",
151 | "mimetype": "text/x-python",
152 | "name": "python",
153 | "nbconvert_exporter": "python",
154 | "pygments_lexer": "ipython3",
155 | "version": "3.6.6"
156 | }
157 | },
158 | "nbformat": 4,
159 | "nbformat_minor": 2
160 | }
161 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = source
9 | BUILDDIR = build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=source
11 | set BUILDDIR=build
12 |
13 | %SPHINXBUILD% >NUL 2>NUL
14 | if errorlevel 9009 (
15 | echo.
16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
17 | echo.installed, then set the SPHINXBUILD environment variable to point
18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
19 | echo.may add the Sphinx directory to PATH.
20 | echo.
21 | echo.If you don't have Sphinx installed, grab it from
22 | echo.https://www.sphinx-doc.org/
23 | exit /b 1
24 | )
25 |
26 | if "%1" == "" goto help
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/source/about/projects.rst:
--------------------------------------------------------------------------------
1 | ######################
2 | Projects using pyroSAR
3 | ######################
4 |
5 | pyroSAR is or has been used in these projects:
6 |
7 | - `BACI `_
8 | - `CCI Biomass `_
9 | - `COPA `_
10 | - `EMSAfrica `_
11 | - `GlobBiomass `_
12 | - `SALDi `_
13 | - `SenThIS `_
14 | - `Sentinel4REDD `_
15 | - `SWOS `_
16 | - `BONDS `_
17 |
18 | Do you know of other projects? We'd be happy to hear about them.
19 |
--------------------------------------------------------------------------------
/docs/source/about/publications.rst:
--------------------------------------------------------------------------------
1 | ############
2 | Publications
3 | ############
4 |
5 | .. bibliography::
6 | :style: plain
7 | :list: bullet
8 | :filter: author % "Truckenbrodt"
--------------------------------------------------------------------------------
/docs/source/about/references.rst:
--------------------------------------------------------------------------------
1 | .. only:: html or text
2 |
3 | References
4 | ==========
5 |
6 | .. bibliography::
7 | :style: plain
8 |
--------------------------------------------------------------------------------
/docs/source/api/ancillary.rst:
--------------------------------------------------------------------------------
1 | Ancillary Functions
2 | ===================
3 |
4 | .. automodule:: pyroSAR.ancillary
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. autosummary::
10 | :nosignatures:
11 |
12 | find_datasets
13 | getargs
14 | groupby
15 | groupbyTime
16 | hasarg
17 | multilook_factors
18 | parse_datasetname
19 | seconds
20 | Lock
21 | LockCollection
22 |
--------------------------------------------------------------------------------
/docs/source/api/auxdata.rst:
--------------------------------------------------------------------------------
1 | Auxiliary Data Tools
2 | ====================
3 |
4 | .. automodule:: pyroSAR.auxdata
5 | :members: dem_autoload, dem_create, get_egm_lookup, getasse30_hdr, get_dem_options, DEMHandler
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. autosummary::
10 | :nosignatures:
11 |
12 | dem_autoload
13 | dem_create
14 | get_egm_lookup
15 | getasse30_hdr
16 | get_dem_options
17 | DEMHandler
18 |
--------------------------------------------------------------------------------
/docs/source/api/datacube.rst:
--------------------------------------------------------------------------------
1 | Datacube Tools
2 | ==============
3 |
4 | .. automodule:: pyroSAR.datacube_util
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
--------------------------------------------------------------------------------
/docs/source/api/drivers.rst:
--------------------------------------------------------------------------------
1 | Drivers
2 | =======
3 |
4 | .. automodule:: pyroSAR.drivers
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. rubric:: classes
10 |
11 | .. autosummary::
12 | :nosignatures:
13 |
14 | ID
15 | BEAM_DIMAP
16 | CEOS_PSR
17 | CEOS_ERS
18 | EORC_PSR
19 | ESA
20 | SAFE
21 | TSX
22 | TDM
23 | Archive
24 |
25 | .. rubric:: functions
26 |
27 | .. autosummary::
28 | :nosignatures:
29 |
30 | identify
31 | identify_many
32 | filter_processed
33 | getFileObj
34 | parse_date
35 | drop_archive
36 |
--------------------------------------------------------------------------------
/docs/source/api/examine.rst:
--------------------------------------------------------------------------------
1 | Examine
2 | =======
3 |
4 | .. automodule:: pyroSAR.examine
5 | :members:
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. autosummary::
10 | :nosignatures:
--------------------------------------------------------------------------------
/docs/source/api/figures/S1_bnr.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/docs/source/api/figures/S1_bnr.png
--------------------------------------------------------------------------------
/docs/source/api/figures/snap_erode_edges.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/docs/source/api/figures/snap_erode_edges.png
--------------------------------------------------------------------------------
/docs/source/api/figures/workflow_readme.txt:
--------------------------------------------------------------------------------
1 | workflow files were created with the yEd Graph Editor (https://www.yworks.com/products/yed)
2 |
3 | setting the vector bridge style:
4 | Preferences -> Display -> Bridge Style
5 |
--------------------------------------------------------------------------------
/docs/source/api/gamma.rst:
--------------------------------------------------------------------------------
1 | GAMMA
2 | =====
3 |
4 | Processing
5 | ----------
6 |
7 | .. automodule:: pyroSAR.gamma
8 | :members: geocode, convert2gamma, ISPPar, process, ovs, S1_deburst, correctOSV, multilook, par2hdr, UTM, calibrate
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | .. autosummary::
13 | :nosignatures:
14 |
15 | calibrate
16 | convert2gamma
17 | correctOSV
18 | geocode
19 | ISPPar
20 | multilook
21 | ovs
22 | par2hdr
23 | process
24 | S1_deburst
25 | UTM
26 |
27 | DEM tools
28 | ---------
29 |
30 | .. automodule:: pyroSAR.gamma.dem
31 | :members: dem_autocreate, dem_import, dempar, fill, hgt, hgt_collect, makeSRTM, mosaic, swap
32 | :undoc-members:
33 | :show-inheritance:
34 |
35 | .. autosummary::
36 | :nosignatures:
37 |
38 | dem_autocreate
39 | dem_import
40 | dempar
41 | fill
42 | hgt
43 | hgt_collect
44 | makeSRTM
45 | mosaic
46 | swap
47 |
48 | .. _gamma-command-api:
49 |
50 | GAMMA Command API
51 | -----------------
52 |
53 | This is an attempt to make it easier to execute GAMMA commands by offering automatically parsed Python functions.
54 | Thus, instead of executing the command via the shell:
55 |
56 | .. code-block:: shell
57 |
58 | offset_fit offs ccp off.par coffs - 0.15 3 0 > offset_fit.log
59 |
60 | one can wrap it in a Python script:
61 |
62 | .. code-block:: python
63 |
64 | import os
65 | from pyroSAR.gamma.api import isp
66 |
67 | workdir = '/data/gamma_workdir'
68 |
69 | parameters = {'offs': os.path.join(workdir, 'offs'),
70 | 'ccp': os.path.join(workdir, 'ccp'),
71 | 'OFF_par': os.path.join(workdir, 'off.par'),
72 | 'coffs': os.path.join(workdir, 'coffs'),
73 | 'thres': 0.15,
74 | 'npoly': 3,
75 | 'interact_flag': 0,
76 | 'logpath': workdir}
77 |
78 | isp.offset_fit(**parameters)
79 |
80 | A file `offset_fit.log` containing the output of the command is written in both cases. Any parameters that would
81 | need to be set to `-` in the shell can be omitted in the Python call, since all optional parameters
82 | of the functions are already defined with `-` as a default.
83 | The documentation can be accessed like that of any Python function:
84 |
85 | .. code-block:: python
86 |
87 | from pyroSAR.gamma.api import isp
88 | help(isp.offset_fit)
89 |
90 | Parser Documentation
91 | ********************
92 |
93 | .. automodule:: pyroSAR.gamma.parser
94 | :members:
95 | :undoc-members:
96 | :show-inheritance:
97 |
98 | API Demo
99 | ********
100 |
101 | This is a demonstration of an output script as generated automatically by function
102 | :func:`~pyroSAR.gamma.parser.parse_module` for the GAMMA module `ISP`.
103 | Within each function, the command name and all parameters are passed to function
104 | :func:`~pyroSAR.gamma.process`, which converts all input to :py:obj:`str` and then calls the command via the
105 | :mod:`subprocess` module.
106 |
107 | .. automodule:: pyroSAR.gamma.parser_demo
108 | :members:
109 | :undoc-members:
110 | :show-inheritance:
111 |
--------------------------------------------------------------------------------
/docs/source/api/sentinel-1.rst:
--------------------------------------------------------------------------------
1 | Sentinel-1 Tools
2 | ================
3 |
4 | .. automodule:: pyroSAR.S1
5 | :members: OSV, removeGRDBorderNoise
6 | :undoc-members:
7 | :show-inheritance:
8 |
9 | .. autosummary::
10 | :nosignatures:
11 |
12 | OSV
13 | removeGRDBorderNoise
14 |
--------------------------------------------------------------------------------
/docs/source/api/snap.rst:
--------------------------------------------------------------------------------
1 | SNAP
2 | ====
3 |
4 | Processing
5 | ----------
6 |
7 | .. automodule:: pyroSAR.snap.util
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | .. autosummary::
13 | :nosignatures:
14 |
15 | geocode
16 | noise_power
17 |
18 | Workflow Parsing and Execution
19 | ------------------------------
20 |
21 | .. automodule:: pyroSAR.snap.auxil
22 | :members: gpt, execute, parse_node, parse_recipe, split, groupbyWorkers, Workflow, Node, Par, Par_BandMath, dem_parametrize, geo_parametrize, mli_parametrize, orb_parametrize, sub_parametrize
23 | :undoc-members:
24 | :show-inheritance:
25 |
26 | .. autosummary::
27 | :nosignatures:
28 |
29 | gpt
30 | execute
31 | parse_node
32 | parse_recipe
33 | split
34 | groupbyWorkers
35 | Workflow
36 | Node
37 | Par
38 | Par_BandMath
39 | dem_parametrize
40 | geo_parametrize
41 | mli_parametrize
42 | orb_parametrize
43 | sub_parametrize
44 |
45 | General Utilities
46 | -----------------
47 |
48 | .. automodule:: pyroSAR.snap.auxil
49 | :members: erode_edges, writer
50 | :undoc-members:
51 | :show-inheritance:
52 |
53 | .. autosummary::
54 | :nosignatures:
55 |
56 | erode_edges
57 | writer
58 |
--------------------------------------------------------------------------------
/docs/source/conf.py:
--------------------------------------------------------------------------------
1 | import sys
2 | import os
3 | import datetime
4 | from importlib.metadata import version as get_version
5 |
6 | project = 'pyroSAR'
7 | authors = 'the pyroSAR Developers'
8 | year = datetime.datetime.now().year
9 |
10 | # If extensions (or modules to document with autodoc) are in another directory,
11 | # add these directories to sys.path here. If the directory is relative to the
12 | # documentation root, use os.path.abspath to make it absolute, like shown here.
13 | sys.path.insert(0, os.path.abspath('../..'))
14 |
15 | # The full version, including alpha/beta/rc tags.
16 | version_full = get_version(project)
17 | # The short X.Y version.
18 | version = '.'.join(version_full.split('.')[:2])
19 | # release is automatically added to the latex document title and header
20 | release = version
21 |
22 | autodoc_mock_imports = ['osgeo', 'sqlalchemy', 'sqlalchemy_utils', 'geoalchemy2',
23 | 'lxml', 'progressbar', 'spatialist']
24 |
25 | # If your documentation needs a minimal Sphinx version, state it here.
26 | needs_sphinx = '1.6'
27 |
28 | # Add any Sphinx extension module names here, as strings. They can be
29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
30 | # ones.
31 | extensions = [
32 | 'sphinx.ext.autodoc',
33 | 'sphinx.ext.coverage',
34 | 'sphinx.ext.napoleon',
35 | 'sphinx.ext.autosummary',
36 | 'sphinx.ext.intersphinx',
37 | 'sphinx.ext.viewcode',
38 | 'sphinxcontrib.bibtex',
39 | 'sphinxcontrib.cairosvgconverter'
40 | ]
41 |
42 | bibtex_bibfiles = ['references.bib']
43 |
44 | # autodoc_default_flags = ['members']
45 | autosummary_generate = []
46 |
47 | intersphinx_mapping = {'osgeo': ('https://gdal.org', None),
48 | 'python': ('https://docs.python.org/3', None),
49 | 'requests': ('https://requests.readthedocs.io/en/latest', None),
50 | 'scipy': ('https://docs.scipy.org/doc/scipy', None),
51 | 'spatialist': ('https://spatialist.readthedocs.io/en/latest', None),
52 | 'sqlalchemy': ('https://docs.sqlalchemy.org/en/latest', None),
53 | 'sqlalchemy-utils': ('https://sqlalchemy-utils.readthedocs.io/en/latest', None)
54 | }
55 |
56 | napoleon_google_docstring = False
57 | napoleon_numpy_docstring = True
58 | napoleon_include_init_with_doc = False
59 | napoleon_include_private_with_doc = False
60 | napoleon_include_special_with_doc = True
61 | napoleon_use_admonition_for_examples = False
62 | napoleon_use_admonition_for_notes = False
63 | napoleon_use_admonition_for_references = False
64 | napoleon_use_ivar = False
65 | napoleon_use_param = True
66 | napoleon_use_rtype = True
67 |
68 | # Add any paths that contain templates here, relative to this directory.
69 | templates_path = ['_templates']
70 |
71 | # The suffix of source filenames.
72 | source_suffix = '.rst'
73 |
74 | # The encoding of source files.
75 | # source_encoding = 'utf-8-sig'
76 |
77 | # The master toctree document.
78 | master_doc = 'index'
79 |
80 | # General information about the project.
81 | copyright = ' (c) 2014-{}, {}'.format(year, authors)
82 |
83 | # The language for content autogenerated by Sphinx. Refer to documentation
84 | # for a list of supported languages.
85 | # language = None
86 |
87 | # There are two options for replacing |today|: either, you set today to some
88 | # non-false value, then it is used:
89 | # today = ''
90 | # Else, today_fmt is used as the format for a strftime call.
91 | # today_fmt = '%B %d, %Y'
92 |
93 | # List of patterns, relative to source directory, that match files and
94 | # directories to ignore when looking for source files.
95 | exclude_patterns = ['_build']
96 |
97 | # The reST default role (used for this markup: `text`) to use for all
98 | # documents.
99 | # default_role = None
100 |
101 | # If true, '()' will be appended to :func: etc. cross-reference text.
102 | # add_function_parentheses = True
103 |
104 | # If true, the current module name will be prepended to all description
105 | # unit titles (such as .. function::).
106 | # add_module_names = True
107 |
108 | # If true, sectionauthor and moduleauthor directives will be shown in the
109 | # output. They are ignored by default.
110 | # show_authors = False
111 |
112 | # The name of the Pygments (syntax highlighting) style to use.
113 | pygments_style = 'sphinx'
114 |
115 | # A list of ignored prefixes for module index sorting.
116 | # modindex_common_prefix = []
117 |
118 | # If true, keep warnings as "system message" paragraphs in the built documents.
119 | # keep_warnings = False
120 |
121 |
122 | # -- Options for HTML output ----------------------------------------------
123 |
124 | # The theme to use for HTML and HTML Help pages. See the documentation for
125 | # a list of builtin themes.
126 | html_theme = 'default'
127 |
128 | # Theme options are theme-specific and customize the look and feel of a theme
129 | # further. For a list of options available for each theme, see the
130 | # documentation.
131 | # html_theme_options = {}
132 |
133 | # Add any paths that contain custom themes here, relative to this directory.
134 | # html_theme_path = []
135 |
136 | # The name for this set of Sphinx documents. If None, it defaults to
137 | # " v documentation".
138 | # html_title = None
139 |
140 | # A shorter title for the navigation bar. Default is the same as html_title.
141 | # html_short_title = None
142 |
143 | # The name of an image file (relative to this directory) to place at the top
144 | # of the sidebar.
145 | # html_logo = None
146 |
147 | # The name of an image file (within the static path) to use as favicon of the
148 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
149 | # pixels large.
150 | # html_favicon = None
151 |
152 | # Add any paths that contain custom static files (such as style sheets) here,
153 | # relative to this directory. They are copied after the builtin static files,
154 | # so a file named "default.css" will overwrite the builtin "default.css".
155 | html_static_path = ['_static']
156 |
157 | # Add any extra paths that contain custom files (such as robots.txt or
158 | # .htaccess) here, relative to this directory. These files are copied
159 | # directly to the root of the documentation.
160 | # html_extra_path = []
161 |
162 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
163 | # using the given strftime format.
164 | # html_last_updated_fmt = '%b %d, %Y'
165 |
166 | # If true, SmartyPants will be used to convert quotes and dashes to
167 | # typographically correct entities.
168 | # html_use_smartypants = True
169 |
170 | # Custom sidebar templates, maps document names to template names.
171 | # html_sidebars = {}
172 |
173 | # Additional templates that should be rendered to pages, maps page names to
174 | # template names.
175 | # html_additional_pages = {}
176 |
177 | # If false, no module index is generated.
178 | # html_domain_indices = True
179 |
180 | # If false, no index is generated.
181 | html_use_index = True
182 |
183 | # If true, the index is split into individual pages for each letter.
184 | # html_split_index = False
185 |
186 | # If true, links to the reST sources are added to the pages.
187 | # html_show_sourcelink = True
188 |
189 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
190 | # html_show_sphinx = True
191 |
192 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
193 | # html_show_copyright = True
194 |
195 | # If true, an OpenSearch description file will be output, and all pages will
196 | # contain a <link> tag referring to it. The value of this option must be the
197 | # base URL from which the finished HTML is served.
198 | # html_use_opensearch = ''
199 |
200 | # This is the file name suffix for HTML files (e.g. ".xhtml").
201 | # html_file_suffix = None
202 |
203 | # Output file base name for HTML help builder.
204 | htmlhelp_basename = '{}doc'.format(project)
205 |
206 | # -- Options for LaTeX output ---------------------------------------------
207 |
208 | latex_elements = {
209 | # The paper size ('letterpaper' or 'a4paper').
210 | 'papersize': 'a4paper',
211 |
212 | # The font size ('10pt', '11pt' or '12pt').
213 | 'pointsize': '10pt',
214 |
215 | # Additional stuff for the LaTeX preamble.
216 | 'preamble': r'''
217 | \setcounter{tocdepth}{2}
218 | \setlength{\headheight}{27pt}
219 | ''',
220 |
221 | # disable floating
222 | 'figure_align': 'H',
223 | }
224 |
225 | # Grouping the document tree into LaTeX files. List of tuples
226 | # (source start file, target name, title,
227 | # author, documentclass [howto, manual, or own class]).
228 | latex_documents = [
229 | ('index',
230 | '{}.tex'.format(project),
231 | r'{} Documentation'.format(project),
232 | authors, 'manual'),
233 | ]
234 |
235 | # The name of an image file (relative to this directory) to place at the top of
236 | # the title page.
237 | # latex_logo = None
238 |
239 | # For "manual" documents, if this is true, then toplevel headings are parts,
240 | # not chapters.
241 | # latex_use_parts = False
242 |
243 | # If true, show page references after internal links.
244 | # latex_show_pagerefs = False
245 |
246 | # If true, show URL addresses after external links.
247 | # latex_show_urls = False
248 |
249 | # Documents to append as an appendix to all manuals.
250 | # latex_appendices = []
251 |
252 | # If false, no module index is generated.
253 | # latex_domain_indices = True
254 |
255 |
256 | # -- Options for manual page output ---------------------------------------
257 |
258 | # One entry per manual page. List of tuples
259 | # (source start file, name, description, authors, manual section).
260 | man_pages = [
261 | ('index',
262 | project,
263 | '{} Documentation'.format(project),
264 | [authors],
265 | 1)
266 | ]
267 |
268 | # If true, show URL addresses after external links.
269 | # man_show_urls = False
270 |
271 |
272 | # -- Options for Texinfo output -------------------------------------------
273 |
274 | # Grouping the document tree into Texinfo files. List of tuples
275 | # (source start file, target name, title, author,
276 | # dir menu entry, description, category)
277 | texinfo_documents = [
278 | ('index',
279 | project,
280 | '{} Documentation'.format(project),
281 | authors,
282 | project,
283 | 'One line description of project.',
284 | 'Miscellaneous'),
285 | ]
286 |
287 | # Documents to append as an appendix to all manuals.
288 | # texinfo_appendices = []
289 |
290 | # If false, no module index is generated.
291 | # texinfo_domain_indices = True
292 |
293 | # How to display URL addresses: 'footnote', 'no', or 'inline'.
294 | # texinfo_show_urls = 'footnote'
295 |
296 | # If true, do not generate a @detailmenu in the "Top" node's menu.
297 | # texinfo_no_detailmenu = False
298 |
--------------------------------------------------------------------------------
/docs/source/general/DEM.rst:
--------------------------------------------------------------------------------
1 | ###############
2 | DEM Preparation
3 | ###############
4 |
5 | SAR processing requires a high resolution Digital Elevation Model for ortho-rectification and normalization of
6 | terrain-specific imaging effects.
7 |
8 | In SNAP, the DEM is usually auto-downloaded by the software itself and the user only specifies the DEM source to be
9 | used, e.g. SRTM. pyroSAR's convenience function :func:`pyroSAR.snap.util.geocode` additionally exposes SNAP's option to use an
10 | external DEM file via the parameters `externalDEMFile`, `externalDEMNoDataValue` and `externalDEMApplyEGM`.
11 |
12 | GAMMA does not provide ways to automatically download DEMs for processing and the user thus also needs to provide an
13 | external DEM file in GAMMA's own format. However, several commands are available to prepare these DEMs including
14 | conversion from geoid heights to WGS84 ellipsoid heights.
15 |
16 | pyroSAR offers several convenience functions to automatically prepare DEM mosaics from different
17 | sources to use them in either SNAP or GAMMA.
18 |
19 | Download of DEM Tiles
20 | =====================
21 |
22 | The function :func:`pyroSAR.auxdata.dem_autoload` offers convenient download of tiles from different sources
23 | overlapping with user-defined geometries. Optionally, a buffer in degrees can be defined.
24 | This function internally makes use of the function :func:`spatialist.auxil.gdalbuildvrt`.
25 |
26 | .. code-block:: python
27 |
28 | from pyroSAR.auxdata import dem_autoload
29 | from spatialist import Vector
30 |
31 | site = 'mysite.shp'
32 | vrt = 'mosaic.vrt'
33 |
34 | with Vector(site) as vec:
35 | vrt = dem_autoload(geometries=[vec],
36 | demType='SRTM 1Sec HGT',
37 | vrt=vrt,
38 | buffer=0.1)
39 |
40 | The tiles, which are delivered in compressed archives, are directly connected to a virtual mosaic using GDAL's VRT
41 | format, making it easier to work with them by treating them as a single file.
42 | For downloading tiles of some DEM types, e.g. `TDX90m`, an account needs to be created and the user credentials be passed to
43 | function :func:`~pyroSAR.auxdata.dem_autoload`. See the function's documentation for further details.
44 |
45 | The files are stored in SNAP's location for auxiliary data, which by default is `$HOME/.snap/auxdata/dem`.
46 | The function :func:`~pyroSAR.auxdata.dem_autoload` has proven beneficial in server environments where not every node has internet access and the tiles thus
47 | need to be downloaded prior to processing on these nodes.
48 |
49 | DEM Mosaicing
50 | =============
51 |
52 | In a next step we create a mosaic GeoTIFF cropped to the boundaries defined in the VRT using the function
53 | :func:`pyroSAR.auxdata.dem_create`.
54 | The spatial reference system, WGS84 UTM 32N in this case, is defined by its EPSG code, but several other options
55 | are available as well. Since for SAR processing we are interested in ellipsoid heights, we call the function with the
56 | parameter `geoid_convert` set to `True`.
57 | This function makes use of :func:`spatialist.auxil.gdalwarp`.
58 | Conversion of vertical reference systems, e.g. from geoid to ellipsoid, requires GDAL version >=2.2.
59 |
60 | .. code-block:: python
61 |
62 | from pyroSAR.auxdata import dem_create
63 |
64 | outname = 'mysite_srtm.tif'
65 |
66 | dem_create(src=vrt, dst=outname,
67 | t_srs=32632, tr=(20, 20),
68 | resampling_method='bilinear',
69 | geoid_convert=True, geoid='EGM96')
70 |
71 | GAMMA Import
72 | ============
73 |
74 | For convenience, pyroSAR's :mod:`~pyroSAR.gamma` submodule contains a function :func:`pyroSAR.gamma.dem.dem_autocreate`, which is a
75 | combination of functions :func:`~pyroSAR.auxdata.dem_autoload` and :func:`~pyroSAR.auxdata.dem_create` and further
76 | executes GAMMA commands for format conversion.
77 | It offers the same parameters as these two functions and a user can additionally decide whether geoid-ellipsoid
78 | conversion is done in GDAL or in GAMMA via parameter `geoid_mode`. The output is a file in GAMMA format, which can
79 | directly be used for processing by e.g. function :func:`pyroSAR.gamma.geocode`.
80 |
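81 | A minimal sketch of its use (the parameter names are assumptions mirroring the two underlying functions and the
82 | `geoid_mode` parameter described above; consult the function's documentation for the exact signature):
83 |
84 | .. code-block:: python
85 |
86 |     from spatialist import Vector
87 |     from pyroSAR.gamma.dem import dem_autocreate
88 |
89 |     # create a DEM mosaic in GAMMA format for a shapefile-defined site;
90 |     # demType, t_srs and tr follow the examples above (signature assumed)
91 |     with Vector('mysite.shp') as vec:
92 |         dem_autocreate(geometry=vec, demType='SRTM 1Sec HGT',
93 |                        outfile='mysite_srtm_gamma', buffer=0.1,
94 |                        t_srs=32632, tr=(20, 20), geoid_mode='gdal')
95 |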
--------------------------------------------------------------------------------
/docs/source/general/OSV.rst:
--------------------------------------------------------------------------------
1 | ####################################
2 | Handling of Orbit State Vector Files
3 | ####################################
4 | SAR products require additional orbit state vector (OSV) information to improve their spatial location accuracy.
5 | This information is found in externally hosted files, which need to be downloaded separately and are then used by SAR
6 | processing software to update the product's metadata. Currently, pyroSAR only supports handling of Sentinel-1 OSV files.
7 |
8 | In SNAP, the corresponding processing node is called `Apply-Orbit-File`, which automatically downloads the OSV file and
9 | updates the scene's metadata. The files are stored in SNAP's location for auxiliary data,
10 | which by default is `$HOME/.snap/auxdata/Orbits`.
11 |
12 | In GAMMA, on the other hand, the downloading has to be done manually after which the command `isp.S1_OPOD_vec` can be
13 | used for updating the metadata. pyroSAR offers several approaches for automatically downloading these
14 | files. The central tool for managing existing files and downloading new ones is the class :class:`pyroSAR.S1.OSV`, which
15 | is used for all approaches.
16 |
17 | .. note::
18 |
19 | In the following, a dedicated directory is defined into which the files will be downloaded. If this directory is
20 | not defined (default is `None`), the files will be downloaded to SNAP's auxiliary data location (see above). Using
21 | this default is recommended, as the files are then kept in a central location that is accessible both by SNAP and by
22 | pyroSAR's GAMMA functionality.
23 |
24 | approach 1: direct download by time span
25 | ========================================
26 |
27 | In case a large number of scenes is to be processed and/or no internet access is available during processing, the files
28 | can be downloaded by time span to a central directory. This is the most basic approach using the central class
29 | :class:`~pyroSAR.S1.OSV` mentioned above, making use of its methods :meth:`~pyroSAR.S1.OSV.catch` and
30 | :meth:`~pyroSAR.S1.OSV.retrieve`.
31 |
32 | .. code-block:: python
33 |
34 | from pyroSAR.S1 import OSV
35 |
36 | osvdir = '/path/to/osvdir'
37 |
38 | with OSV(osvdir) as osv:
39 | files = osv.catch(sensor='S1A', osvtype='POE',
40 | start='20170101T000000', stop='20180101T000000',
41 | url_option=1)
42 | osv.retrieve(files)
43 |
44 | Two sub-directories `POEORB` and `RESORB` will be created in `osvdir` containing the downloaded files. `POEORB` will
45 | contain the `Precise Orbit Ephemerides` files, which are the most accurate but only become available about two weeks after
46 | the scene's acquisition. `RESORB` will contain the `Restituted Orbit` files, which are less accurate but available
47 | directly after acquisition. See method :meth:`~pyroSAR.S1.OSV.catch` for download URL options.
48 |
49 | approach 2: manual download per scene
50 | =====================================
51 |
52 | The method :meth:`pyroSAR.drivers.SAFE.getOSV` can be used to directly retrieve the files relevant for the scene.
53 | This method internally uses the methods described above with a time span limited to that of the scene acquisition.
54 |
55 | .. code-block:: python
56 |
57 | from pyroSAR import identify
58 | scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
59 | id = identify(scene)
60 | match = id.getOSV(osvdir='/path/to/osvdir', osvType='POE', returnMatch=True)
61 | print(match)
62 |
63 | approach 3: direct download and scene metadata update (GAMMA only)
64 | ==================================================================
65 |
66 | The convenience function :func:`pyroSAR.gamma.correctOSV` internally makes use of approach 2 and additionally directly
67 | executes the GAMMA command `isp.S1_OPOD_vec` for updating the scene's metadata with the information of the OSV file.
68 | The scene has to be unpacked first (see :meth:`pyroSAR.drivers.SAFE.unpack`).
69 |
70 | .. code-block:: python
71 |
72 | from pyroSAR import identify
73 | from pyroSAR.gamma import correctOSV
74 | scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
75 | id = identify(scene)
76 | id.unpack('tmpdir')
77 | correctOSV(id=id, osvdir='/path/to/osvdir', osvType='POE')
78 |
79 | approach 4: automatic download and use during processing
80 | ========================================================
81 |
82 | The processing function :func:`pyroSAR.gamma.geocode` automatically downloads OSV files needed for processing and
83 | updates the scene's metadata using function :func:`~pyroSAR.gamma.correctOSV`.
84 | It is thus the most convenient way to handle these files and related processing steps.
85 | The parameter `allow_RES_OSV` can be used to allow processing with `RES` files if no `POE` file is available yet.
86 |
87 | .. code-block:: python
88 |
89 | from pyroSAR.gamma import geocode
90 | scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
91 | geocode(scene=scene,
92 | dem='/path/to/demfile',
93 | tmpdir='tmpdir',
94 | outdir='outdir',
95 | targetres=20,
96 | osvdir='/path/to/osvdir',
97 | allow_RES_OSV=False)
98 |
99 | Similarly, the function :func:`pyroSAR.snap.util.geocode` also automatically downloads OSV files and chooses the best
100 | matching OSV type for processing.
101 |
102 | .. code-block:: python
103 |
104 | from pyroSAR.snap import geocode
105 | scene = 'S1A_IW_GRDH_1SDV_20180101T170648_20180101T170713_019964_021FFD_DA78.zip'
106 | geocode(infile=scene,
107 | outdir='outdir',
108 | allow_RES_OSV=True)
109 |
110 | In contrast to the GAMMA function, the OSV download directory cannot be set because of the fixed SNAP auxiliary data
111 | location. The type of the available OSV file is written to the workflow XML file for processing:
112 |
113 | .. code-block:: xml
114 |
115 | <node id="Apply-Orbit-File">
116 |   <operator>Apply-Orbit-File</operator>
117 |   <sources>
118 |     <sourceProduct refid="Read"/>
119 |   </sources>
120 |   <parameters class="com.bc.ceres.binding.dom.XppDomElement">
121 |     <orbitType>Sentinel Restituted (Auto Download)</orbitType>
122 |     <polyDegree>3</polyDegree>
123 |     <continueOnFail>false</continueOnFail>
124 |   </parameters>
125 | </node>
126 |
--------------------------------------------------------------------------------
/docs/source/general/configuration.rst:
--------------------------------------------------------------------------------
1 | #############
2 | Configuration
3 | #############
4 |
5 | pyroSAR stores configuration under `$HOME/.pyrosar`.
6 | It contains a file `config.ini` which stores installation paths of SNAP and GAMMA.
7 | The installations are first identified by running the respective `Examine*` class (e.g. :class:`~pyroSAR.examine.ExamineSnap`):
8 |
9 | .. code-block:: python
10 |
11 | from pyroSAR.examine import ExamineSnap
12 | config = ExamineSnap()
13 |
14 | SNAP configuration can also be modified with this class, either by the object properties `userpath` and `auxdatapath` or by the underlying :class:`~pyroSAR.examine.SnapProperties` object:
15 |
16 | .. code-block:: python
17 |
18 | config.userpath = '/path/to/snap/data'
19 | config.snap_properties['snap.userdir'] = '/path/to/snap/data'
20 |
21 | The values are directly written to either `snap.auxdata.properties` or `snap.properties` under `$HOME/.snap/etc`.
22 | The content of these files will override that in the files found under `etc` in the SNAP installation folder.
23 | Setting a parameter to `None` will comment out the value in the respective file.
24 |
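   | As an illustration, a value can be reset through the same interface. This is a minimal sketch using the
   | `ExamineSnap` object from above:
   |
   | .. code-block:: python
   |
   | from pyroSAR.examine import ExamineSnap
   |
   | config = ExamineSnap()
   | # setting None comments out the entry in $HOME/.snap/etc/snap.properties
   | config.snap_properties['snap.userdir'] = None
   |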
--------------------------------------------------------------------------------
/docs/source/general/filenaming.rst:
--------------------------------------------------------------------------------
1 | ###########
2 | File Naming
3 | ###########
4 |
5 | pyroSAR internally uses a fixed naming scheme to keep track of processed results. For each scene an identifier is created,
6 | which contains the sensor, acquisition mode, orbit (ascending or descending) and the time stamp of the acquisition start.
7 | For example `S1A__IW___A_20150222T170750`, which is created by calling method :meth:`~pyroSAR.drivers.ID.outname_base`:
8 |
9 | .. code-block:: python
10 |
11 | from pyroSAR import identify
12 | id = identify('S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip')
13 | print(id.outname_base())
14 |
15 | For each attribute a fixed number of characters is reserved. In case the attribute is shorter than this number,
16 | the remaining characters are filled with underscores. E.g., the sensor field is four characters long, but 'S1A' only three.
17 | Thus, `S1A_` is the sensor slot. In the same way, `IW__` is the acquisition mode slot, which is also four characters long.
18 | `A` denotes an ascending orbit; the time stamp is in the format YYYYmmddTHHMMSS.
19 |
20 | Processing functions like :func:`~pyroSAR.gamma.geocode` add suffixes to this identifier to further keep track of
21 | individual processing steps performed on the dataset.
22 | This core concept is used by many pyroSAR functions internally to keep track of which scenes have been processed before.
23 |
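   | As an illustration of this suffix concept, a hypothetical result file name can be composed from the base
   | identifier; the suffixes used here (`VV`, `grd`, `mli`, `geo`, `norm`, `db`) are examples denoting the
   | polarization and individual processing steps:
   |
   | .. code-block:: python
   |
   | from pyroSAR import identify
   |
   | id = identify('S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip')
   | # base identifier plus hypothetical processing-step suffixes
   | print(id.outname_base() + '_VV_grd_mli_geo_norm_db.tif')
   | # S1A__IW___A_20150222T170750_VV_grd_mli_geo_norm_db.tif
   |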
--------------------------------------------------------------------------------
/docs/source/general/installation.rst:
--------------------------------------------------------------------------------
1 | ############
2 | Installation
3 | ############
4 |
5 | conda
6 | =====
7 |
8 | Starting with version 0.11, pyroSAR is distributed via `conda-forge <https://conda-forge.org>`_
9 | and can easily be installed with
10 |
11 | ::
12 |
13 | conda install --channel conda-forge pyrosar
14 |
15 | This is by far the easiest way to work with pyroSAR on any operating system.
16 |
17 | pip
18 | ===
19 |
20 | Installation with pip is also supported and offers the advantage of installing intermediate development stages
21 | directly from the GitHub repository. Mind, however, that several dependencies like GDAL cannot fully be installed
22 | this way. See further below for detailed instructions on installing the dependencies on Linux.
23 |
24 | Installation of pip (Linux):
25 |
26 | ::
27 |
28 | sudo apt-get install python-pip
29 |
30 | The latest stable release of pyroSAR can then be installed:
31 |
32 | ::
33 |
34 | python -m pip install pyroSAR
35 |
36 | For installation of the latest master branch on GitHub, we need the version control system git. On Windows, git can be
37 | downloaded from `git-scm.com <https://git-scm.com>`_. On Linux you can install it via the command line:
38 |
39 | ::
40 |
41 | sudo apt-get install git
42 |
43 | Once everything is set up, pyroSAR is ready to be installed:
44 |
45 | ::
46 |
47 | python -m pip install git+https://github.com/johntruckenbrodt/pyroSAR.git
48 |
49 | Dependencies
50 | ============
51 | The more specific instructions below are intended for Linux users who want to work outside of the Anaconda environment.
52 |
53 | GDAL
54 | ----
55 | pyroSAR requires GDAL (version 2.1 or higher) built with GEOS and PROJ4 support, as well as the GDAL Python binding.
56 |
57 | Ubuntu
58 | ++++++
59 | Starting with release Yakkety (16.10), Ubuntu comes with GDAL >2.1.
60 | You can install it like this:
61 |
62 | ::
63 |
64 | sudo apt-get install python-gdal python3-gdal gdal-bin
65 |
66 | For older Ubuntu releases you can add the ubuntugis repository to apt prior to installation in order to obtain a version >2.1:
67 |
68 | ::
69 |
70 | sudo add-apt-repository ppa:ubuntugis/ppa
71 | sudo apt-get update
72 |
73 | This way the required dependencies (GEOS and PROJ4 in particular) are also installed.
74 | You can check the version by typing:
75 |
76 | ::
77 |
78 | gdalinfo --version
79 |
80 | Debian
81 | ++++++
82 | Starting with Debian 9 (Stretch) GDAL is available in version >2.1 in the official repository.
83 |
84 | Building from source
85 | ++++++++++++++++++++
86 | Alternatively, you can build GDAL and the dependencies from source. The script `pyroSAR/install/install_deps.sh`
87 | gives specific instructions on how to do it. It is not intended to be run as a whole via shell, but rather to be
88 | followed step by step.
89 |
90 | SQLite + SpatiaLite
91 | -------------------
92 | While `sqlite3` and its Python binding are usually already installed, the `spatialite` extension needs to be
93 | added. Two packages exist, `libspatialite` and `mod_spatialite`. Both can be used by pyroSAR.
94 | On Ubuntu, `mod_spatialite` has been found to be easier to set up with `sqlite` and can be installed via `apt`:
95 |
96 | ::
97 |
98 | sudo apt-get install libsqlite3-mod-spatialite
99 |
100 | On CentOS, `libspatialite` including shared objects for extension loading can be installed via `yum`:
101 |
102 | ::
103 |
104 | sudo yum install libspatialite-devel
105 |
106 | The following can be run in Python to test the needed functionality:
107 |
108 | .. code-block:: python
109 |
110 | import sqlite3
111 |
112 | # setup an in-memory database
113 | con=sqlite3.connect(':memory:')
114 |
115 | # enable loading extensions and load spatialite
116 | con.enable_load_extension(True)
117 | try:
118 | con.load_extension('mod_spatialite.so')
119 | except sqlite3.OperationalError:
120 | con.load_extension('libspatialite.so')
121 |
122 | In case loading extensions is not permitted you might need to install the package `pysqlite2`
123 | together with a static build of `sqlite3`. See the script `pyroSAR/install/install_deps.sh` for instructions.
124 | There you can also find instructions on how to install `spatialite` from source.
125 | To test `pysqlite2` you can import it as follows and then run the test above:
126 |
127 | .. code-block:: python
128 |
129 | from pysqlite2 import dbapi2 as sqlite3
130 |
131 | Installing this package is likely to cause problems with the `sqlite3` library installed on the system.
132 | Thus, it is safer to build a static `sqlite3` library for it (see installation script).
133 |
134 | GAMMA
135 | -----
136 | GAMMA's home directory, defined via the environment variable 'GAMMA_HOME', is expected to end either as `GAMMA_SOFTWARE-<version>` or `GAMMA_SOFTWARE/<version>`.
137 | If this differs in your installation and cannot be changed, a workaround is to adjust the expected pattern in :class:`~pyroSAR.examine.ExamineGamma`.
138 |
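   | For example, the variable can be set in Python before importing the GAMMA API; the path is a placeholder to be
   | adapted to the local installation:
   |
   | .. code-block:: python
   |
   | import os
   |
   | # must be set before importing pyroSAR's GAMMA functionality
   | os.environ['GAMMA_HOME'] = '/path/to/GAMMA_SOFTWARE-<version>'
   |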
--------------------------------------------------------------------------------
/docs/source/general/logging.rst:
--------------------------------------------------------------------------------
1 | #######
2 | Logging
3 | #######
4 |
5 | pyroSAR makes use of the :mod:`logging` module to display status messages for running processes.
6 | See `Logging HOWTO `_ for a basic tutorial.
7 | To display log messages you may add one of the following examples to your script:
8 |
9 | .. code-block:: python
10 |
11 | import logging
12 |
13 | # basic info
14 | logging.basicConfig(level=logging.INFO)
15 |
16 | # basic info with some message filtering
17 | logging.basicConfig(format='%(levelname)s:%(message)s', level=logging.INFO)
18 |
19 | # detailed debug info
20 | logging.basicConfig(level=logging.DEBUG)
21 |
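   | Since pyroSAR's loggers are created via ``logging.getLogger(__name__)``, their output can also be controlled
   | selectively; a minimal sketch using only standard :mod:`logging` functionality:
   |
   | .. code-block:: python
   |
   | import logging
   |
   | # keep third-party output quiet but show debug messages from pyroSAR
   | logging.basicConfig(level=logging.WARNING)
   | logging.getLogger('pyroSAR').setLevel(logging.DEBUG)
   |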
--------------------------------------------------------------------------------
/docs/source/general/processing.rst:
--------------------------------------------------------------------------------
1 | #################################
2 | SAR Image Handling and Processing
3 | #################################
4 |
5 | Image Metadata
6 | ==============
7 |
8 | Let's start working with our actual satellite data.
9 | At first we load the scene into pyroSAR for analysis of the metadata:
10 |
11 | .. code-block:: python
12 |
13 | from pyroSAR import identify
14 | name = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'
15 | scene = identify(name)
16 | print(scene)
17 |
18 | This will automatically identify the scene, scan it for metadata and print a summary of selected metadata entries.
19 | Several attribute names (e.g. `sensor` and `acquisition_mode`) are standardized for all SAR scenes.
20 | Further entries, whose names are not standardized, can be found in a dictionary `scene.meta`.
21 | The function :func:`~pyroSAR.drivers.identify` will loop through all SAR image classes (:mod:`pyroSAR.drivers`) and return an
22 | object of the class that was successful in identifying the scene (:class:`~pyroSAR.drivers.SAFE` in this case).
23 |
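   | For example, the standardized attributes can be accessed directly, while all further entries can be inspected
   | via the dictionary:
   |
   | .. code-block:: python
   |
   | print(scene.sensor, scene.acquisition_mode)
   | print(sorted(scene.meta.keys()))
   |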
24 | .. _database-handling:
25 |
26 | Database Handling
27 | =================
28 |
29 | Now that we have made ourselves familiar with the scene, we can import its metadata into an SQLite database using class
30 | :class:`~pyroSAR.drivers.Archive`:
31 |
32 | .. code-block:: python
33 |
34 | from pyroSAR import Archive
35 | dbfile = 'scenes.db'
36 | with Archive(dbfile) as archive:
37 | archive.insert(scene)
38 |
39 | `dbfile` is either a file containing an already existing database or the name of one to be created.
40 | In this case an SQLite database with SpatiaLite extension is created.
41 | Alternatively, PostgreSQL + PostGIS can be used.
42 |
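   | For the PostgreSQL variant, the connection is configured via additional keyword arguments. This is a hedged
   | sketch; the parameters shown (``postgres``, ``user``, ``password``, ``host``, ``port``) are assumptions to be
   | verified against the :class:`~pyroSAR.drivers.Archive` documentation of the installed version:
   |
   | .. code-block:: python
   |
   | from pyroSAR import Archive
   |
   | # 'scenes' is the name of the PostgreSQL database (assumed to exist)
   | with Archive('scenes', postgres=True, user='user', password='password',
   |              host='localhost', port=5432) as archive:
   |     archive.insert(scene)
   |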
43 | Let's assume our database contains a number of scenes and we want to select some for processing.
44 | We have a shapefile, which contains a geometry delimiting our test site for which we want to
45 | process some Sentinel-1 scenes.
46 | We already processed some scenes in the past and the results are stored in a directory
47 | `outdir`. We only want to select scenes which have not been processed to this directory before.
48 | Furthermore, we are only interested in scenes acquired in Ground Range Detected (GRD) Interferometric Wide
49 | Swath mode (IW), which contain a VV band.
50 |
51 | .. code-block:: python
52 |
53 | from spatialist import Vector
54 | archive = Archive('scenes.db')
55 | outdir = '/path/to/processed/results'
56 | maxdate = '20171231T235959'
57 | with Vector('site.shp') as site:
58 | selection_proc = archive.select(vectorobject=site,
59 | processdir=outdir,
60 | maxdate=maxdate,
61 | sensor=('S1A', 'S1B'),
62 | product='GRD',
63 | acquisition_mode='IW',
64 | vv=1)
65 | archive.close()
66 |
67 | Here we use the vector geometry driver of the package `spatialist`, which is developed alongside pyroSAR.
68 | The :class:`spatialist.Vector <spatialist.vector.Vector>` object is then passed to the method
69 | :meth:`Archive.select <pyroSAR.drivers.Archive.select>`.
70 |
71 | .. _processing:
72 |
73 | Processing
74 | ==========
75 |
76 | The returned `selection_proc` is a list of file names for the scenes we selected from the database, which we can now
77 | pass to a processing function:
78 |
79 | .. code-block:: python
80 |
81 | from pyroSAR.snap import geocode
82 |
83 | # the target pixel spacing in meters
84 | spacing = 20
85 |
86 | for scene in selection_proc:
87 | geocode(infile=scene, outdir=outdir, tr=spacing, scaling='db', shapefile=site)
88 |
89 | The function :func:`snap.geocode <pyroSAR.snap.util.geocode>` is a basic utility for SNAP.
90 | It will perform all necessary steps to subset, resample, topographically normalize, geocode and scale the input
91 | image and write GeoTIFF files to the selected output directory.
92 | All necessary files like orbit state vectors and SRTM DEM tiles are downloaded automatically in the background by SNAP.
93 | SNAP is most conveniently used with workflow XMLs. The function geocode parses a workflow for the particular scene,
94 | parametrizes it (depending on the scene type and selected processing parameters) and writes it to the output directory.
95 | It then calls the command `gpt`, which is SNAP's command line interface, on the workflow to execute the processing steps.
96 |
97 |
--------------------------------------------------------------------------------
/docs/source/general/snap.rst:
--------------------------------------------------------------------------------
1 | ########
2 | SNAP API
3 | ########
4 |
5 | pyroSAR offers a collection of tools to parse SNAP XML workflows and execute them with SNAP's Graph Processing Tool
6 | (`GPT `_). All functionality is
7 | purely performed in Python and only the command line calls to GPT interact with SNAP. SNAP's Python API
8 | `snappy `_ is not used
9 | due to installation limitations and processing performance.
10 |
11 | The following serves as a minimal example to showcase the core API functionality. A more complex example is given with
12 | function :func:`pyroSAR.snap.util.geocode`.
13 |
14 | .. code-block:: python
15 |
16 | from pyroSAR.snap.auxil import parse_recipe, parse_node
17 |
18 | workflow = parse_recipe('blank')
19 |
20 | read = parse_node('Read')
21 | read.parameters['file'] = 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'
22 | read.parameters['formatName'] = 'SENTINEL-1'
23 | workflow.insert_node(read)
24 |
25 | tnr = parse_node('ThermalNoiseRemoval')
26 | workflow.insert_node(tnr, before=read.id)
27 |
28 | bnr = parse_node('Remove-GRD-Border-Noise')
29 | bnr.parameters['selectedPolarisations'] = ['VV']
30 | workflow.insert_node(bnr, before=tnr.id)
31 |
32 | write = parse_node('Write')
33 | write.parameters['file'] = 'outname'
34 | write.parameters['formatName'] = 'BEAM-DIMAP'
35 | workflow.insert_node(write, before=bnr.id)
36 |
37 | workflow.write('outname_proc')
38 |
39 | Here, the function :func:`~pyroSAR.snap.auxil.parse_recipe` is first used to create an empty workflow object of type
40 | :class:`~pyroSAR.snap.auxil.Workflow`.
41 | Using the function :func:`~pyroSAR.snap.auxil.parse_node`, individual processing nodes can be loaded as
42 | :class:`~pyroSAR.snap.auxil.Node` objects and parameterized using a :class:`~pyroSAR.snap.auxil.Par` object via
43 | ``.parameters``.
44 | The method :meth:`~pyroSAR.snap.auxil.Workflow.insert_node` is then used to insert the nodes into the workflow including
45 | linking of the nodes by modifying the source node entries. E.g. `ThermalNoiseRemoval` is set as source of the
46 | newly inserted `Remove-GRD-Border-Noise` node. As a last step, the workflow is written to an XML file with method
47 | :meth:`~pyroSAR.snap.auxil.Workflow.write`.
48 |
49 | This XML file can then be passed to function :func:`~pyroSAR.snap.auxil.gpt` to process the workflow by internally
50 | calling the GPT command line tool:
51 |
52 | .. code-block:: python
53 |
54 | from pyroSAR.snap.auxil import gpt
55 |
56 | gpt('outname_proc.xml', tmpdir='.')
57 |
58 | workflow splitting
59 | ==================
60 |
61 | Simple workflows like the one shown above take only a few seconds to process, but the more processing nodes are added,
62 | the longer the execution takes. Moreover, executing one long workflow has been observed to take more time
63 | and consume more memory than executing each node individually in sequence. pyroSAR offers functionality to split long workflows
64 | into smaller groups and execute them in sequence with intermediate files being written in a temporary directory.
65 | First, the workflow nodes are grouped to contain a defined number of processing nodes, i.e. everything but `Read` and
66 | `Write`, using function :func:`~pyroSAR.snap.auxil.groupbyWorkers`:
67 |
68 | .. code-block:: python
69 |
70 | from pyroSAR.snap.auxil import groupbyWorkers
71 |
72 | groupbyWorkers('outname_proc.xml', n=1)
73 |
74 | This will return
75 |
76 | .. code-block:: python
77 |
78 | [['Read', 'ThermalNoiseRemoval'], ['Remove-GRD-Border-Noise', 'Write']]
79 |
80 | These groups can directly be passed to function :func:`~pyroSAR.snap.auxil.gpt` via parameter ``groups``.
81 | Internally the workflow is then split based on the groups and written to new XML files in a temporary directory using
82 | function :func:`~pyroSAR.snap.auxil.split`. In this case, two workflows would be created:
83 |
84 | - `Read` -> `ThermalNoiseRemoval` -> `Write`
85 | - `Read` -> `Remove-GRD-Border-Noise` -> `Write`
86 |
87 | These new files are then executed in sequence with intermediate `BEAM-DIMAP`
88 | files written in the same directory as the sub-workflow XML files. After processing this directory is deleted unless
89 | parameter ``cleanup`` of function :func:`~pyroSAR.snap.auxil.gpt` is set to ``False``.
90 |
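   | Putting both steps together, the grouped execution might look like this (a minimal sketch reusing the file
   | name from above):
   |
   | .. code-block:: python
   |
   | from pyroSAR.snap.auxil import gpt, groupbyWorkers
   |
   | groups = groupbyWorkers('outname_proc.xml', n=1)
   | gpt('outname_proc.xml', tmpdir='.', groups=groups)
   |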
91 | backwards compatibility
92 | =======================
93 |
94 | With new versions of SNAP, new parameters are introduced and others removed. If a new parameter is not listed in the
95 | node's XML description its default is used by SNAP during processing. If, however, a parameter is contained in the
96 | workflow that is no longer supported by SNAP, the processing will be terminated. This can easily happen if the workflow
97 | was created by an older version of SNAP. pyroSAR reads the error messages and, if an unknown parameter is mentioned,
98 | removes this parameter from the workflow, saves the modified workflow to a new file and executes that file instead.
99 |
100 | troubleshooting
101 | ===============
102 |
103 | SNAP as well as pyroSAR's SNAP API are constantly being developed and bugs are unfortunately inevitable.
104 | This section is intended to guide users to better interpret errors and unexpected behaviour.
105 |
106 | *The process is running but seems inactive without any progress.*
107 |
108 | This might be related to SNAP's inability to download needed DEM tiles.
109 | SNAP may get stuck in a loop, infinitely retrying to download the missing tiles.
110 | This can be identified by running gpt directly in the command line.
111 | However, when operating gpt through a Python subprocess, it is not possible to see those command line messages live.
112 | Only after the process has terminated can all messages be retrieved and written to log or error files.
113 |
114 | A simple approach to interpret such a behaviour is to first create a workflow XML file with
115 | :func:`~pyroSAR.snap.util.geocode`'s parameter ``test=True`` (so that only the XML is written but it is not executed):
116 |
117 | .. code-block:: python
118 |
119 | from pyroSAR.snap import geocode
120 | geocode(scene='S1A_IW_GRDH_1SDV_20200720T023849_20200720T023914_033532_03E2B5_2952.zip',
121 | outdir='/test', test=True)
122 |
123 |
124 | and then run gpt on it directly in the shell (i.e. outside of Python):
125 |
126 | ::
127 |
128 | gpt /test/S1A__IW___D_20200720T023849_VV_Orb_ML_TC_proc.xml
129 |
130 | This way one can directly see gpt's status, which in this case might be
131 |
132 | ::
133 |
134 | SEVERE: org.esa.snap.core.dataop.dem.ElevationFile: java.lang.reflect.InvocationTargetException
--------------------------------------------------------------------------------
/docs/source/index.rst:
--------------------------------------------------------------------------------
1 | ###################################
2 | Welcome to pyroSAR's documentation!
3 | ###################################
4 |
5 | General Topics
6 | ==============
7 |
8 | .. toctree::
9 | :maxdepth: 1
10 |
11 | general/installation
12 | general/filenaming
13 | general/configuration
14 | general/OSV
15 | general/DEM
16 | general/snap
17 | general/processing
18 | general/logging
19 |
20 | API Documentation
21 | =================
22 |
23 | .. toctree::
24 | :maxdepth: 1
25 |
26 | api/drivers
27 | api/snap
28 | api/gamma
29 | api/sentinel-1
30 | api/auxdata
31 | api/datacube
32 | api/ancillary
33 | api/examine
34 |
35 | About
36 | =====
37 |
38 | .. toctree::
39 | :maxdepth: 1
40 |
41 | about/projects
42 | about/changelog
43 | about/publications
44 | about/references
45 |
46 | Indices and tables
47 | ==================
48 |
49 | * :ref:`genindex`
50 | * :ref:`modindex`
51 | * :ref:`search`
52 |
--------------------------------------------------------------------------------
/docs/source/references.bib:
--------------------------------------------------------------------------------
1 | % Encoding: UTF-8
2 | @article{Ali2018,
3 | author = {Ali, I. and Cao, S. and Naeimi, V. and Paulik, C. and Wagner, W.},
4 | title = {Methods to Remove the Border Noise From Sentinel-1 Synthetic Aperture Radar Data: Implications and Importance For Time-Series Analysis},
5 | journal = {IEEE Journal of Selected Topics in Applied Earth Observations and Remote Sensing},
6 | volume = {11},
7 | number = {3},
8 | pages = {777-786},
9 | DOI = {10.1109/Jstars.2017.2787650},
10 | year = {2018},
11 | type = {Journal Article}
12 | }
13 |
14 | @techreport{Miranda2018,
15 | author = {Miranda, N. and Hajduch, G.},
16 | title = {Masking "No-value" Pixels on GRD Products generated by the Sentinel-1 ESA IPF},
17 | institution = {CLS},
18 | month = {29 January},
19 | url = {https://sentinel.esa.int/documents/247904/2142675/Sentinel-1-masking-no-value-pixels-grd-products-note},
20 | year = {2018},
21 | type = {Report}
22 | }
23 |
24 | @article{Small2011,
25 | author = {Small, D.},
26 | title = {Flattening Gamma: Radiometric Terrain Correction for SAR Imagery},
27 | journal = {IEEE Transactions on Geoscience and Remote Sensing},
28 | volume = {49},
29 | number = {8},
30 | pages = {3081-3093},
31 | DOI = {10.1109/Tgrs.2011.2120616},
32 | year = {2011},
33 | type = {Journal Article}
34 | }
35 |
36 | @inproceedings{Truckenbrodt2019,
37 | author = {Truckenbrodt, J. and Cremer, F. and Baris, I. and Eberle, J.},
38 | title = {pyroSAR: A Framework for Large-Scale SAR Satellite Data Processing},
39 | booktitle = {Big Data from Space},
40 | editor = {Soille, P. and Loekken, S. and Albani, S.},
41 | address = {Luxembourg},
42 | publisher = {Publications Office of the European Union},
43 | pages = {197-200},
44 | ISBN = {978-92-76-00034-1},
45 | DOI = {10.2760/848593},
46 | year = {2019},
47 | type = {Conference Proceedings}
48 | }
49 |
50 | @article{Truckenbrodt2019a,
51 | author = {Truckenbrodt, J. and Freemantle, T. and Williams, C. and Jones, T. and Small, D. and Dubois, C. and Thiel, C. and Rossi, C. and Syriou, A. and Giuliani, G.},
52 | title = {Towards Sentinel-1 SAR Analysis-Ready Data: A Best Practices Assessment on Preparing Backscatter Data for the Cube},
53 | journal = {Data},
54 | volume = {4},
55 | number = {3},
56 | ISSN = {2306-5729},
57 | DOI = {10.3390/data4030093},
58 | year = {2019},
59 | type = {Journal Article}
60 | }
61 |
62 | @article{Visvalingam1993,
63 | author = {Visvalingam, M. and Whyatt, J. D.},
64 | title = {Line Generalization by Repeated Elimination of Points},
65 | journal = {Cartographic Journal},
66 | volume = {30},
67 | number = {1},
68 | pages = {46-51},
69 | ISSN = {0008-7041},
70 | DOI = {10.1179/caj.1993.30.1.46},
71 | year = {1993},
72 | type = {Journal Article}
73 | }
74 |
--------------------------------------------------------------------------------
/environment-dev.yml:
--------------------------------------------------------------------------------
1 | name: ps_test_dev
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - coverage
7 | - pytest
--------------------------------------------------------------------------------
/environment-doc.yml:
--------------------------------------------------------------------------------
1 | name: ps_doc
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - python>=3.8
7 | - matplotlib
8 | - numpy<2.0
9 | - sphinx<7.0 # https://github.com/readthedocs/sphinx_rtd_theme/issues/1463
10 | - sphinxcontrib-bibtex>=2.2
11 | - pip
12 | - cairosvg
13 | - pip:
14 | - sphinxcontrib-svg2pdfconverter
15 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: ps_test
2 | channels:
3 | - conda-forge
4 | - defaults
5 | dependencies:
6 | - gdal>=2.4
7 | - geoalchemy2<0.14.0
8 | - libgdal
9 | - libspatialite>=5.1.0
10 | - lxml
11 | - numpy<2.0
12 | - packaging
13 | - pillow
14 | - progressbar2
15 | - psycopg2
16 | - python>=3.8
17 | - pyyaml
18 | - requests
19 | - shapely
20 | - spatialist>=0.15.2
21 | - sqlalchemy>=1.4,<2.0
22 | - sqlalchemy-utils>=0.37
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = ["setuptools>=45", "setuptools_scm[toml]>=6.2", "wheel"]
3 |
4 | [project]
5 | name = "pyroSAR"
6 | description = "a framework for large-scale SAR satellite data processing"
7 | requires-python = ">=3.8"
8 | license = { file = "LICENSE.txt" }
9 | maintainers = [
10 | { name = "John Truckenbrodt", email = "john.truckenbrodt@dlr.de" }
11 | ]
12 | classifiers = [
13 | "License :: OSI Approved :: MIT License",
14 | "Operating System :: Microsoft :: Windows",
15 | "Operating System :: POSIX :: Linux",
16 | "Programming Language :: Python :: 3"
17 | ]
18 | dynamic = ["version", "readme", "dependencies"]
19 |
20 | [project.urls]
21 | repository = "https://github.com/johntruckenbrodt/pyroSAR"
22 | documentation = "https://pyrosar.readthedocs.io/en/latest/"
23 |
24 | [project.optional-dependencies]
25 | test = ["pytest"]
26 | docs = ["sphinx", "sphinxcontrib-bibtex", "sphinxcontrib-svg2pdfconverter", "cairosvg"]
27 |
28 | [tool.setuptools.dynamic]
29 | dependencies = { file = ["requirements.txt"] }
30 | readme = { file = ["README.md"], content-type = "text/markdown" }
31 |
32 | [tool.setuptools_scm]
33 |
--------------------------------------------------------------------------------
/pyroSAR/ERS/__init__.py:
--------------------------------------------------------------------------------
1 | from .auxil import passdb_create, passdb_query
2 | from .mapping import get_angles_resolution
--------------------------------------------------------------------------------
/pyroSAR/ERS/auxil.py:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # tools for processing ERS satellite data
3 |
4 | # Copyright (c) 2014-2019, the pyroSAR Developers.
5 |
6 | # This file is part of the pyroSAR Project. It is subject to the
7 | # license terms in the LICENSE.txt file found in the top-level
8 | # directory of this distribution and at
9 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
10 | # No part of the pyroSAR project, including this file, may be
11 | # copied, modified, propagated, or distributed except according
12 | # to the terms contained in the LICENSE.txt file.
13 | ################################################################################
14 | import os
15 | import math
16 | from spatialist import sqlite_setup
17 | from spatialist.ancillary import HiddenPrints
18 | from datetime import datetime, timedelta
19 |
20 | import logging
21 | log = logging.getLogger(__name__)
22 |
23 |
24 | def passdb_create(ers1passes, ers2passes, dbname):
25 | """
26 | create a sqlite database from ERS pass tables
27 | downloaded from http://www.deos.tudelft.nl/ers/phases/starttimes.html.
28 | There you can also find additional information on the file structure and background.
29 | The fields `phase`, `cycle`, `pass`, `starttime` and `endtime` are read from the table.
30 | The latter two are converted to format YYYY-MM-DD HH:MM:SS.SSS.
31 | The fields `cycle` and `pass` are converted to integer.
32 | All five fields plus the name of the sensor (`ERS1` or `ERS2`) are then stored to the database.
33 |
34 | Parameters
35 | ----------
36 | ers1passes: str
37 | the name of the ERS-1 pass table
38 | ers2passes: str
39 | the name of the ERS-2 pass table
40 | dbname: str
41 | the name of the database to write the results to
42 |
43 | Returns
44 | -------
45 |
46 | """
47 | columns = {'satellite': 'TEXT',
48 | 'phase': 'TEXT',
49 | 'cycleNumber': 'INTEGER',
50 | 'passNumber': 'INTEGER',
51 | 'starttime': 'TEXT',
52 | 'endtime': 'TEXT'}
53 |
54 | con = sqlite_setup(driver=dbname)
55 |
56 | create_string = '''CREATE TABLE if not exists data ({})'''.format(
57 | ', '.join([' '.join(x) for x in columns.items()]))
58 | cursor = con.cursor()
59 | cursor.execute(create_string)
60 |
61 | def time_convert(timestring):
62 | dt = datetime(1985, 1, 1) + timedelta(seconds=float(timestring))
63 | return dt.strftime('%Y-%m-%d %H:%M:%S.%f')
64 |
65 | insert_string = '''INSERT INTO data({0}) VALUES({1})''' \
66 | .format(', '.join(columns.keys()),
67 | ', '.join(['?'] * len(columns.keys())))
68 |
69 | for satellite, filename in [('ERS1', ers1passes), ('ERS2', ers2passes)]:
70 | with open(filename, 'r') as table:
71 | for line in table:
72 | phase, cycle, passNumber, starttime, endtime = line.split()[0:5]
73 | insertion = [satellite, phase,
74 | int(cycle), int(passNumber),
75 | time_convert(starttime), time_convert(endtime)]
76 | if satellite == 'ERS1':
77 | log.info(tuple(insertion))
78 | cursor.execute(insert_string, tuple(insertion))
79 | con.commit()
80 | con.close()
81 |
82 |
83 | def passdb_query(satellite, acqtime, dbname=None):
84 | """
85 | query the orbit information for an ERS acquisition
86 |
87 | Parameters
88 | ----------
89 | satellite: {'ERS1', 'ERS2'}
90 | the name of the satellite
91 | acqtime: datetime.datetime
92 | the acquisition of the satellite image
93 | dbname: str, None
94 | the name of the database as created by :func:`passdb_create`. If None, the default database delivered with
95 | pyroSAR is used
96 |
97 | Returns
98 | -------
99 |
100 | """
101 | if satellite == 'ERS1':
102 | # the last timestamp for which specific ERS-1 orbit information is present,
103 | # afterwards that of ERS-2 is used
104 | last = datetime.strptime('1996-06-02 21:59:26.618659', '%Y-%m-%d %H:%M:%S.%f')
105 | sat = 'ERS2' if acqtime > last else 'ERS1'
106 | elif satellite == 'ERS2':
107 | sat = 'ERS2'
108 | else:
109 | raise ValueError("satellite must be either 'ERS1' or 'ERS2', was '{}'".format(satellite))
110 |
111 | if dbname is None:
112 | dbname = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'data', 'erspasses.db')
113 | with HiddenPrints():
114 | con = sqlite_setup(driver=dbname)
115 |
116 | cursor = con.cursor()
117 | acqtime_str = acqtime.strftime('%Y-%m-%d %H:%M:%S.%f')
118 | query = '''SELECT * FROM data WHERE satellite = ? AND starttime <= ? AND endtime >= ?'''
119 | cursor.execute(query, (sat, acqtime_str, acqtime_str))
120 |
121 | fetch = cursor.fetchall()
122 | if len(fetch) == 0:
123 | cursor.execute(query, ('ERS2', acqtime_str, acqtime_str))
124 | fetch = cursor.fetchall()
125 |
126 | result = dict(zip(['satellite', 'phase', 'cycleNumber', 'passNumber'], fetch[0][0:4]))
127 | result['satellite'] = satellite
128 | result['orbitNumber_rel'] = int(math.ceil(result['passNumber'] / 2.))
129 | return result
130 |
--------------------------------------------------------------------------------
/pyroSAR/ERS/data/erspasses.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/pyroSAR/ERS/data/erspasses.db
--------------------------------------------------------------------------------
/pyroSAR/ERS/mapping.py:
--------------------------------------------------------------------------------
1 | ANGLES_RESOLUTION = {
2 | 'ERS1': {
3 | 'IMP': {
4 | 'IS2': {
5 | 'near': 20.1,
6 | 'far': 25.9,
7 | 'range': 25.04,
8 | 'azimuth': 21.51,
9 | 'nesz_near': 26.8,
10 | 'nesz_far': 26
11 | },
12 | 'std_dev': 20
13 | },
14 | 'IMS': {
15 | 'IS2': {
16 | 'near': 20.1,
17 | 'far': 25.9,
18 | 'range': 5.32,
19 | 'azimuth': 9.66,
20 | 'nesz_near': 26.8,
21 | 'nesz_far': 26
22 | },
23 | 'std_dev': 20
24 | }
25 | },
26 | 'ERS2': {
27 | 'IMP': {
28 | 'IS2': {
29 | 'near': 20.1,
30 | 'far': 25.9,
31 | 'range': 21.63,
32 | 'azimuth': 25.19,
33 | 'nesz_near': 23.1,
34 | 'nesz_far': 21.5
35 | },
36 | 'std_dev': 20
37 | },
38 | 'IMS': {
39 | 'IS2': {
40 | 'near': 20.1,
41 | 'far': 25.9,
42 | 'range': 5.33,
43 | 'azimuth': 9.83,
44 | 'nesz_near': 23.1,
45 | 'nesz_far': 21.5
46 | },
47 | 'std_dev': 20
48 | }
49 | },
50 | 'ASAR': {
51 | 'IMP': {
52 | 'IS1': {
53 | 'near': 14.7,
54 | 'far': 22.2,
55 | 'range': 30.86,
56 | 'azimuth': 22.14,
57 | 'nesz_near': 25.1,
58 | 'nesz_far': 19.2
59 | },
60 | 'IS2': {
61 | 'near': 19.2,
62 | 'far': 26.1,
63 | 'range': 24.90,
64 | 'azimuth': 22.14,
65 | 'nesz_near': 21.8,
66 | 'nesz_far': 20.5
67 | },
68 | 'IS3': {
69 | 'near': 25.7,
70 | 'far': 31.1,
71 | 'range': 24.84,
72 | 'azimuth': 22.14,
73 | 'nesz_near': 22.6,
74 | 'nesz_far': 20.5
75 | },
76 | 'IS4': {
77 | 'near': 30.7,
78 | 'far': 36.1,
79 | 'range': 25.56,
80 | 'azimuth': 22.14,
81 | 'nesz_near': 22.3,
82 | 'nesz_far': 19.1
83 | },
84 | 'IS5': {
85 | 'near': 35.7,
86 | 'far': 39.2,
87 | 'range': 25.73,
88 | 'azimuth': 22.14,
89 | 'nesz_near': 21.4,
90 | 'nesz_far': 19
91 | },
92 | 'IS6': {
93 | 'near': 38.8,
94 | 'far': 42.7,
95 | 'range': 26.15,
96 | 'azimuth': 22.14,
97 | 'nesz_near': 24,
98 | 'nesz_far': 21.2
99 | },
100 | 'IS7': {
101 | 'near': 42.4,
102 | 'far': 45.1,
103 | 'range': 26.59,
104 | 'azimuth': 22.14,
105 | 'nesz_near': 23,
106 | 'nesz_far': 20.4
107 | },
108 | 'std_dev': 5
109 | },
110 | 'IMS': {
111 | 'IS1': {
112 | 'near': 14.7,
113 | 'far': 22.2,
114 | 'range': 5.77,
115 | 'azimuth': 8.43,
116 | 'nesz_near': 25.1,
117 | 'nesz_far': 19.2
118 | },
119 | 'IS2': {
120 | 'near': 19.2,
121 | 'far': 26.1,
122 | 'range': 5.77,
123 | 'azimuth': 8.43,
124 | 'nesz_near': 21.8,
125 | 'nesz_far': 20.5
126 | },
127 | 'IS3': {
128 | 'near': 25.7,
129 | 'far': 31.1,
130 | 'range': 5.77,
131 | 'azimuth': 8.43,
132 | 'nesz_near': 22.6,
133 | 'nesz_far': 20.5
134 | },
135 | 'IS4': {
136 | 'near': 30.7,
137 | 'far': 36.1,
138 | 'range': 5.77,
139 | 'azimuth': 8.43,
140 | 'nesz_near': 22.3,
141 | 'nesz_far': 19.1
142 | },
143 | 'IS5': {
144 | 'near': 35.7,
145 | 'far': 39.2,
146 | 'range': 5.77,
147 | 'azimuth': 8.43,
148 | 'nesz_near': 21.4,
149 | 'nesz_far': 19
150 | },
151 | 'IS6': {
152 | 'near': 38.8,
153 | 'far': 42.7,
154 | 'range': 5.77,
155 | 'azimuth': 8.43,
156 | 'nesz_near': 24,
157 | 'nesz_far': 21.2
158 | },
159 | 'IS7': {
160 | 'near': 42.4,
161 | 'far': 45.1,
162 | 'range': 5.77,
163 | 'azimuth': 8.43,
164 | 'nesz_near': 23,
165 | 'nesz_far': 20.4
166 | },
167 | 'std_dev': 5
168 | },
169 | 'APP': {
170 | 'IS1': {
171 | 'near': 14.36,
172 | 'near-new': 14.36,
173 | 'far': 22.32,
174 | 'far-new': 22.32,
175 | 'range': 31.22,
176 | 'range-new': 31.22,
177 | 'azimuth': 27.45,
178 | 'nesz_near': 25.1,
179 | 'nesz_far': 19.2
180 | },
181 | 'IS2': {
182 | 'near': 18.68,
183 | 'near-new': 20.3,
184 | 'far': 26.1,
185 | 'far-new': 26.22,
186 | 'range': 25.23,
187 | 'range-new': 24.10,
188 | 'azimuth': 27.45,
189 | 'nesz_near': 21.8,
190 | 'nesz_far': 20.5
191 | },
192 | 'IS3': {
193 | 'near': 25.78,
194 | 'near-new': 26.73,
195 | 'far': 31.27,
196 | 'far-new': 31.27,
197 | 'range': 24.74,
198 | 'range-new': 24.30,
199 | 'azimuth': 27.45,
200 | 'nesz_near': 22.6,
201 | 'nesz_far': 20.5
202 | },
203 | 'IS4': {
204 | 'near': 30.89,
205 | 'near-new': 31.28,
206 | 'far': 36.2,
207 | 'far-new': 36.2,
208 | 'range': 25.46,
209 | 'range-new': 25.30,
210 | 'azimuth': 27.45,
211 | 'nesz_near': 22.3,
212 | 'nesz_far': 19.1
213 | },
214 | 'IS5': {
215 | 'near': 35.68,
216 | 'near-new': 36.81,
217 | 'far': 39.35,
218 | 'far-new': 39.35,
219 | 'range': 25.70,
220 | 'range-new': 25.35,
221 | 'azimuth': 27.45,
222 | 'nesz_near': 21.4,
223 | 'nesz_far': 19
224 | },
225 | 'IS6': {
226 | 'near': 39.02,
227 | 'near-new': 39.61,
228 | 'far': 42.76,
229 | 'far-new': 42.76,
230 | 'range': 26.07,
231 | 'range-new': 25.90,
232 | 'azimuth': 27.45,
233 | 'nesz_near': 24,
234 | 'nesz_far': 21.2
235 | },
236 | 'IS7': {
237 | 'near': 42.48,
238 | 'near-new': 43.3,
239 | 'far': 45.27,
240 | 'far-new': 45.27,
241 | 'range': 26.53,
242 | 'range-new': 26.32,
243 | 'azimuth': 27.45,
244 | 'nesz_near': 23,
245 | 'nesz_far': 20.4
246 | },
247 | 'std_dev': 10
248 | },
249 | 'APS': {
250 | 'IS1': {
251 | 'near': 14.36,
252 | 'far': 22.32,
253 | 'range': 4.3,
254 | 'azimuth': 8.39,
255 | 'nesz_near': 25.1,
256 | 'nesz_far': 19.2
257 | },
258 | 'IS2': {
259 | 'near': 18.68,
260 | 'far': 26.1,
261 | 'range': 4.3,
262 | 'azimuth': 8.39,
263 | 'nesz_near': 21.8,
264 | 'nesz_far': 20.5
265 | },
266 | 'IS3': {
267 | 'near': 25.78,
268 | 'far': 31.27,
269 | 'range': 4.3,
270 | 'azimuth': 8.39,
271 | 'nesz_near': 22.6,
272 | 'nesz_far': 20.5
273 | },
274 | 'IS4': {
275 | 'near': 30.89,
276 | 'far': 36.2,
277 | 'range': 4.3,
278 | 'azimuth': 8.39,
279 | 'nesz_near': 22.3,
280 | 'nesz_far': 19.1
281 | },
282 | 'IS5': {
283 | 'near': 35.68,
284 | 'far': 39.35,
285 | 'range': 4.3,
286 | 'azimuth': 8.39,
287 | 'nesz_near': 21.4,
288 | 'nesz_far': 19
289 | },
290 | 'IS6': {
291 | 'near': 39.02,
292 | 'far': 42.76,
293 | 'range': 4.3,
294 | 'azimuth': 8.39,
295 | 'nesz_near': 24,
296 | 'nesz_far': 21.2
297 | },
298 | 'IS7': {
299 | 'near': 42.48,
300 | 'far': 45.27,
301 | 'range': 4.3,
302 | 'azimuth': 8.39,
303 | 'nesz_near': 23,
304 | 'nesz_far': 20.4
305 | },
306 | 'std_dev': 10
307 | },
308 | 'WSM': {
309 | 'WS': {
310 | 'near': 14.21,
311 | 'far': 42.63,
312 | 'range': 150,
313 | 'azimuth': 150,
314 | 'nesz_near': 19.5,
315 | 'nesz_far': 23.5
316 | },
317 | 'std_dev': 20
318 | }
319 | }
320 | }
321 |
322 |
323 | def get_angles_resolution(sensor, mode, swath_id, date):
   | """
   | Look up the near and far incidence angle, the range and azimuth resolution and
   | the near and far NESZ for a sensor/mode/swath combination from ANGLES_RESOLUTION.
   | For ASAR APP acquisitions after 2009-05-28, the updated ('-new') values are used.
   | """
324 | string_new = ''
325 | if mode == 'APP' and date > '20090528':
326 | string_new = '-new'
327 | data = ANGLES_RESOLUTION[sensor][mode][swath_id]
328 | return (data[f'near{string_new}'], data[f'far{string_new}'],
329 | data[f'range{string_new}'], data['azimuth'],
330 | data['nesz_near'], data['nesz_far'])
331 |
--------------------------------------------------------------------------------
/pyroSAR/S1/__init__.py:
--------------------------------------------------------------------------------
1 | __author__ = 'john'
2 |
3 | from .auxil import OSV, removeGRDBorderNoise
4 |
--------------------------------------------------------------------------------
/pyroSAR/S1/linesimplify.py:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Utilities for simplification of lines used by pyroSAR for border noise removal
3 |
4 | # Copyright (c) 2017-2020, the pyroSAR Developers.
5 |
6 | # This file is part of the pyroSAR Project. It is subject to the
7 | # license terms in the LICENSE.txt file found in the top-level
8 | # directory of this distribution and at
9 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
10 | # No part of the pyroSAR project, including this file, may be
11 | # copied, modified, propagated, or distributed except according
12 | # to the terms contained in the LICENSE.txt file.
13 | ###############################################################################
14 |
15 | from osgeo import ogr
16 | import numpy as np
17 | from spatialist.ancillary import rescale
18 | from .polysimplify import VWSimplifier
19 |
20 |
21 | import matplotlib
22 | import matplotlib.pyplot as plt
23 | from matplotlib.patches import Polygon
24 | from matplotlib.collections import PatchCollection
25 | matplotlib.rcParams['font.size'] = 12
26 |
27 |
28 | def simplify(x, y, maxpoints=20):
29 | x = list(map(float, x))
30 | y = list(map(float, y))
31 | pts = np.array(list(zip(x, y)))
32 | simplifier = VWSimplifier(pts)
33 | sqd = []
34 | iter_range = range(2, maxpoints + 1)
35 | for i in iter_range:
36 | VWpts = simplifier.from_number(i)
37 | xn, yn = zip(*VWpts)
38 | out = np.sum((y - np.interp(x, xn, yn)) ** 2)
39 | sqd.append(out)
40 | # sqd /= max(sqd)
41 | if min(sqd) == max(sqd):
42 | VWpts = simplifier.from_number(2)
43 | return VWpts
44 | else:
45 | sqd = rescale(sqd)
46 | # plt.plot(sqd)
47 | # plt.show()
48 | # iter = (np.array(iter_range) - 2) / (maxpoints - 2.)
49 | # plt.plot(iter_range, sqd, label='residual')
50 | # plt.plot(iter_range, iter, color='r', label='iteration')
51 | # plt.plot(iter_range, iter + sqd, color='g', label='residual+iteration')
52 | # plt.legend(loc='upper center', shadow=True)
53 | # plt.show()
54 | # npoints = np.argmin(iter + sqd) + 2
55 | npoints = np.argmax(np.array(sqd) < 0.01) + 2
56 | VWpts = simplifier.from_number(npoints)
57 | return VWpts
58 |
59 |
60 | def createPoly(xn, yn, xmax, ymax, plot=False):
61 | """
62 | create an OGR geometry from a sequence of indices
63 |
64 | Parameters
65 | ----------
66 | xn: numpy.ndarray
67 | the x indices of the points
68 | yn: numpy.ndarray
69 | the y indices of the points
70 | xmax: int or float
71 | the maximum x index value
72 | ymax: int or float
73 | the maximum y index value
   | plot: bool
   | plot the resulting polygon?
74 |
75 | Returns
76 | -------
77 | osgeo.ogr.Geometry
78 | """
79 | ring = ogr.Geometry(ogr.wkbLinearRing)
80 | ring.AddPoint_2D(0, 0)
81 | for item in zip(xn, yn):
82 | item = list(map(int, item))
83 | if item != [0, 0] and item != [xmax, ymax]:
84 | ring.AddPoint_2D(item[0], item[1])
85 | ring.AddPoint_2D(xmax, ymax)
86 | ring.AddPoint_2D(xmax, 0)
87 | ring.CloseRings()
88 | poly = ogr.Geometry(ogr.wkbPolygon)
89 | poly.AddGeometry(ring)
90 | if plot:
91 | fig, ax = plt.subplots()
92 | pts = ring.GetPoints()
93 | arr = np.array(pts)
94 | polygon = Polygon(arr, True)
95 | p = PatchCollection([polygon], cmap=matplotlib.cm.jet, alpha=0.4)
96 | ax.add_collection(p)
97 | ax.autoscale_view()
98 | plt.scatter(arr[:, 0], arr[:, 1], s=10, color='red')
99 | plt.show()
100 | return poly
101 |
102 |
103 | def reduce(seq, maxpoints=20, straighten=False, plot=False):
104 | """
105 | reduce the complexity of a line; the following steps are performed:
106 | - simplify the line using the Visvalingam-Whyatt method
107 | - iteratively add points on the original line back to the simplified line
108 | until the polygon spanned by the simplified line and (xmin, ymin) does not
109 | contain any further points of the original line; the polygon area is
110 | expected to only cover valid pixels of the image
111 | - optionally further straighten the result for smoother edges
112 |
113 | Parameters
114 | ----------
115 | seq: numpy.ndarray
116 | the 1D line sequence to be simplified
117 | maxpoints: int
118 | the maximum number of points in the simplified sequence
119 | straighten: bool
120 | perform additional straightening on the simplified line?
121 | plot: bool
122 | plot the results?
123 |
124 | Returns
125 | -------
126 | numpy.ndarray
127 | the simplified line sequence
128 | """
129 | if min(seq) == max(seq):
130 | return np.array(seq)
131 | x = list(range(0, len(seq)))
132 | if plot:
133 | plt.plot(seq, label='ESA-corrected')
134 | # simplify the sequence using the Visvalingam-Whyatt algorithm
135 | VWpts = simplify(x, seq, maxpoints)
136 | xn, yn = [list(x) for x in zip(*VWpts)]
137 | if plot:
138 | plt.plot(xn, yn, linewidth=2, color='r', label='VW-simplified')
139 | simple = np.interp(x, xn, yn)
140 | # create a list of OGR points for the original border
141 | points = []
142 | for xi, yi in enumerate(seq):
143 | point = ogr.Geometry(ogr.wkbPoint)
144 | point.AddPoint(int(xi), int(yi))
145 | points.append(point)
146 | points = np.array(points)
147 | while True:
148 | # create a polygon containing all pixels inside the simplified border
149 | # i.e., containing the area considered valid
150 | poly = createPoly(xn, yn, seq.size, int(max(seq)))
151 | # create an OGR line from the simplified border points
152 | line = ogr.Geometry(ogr.wkbLineString)
153 | for xi, yi in zip(xn, yn):
154 | line.AddPoint(xi, yi)
155 | # compute the distance of each original point to the simplified line
156 | dists = np.array([line.Distance(point) for point in points])
157 | # check which points are inside of the polygon
158 | contain = np.array([point.Within(poly) for point in points])
159 | # remove points outside the polygon and stop if
160 | # no further points outside the polygon exist
161 | dists[~contain] = 0
162 | points = points[(dists > 0)]
163 | dists = dists[(dists > 0)]
164 | if len(dists) == 0:
165 | break
166 | # select the point with the largest distance to the simplified
167 | # line and add it to the list of simplified points
168 | # this reduces the size of the polygon and thus the area considered valid
169 | candidate = points[np.argmax(dists)]
170 | cp = candidate.GetPoint()
171 | index = np.argmin(np.array(xn) < cp[0])
172 | xn.insert(index, cp[0])
173 | yn.insert(index, cp[1])
174 | if plot:
175 | plt.plot(xn, yn, linewidth=2, color='limegreen', label='corrected')
176 |
177 | # further straighten the line segments
178 | # def straight(xn, yn, VWpts):
179 | # indices = [i for i in range(0, len(xn)) if (xn[i], yn[i]) in VWpts]
180 | # log.info(indices)
181 | # for i, j in enumerate(indices):
182 | # if i < (len(indices) - 1):
183 | # if indices[i + 1] > j + 1:
184 | # dx = abs(xn[j] - xn[indices[i + 1]])
185 | # dy = abs(yn[j] - yn[indices[i + 1]])
186 | # if dx > dy:
187 | # seg_y = yn[j:indices[i + 1] + 1]
188 | # for k in range(j, indices[i + 1] + 1):
189 | # yn[k] = min(seg_y)
190 | # return yn
191 |
192 | def straight(xn, yn, VWpts):
193 | indices = [i for i in range(0, len(xn)) if (xn[i], yn[i]) in VWpts]
194 | xn_new = []
195 | yn_new = []
196 | # make all line segments horizontal or vertical
197 | for index in range(len(indices) - 1):
198 | i = indices[index]
199 | j = indices[index + 1]
200 | ymin = min(yn[i:j + 1])
201 | xn_new.extend([xn[i], xn[j]])
202 | yn_new.extend([ymin, ymin])
203 | # shift horizontal lines down if the preceding horizontal line has a lower y value
204 | # but only if the shift is less than the tolerance
205 | tolerance = 15
206 | for i in range(len(xn_new) - 2):
207 | if yn_new[i] == yn_new[i + 1]:
208 | if yn_new[i] < yn_new[i + 2] and abs(yn_new[i] - yn_new[i + 2]) < tolerance:
209 | yn_new[i + 2] = yn_new[i]
210 | yn_new[i + 3] = yn_new[i]
211 | elif (yn_new[i] > yn_new[i + 2]) \
212 | and (yn_new[i + 2] == yn_new[i + 3]) \
213 | and abs(yn_new[i] - yn_new[i + 2]) < tolerance:
214 | yn_new[i] = yn_new[i + 2]
215 | yn_new[i + 1] = yn_new[i + 2]
216 | return xn_new, yn_new
217 |
218 | if straighten:
219 | xn, yn = straight(xn, yn, VWpts)
220 | if plot:
221 | plt.plot(xn, yn, linewidth=2, color='m', label='straightened')
222 | if plot:
223 | plt.legend()
224 | plt.xlabel('row')
225 | plt.ylabel('column')
226 | plt.show()
227 | return np.interp(x, xn, yn).astype(int)
228 |
--------------------------------------------------------------------------------
/pyroSAR/__init__.py:
--------------------------------------------------------------------------------
1 | from .drivers import *
2 | from . import ancillary, drivers
3 |
4 | from importlib.metadata import version, PackageNotFoundError
5 |
6 | try:
7 | __version__ = version(__name__)
8 | except PackageNotFoundError:
9 | # package is not installed
10 | pass
11 |
--------------------------------------------------------------------------------
/pyroSAR/config.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | ###############################################################################
3 | # pyroSAR configuration handling
4 |
5 | # Copyright (c) 2018-2024, the pyroSAR Developers.
6 |
7 | # This file is part of the pyroSAR Project. It is subject to the
8 | # license terms in the LICENSE.txt file found in the top-level
9 | # directory of this distribution and at
10 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
11 | # No part of the pyroSAR project, including this file, may be
12 | # copied, modified, propagated, or distributed except according
13 | # to the terms contained in the LICENSE.txt file.
14 | ###############################################################################
15 | import os
16 | import json
17 |
18 | import configparser as ConfigParser
19 |
20 | __LOCAL__ = ['acquisition_mode', 'coordinates', 'cycleNumber', 'frameNumber',
21 | 'lines', 'orbit', 'orbitNumber_abs', 'orbitNumber_rel',
22 | 'polarizations', 'product', 'projection', 'samples',
23 | 'sensor', 'spacing', 'start', 'stop']
24 |
25 |
26 | class Singleton(type):
27 | """
28 | Define an Instance operation that lets clients access its unique instance.
29 | https://sourcemaking.com/design_patterns/singleton/python/1
30 | """
31 |
32 | def __init__(cls, name, bases, attrs, **kwargs):
33 | super().__init__(name, bases, attrs)
34 | cls._instance = None
35 |
36 | def __call__(cls, *args, **kwargs):
37 | if cls._instance is None:
38 | cls._instance = super().__call__(*args, **kwargs)
39 | return cls._instance
40 |
41 |
42 | class ConfigHandler(metaclass=Singleton):
43 | """
44 | ConfigHandler is a configuration handler for pyroSAR. It is intended to be called by a class's '__init__' and
45 | set or get the configuration parameters throughout an entire package.
46 | The primary goal with ConfigHandler is to load a single, consistent configuration environment to be passed
47 | amongst ALL objects within a package.
48 |
49 | ConfigHandler is a SINGLETON, meaning once instantiated, THE SAME OBJECT
50 | will be returned to every class object calling it.
51 |
52 | Parameters
53 | ----------
54 | path : str or None
55 | A path where the .pyrosar directory will be created. If None (default) it will be created in the user home
56 | directory.
57 | config_fname : str
58 | Name of the config file. Default is 'config.ini'.
59 |
60 | Methods
61 | -------
62 | make_dir : Create a .pyrosar directory in home directory.
63 | create_config : Create a config.ini file in .pyrosar directory.
64 | open : Open the config.ini file.
65 | add_section : Create a new section in the configuration.
66 | set : Set an option in the configuration.
67 | remove_option : Remove an option in the configuration.
68 |
69 | Notes
70 | -----
71 | The syntax is the same as in ConfigParser. Here, keys are called options.
72 |
73 | """
74 |
75 | # Define __setter to control changeable keys (optional)
76 | # __setter = ["etc", "auxdata"]
77 |
78 | def __init__(self):
79 | path = os.path.join(os.path.expanduser('~'), '.pyrosar')
80 |
81 | self.__GLOBAL = {
82 | 'path': path,
83 | 'config_fname': 'config.ini',
84 | 'config': os.path.join(path, 'config.ini'),
85 | }
86 |
87 | if not os.path.isfile(self.__GLOBAL['config']):
88 | self.__create_config()
89 |
90 | self.parser = ConfigParser.RawConfigParser(allow_no_value=True)
91 | self.parser.optionxform = str
92 | self.parser.read(self.__GLOBAL['config'])
93 |
94 | def __create_config(self):
95 | """
96 | Create a config.ini file in .pyrosar directory.
97 |
98 | Returns
99 | -------
100 | None
101 | """
102 |
103 | if not os.path.exists(self.__GLOBAL['path']):
104 | os.makedirs(self.__GLOBAL['path'])
105 |
106 | with open(self.__GLOBAL['config'], 'w'):
107 | pass
108 |
109 | def __str__(self):
110 | items = []
111 | for section in self.parser.sections():
112 | items.append(' Section: {0}\n'.format(section))
113 |
114 | for options in self.parser.options(section):
115 | items.append(' x {0} :: {1} :: {2}\n'
116 | .format(options,
117 | self.parser.get(section, options),
118 | str(type(options))))
119 | out = f'Class : {self.__class__.__name__}\n' \
120 | f'Path : {self.__GLOBAL["config"]}\n' \
121 | f'Sections : {len(self.parser.sections())}\n' \
122 | f'Contents : \n{"".join(items)}'
123 |
124 | return out
125 |
126 | def __getitem__(self, section):
127 | if not self.parser.has_section(section):
128 | raise AttributeError('Section {0} does not exist.'.format(str(section)))
129 | return dict(self.parser.items(section))
130 |
131 | @property
132 | def sections(self):
133 | return self.parser.sections()
134 |
135 | def keys(self, section):
136 | """
137 | Get all keys (options) of a section.
138 |
139 | Parameters
140 | ----------
141 | section : str
142 | Section name.
143 |
144 | Returns
145 | -------
146 | list : options (keys) of a section.
147 |
148 | """
149 | return self.parser.options(section)
150 |
151 | def open(self):
152 | """
153 | Open the config.ini file. This method will open the config.ini
154 | file in an external standard app (text editor).
155 |
156 | Returns
157 | -------
158 | None
159 |
160 | """
161 |
162 | os.startfile(self.__GLOBAL['config'])
163 |
164 | def add_section(self, section):
165 | """
166 | Create a new section in the configuration.
167 |
168 | Parameters
169 | ----------
170 | section : str
171 | Section name
172 |
173 | Returns
174 | -------
175 | None
176 |
177 | """
178 | if not self.parser.has_section(section):
179 | self.parser.add_section(section)
180 | self.write()
181 | else:
182 | raise RuntimeError('section already exists')
183 |
184 | @property
185 | def file(self):
186 | return self.__GLOBAL['config']
187 |
188 | def set(self, section, key, value, overwrite=False):
189 | """
190 | Set an option.
191 |
192 | Parameters
193 | ----------
194 | section : str
195 | Section name.
196 | key : str
197 | the attribute name
198 | value :
199 | the attribute value
200 | overwrite : bool
201 |             If True and the key already exists, its value will be overwritten.
202 |
203 | Returns
204 | -------
205 |
206 | """
207 | if not self.parser.has_section(section):
208 | raise AttributeError('Section {0} does not exist.'.format(str(section)))
209 |
210 | if isinstance(value, list):
211 | value = json.dumps(value)
212 |
213 | if key in self.parser.options(section) and not overwrite:
214 | raise RuntimeError('Value already exists.')
215 |
216 | self.parser.set(section, key, value)
217 | self.write()
218 |
219 | def remove_option(self, section, key):
220 | """
221 |         Remove an option (key) from a section.
222 |
223 | Parameters
224 | ----------
225 | section : str
226 | Section name.
227 | key : str
228 |             Key name.
229 |
230 | Returns
231 | -------
232 |
233 | """
234 | if not self.parser.has_section(section):
235 | raise AttributeError('Section {0} does not exist.'.format(str(section)))
236 |
237 | if key not in self.parser.options(section):
238 | raise AttributeError('Key {0} does not exist.'.format(str(key)))
239 |
240 | self.parser.remove_option(section, key)
241 | self.write()
242 |
243 | def remove_section(self, section):
244 | """
245 |         Remove a section from the configuration.
246 |
247 | Parameters
248 | ----------
249 | section: str
250 | Section name.
251 |
252 | Returns
253 | -------
254 |
255 | """
256 | self.parser.remove_section(section)
257 | self.write()
258 |
259 | def write(self):
260 | with open(self.__GLOBAL['config'], 'w', encoding='utf8') as out:
261 | self.parser.write(out)
262 |
--------------------------------------------------------------------------------
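To make the handler's behavior concrete, here is a minimal usage sketch, assuming the class shown above is the ConfigHandler exposed by pyroSAR.config (the section and option names below are arbitrary examples, not predefined by pyroSAR):

    from pyroSAR.config import ConfigHandler  # assumed import location

    config = ConfigHandler()

    # create a section and set an option; both names are made-up examples
    if 'GAMMA' not in config.sections:
        config.add_section('GAMMA')
    config.set('GAMMA', 'homedir', '/usr/local/GAMMA_SOFTWARE', overwrite=True)

    print(config.sections)       # all section names
    print(config.keys('GAMMA'))  # all options (keys) of a section
    print(config['GAMMA'])       # {'homedir': '/usr/local/GAMMA_SOFTWARE'}

    # remove the option again; changes are written back to ~/.pyrosar/config.ini
    config.remove_option('GAMMA', 'homedir')
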
/pyroSAR/gamma/__init__.py:
--------------------------------------------------------------------------------
1 | from .auxil import process, ISPPar, UTM, Spacing, Namespace, slc_corners, par2hdr
2 | from .util import geocode, multilook, ovs, convert2gamma, calibrate, correctOSV, S1_deburst
3 | from . import dem
4 |
--------------------------------------------------------------------------------
/pyroSAR/gamma/api.py:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # import wrapper for the pyroSAR GAMMA API
3 |
4 | # Copyright (c) 2018-2019, the pyroSAR Developers.
5 |
6 | # This file is part of the pyroSAR Project. It is subject to the
7 | # license terms in the LICENSE.txt file found in the top-level
8 | # directory of this distribution and at
9 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
10 | # No part of the pyroSAR project, including this file, may be
11 | # copied, modified, propagated, or distributed except according
12 | # to the terms contained in the LICENSE.txt file.
13 | ###############################################################################
14 | import os
15 | import sys
16 | import warnings
17 |
18 | from .parser import autoparse
19 |
20 | try:
21 | autoparse()
22 |
23 | sys.path.insert(0, os.path.join(os.path.expanduser('~'), '.pyrosar'))
24 |
25 | try:
26 | from gammaparse import *
27 | except ImportError:
28 | warnings.warn('found a GAMMA installation directory, but module parsing failed')
29 |
30 | except RuntimeError:
31 | warnings.warn('could not find GAMMA installation directory; please set the GAMMA_HOME environment variable')
32 |
--------------------------------------------------------------------------------
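As the wrapper above shows, importing this module triggers autoparse(), which translates the GAMMA installation (located via the GAMMA_HOME environment variable) into a gammaparse module under ~/.pyrosar whose contents are then re-exported. A minimal sketch of the intended use; the GAMMA path is a hypothetical example and requires a licensed installation:

    import os

    # point pyroSAR to a GAMMA installation; the path is a made-up example
    os.environ['GAMMA_HOME'] = '/usr/local/GAMMA_SOFTWARE'

    # importing the api module runs autoparse() and, on success, exposes the
    # parsed GAMMA commands as Python functions
    from pyroSAR.gamma import api
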
/pyroSAR/gamma/error.py:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # interface for translating GAMMA errors messages into Python error types
3 |
4 | # Copyright (c) 2015-2019, the pyroSAR Developers.
5 |
6 | # This file is part of the pyroSAR Project. It is subject to the
7 | # license terms in the LICENSE.txt file found in the top-level
8 | # directory of this distribution and at
9 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
10 | # No part of the pyroSAR project, including this file, may be
11 | # copied, modified, propagated, or distributed except according
12 | # to the terms contained in the LICENSE.txt file.
13 | ###############################################################################
14 |
15 | import re
16 |
17 |
18 | def gammaErrorHandler(out, err):
19 | """
20 |     Raise Python exceptions from GAMMA error messages. This function is not intended for direct use,
21 |     but as part of the function gamma.util.process.
21 | Args:
22 | out: the stdout message returned by a subprocess call of a gamma command
23 | err: the stderr message returned by a subprocess call of a gamma command
24 |
25 | Raises: IOError | ValueError | RuntimeError | None
26 |
27 | """
28 |
29 |     # scan stdout and stderr messages for lines starting with 'ERROR'
30 | messages = out.split('\n') if out else []
31 | messages.extend(err.strip().split('\n'))
32 | errormessages = [x for x in messages if x.startswith('ERROR')]
33 |
34 | # registry of known gamma error messages and corresponding Python error types
35 | # do not change the Python error types of specific messages! This will change the behavior of several functions
36 | # in case no error is to be thrown define None as error type
37 | knownErrors = {'image data formats differ': IOError,
38 | 'cannot open': IOError,
39 | r'no coverage of SAR image by DEM(?: \(in (?:latitude/northing|longitude/easting)\)|)': RuntimeError,
40 | 'libgdal.so.1: no version information available': None,
41 | 'line outside of image': ValueError,
42 | 'no offsets found above SNR threshold': ValueError,
43 | 'window size < 4': ValueError,
44 | 'MLI oversampling factor must be 1, 2, 4, 8': ValueError,
45 | 'no points available for determining average intensity': ValueError,
46 | 'p_interp(): time outside of range': RuntimeError,
47 | 'no overlap with lookup table': RuntimeError,
48 | 'insufficient offset points to determine offset model parameters': RuntimeError,
49 | 'insufficient offset points left after culling to determine offset model parameters': RuntimeError,
50 | 'calloc_1d: number of elements <= 0': ValueError,
51 | 'multi-look output line:': RuntimeError,
52 | 'no OPOD state vector found with the required start time!': RuntimeError,
53 | 'gc_map operates only with slant range geometry, image geometry in SLC_par: GROUND_RANGE': RuntimeError,
54 | 'OPOD state vector data ends before start of the state vector time window': RuntimeError,
55 | 'non-zero exit status': RuntimeError,
56 | 'unsupported DEM projection': RuntimeError,
57 | 'tiffWriteProc:No space left on device': RuntimeError,
58 | 'in subroutine julday: there is no year zero!': RuntimeError,
59 | 'cannot create ISP image parameter file': OSError}
60 |
61 |     # check whether the error message is known and raise the error type mapped in knownErrors accordingly;
62 |     # otherwise raise a GammaUnknownError.
63 |     # The actual message is passed to the error and is thus visible in the traceback
64 | if len(errormessages) > 0:
65 | errormessage = errormessages[-1]
66 | err_out = '\n\n'.join([re.sub('ERROR[: ]*', '', x) for x in errormessages])
67 | for error in knownErrors:
68 | if re.search(error, errormessage):
69 | errortype = knownErrors[error]
70 | if errortype:
71 | raise errortype(err_out)
72 | else:
73 | return
74 | raise GammaUnknownError(err_out)
75 |
76 |
77 | class GammaUnknownError(Exception):
78 | """
79 | This is a general error, which is raised if the error message is not yet integrated
80 | into the known errors of function gammaErrorHandler.
81 | If this error occurs the message should be included in function gammaErrorHandler.
82 | """
83 |
84 | def __init__(self, errormessage):
85 | Exception.__init__(self, errormessage)
86 |
--------------------------------------------------------------------------------
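A small sketch of how a caller might pass captured subprocess output through gammaErrorHandler; the command output below is fabricated for illustration, but the 'cannot open' message is one of the known errors mapped to IOError above:

    from pyroSAR.gamma.error import gammaErrorHandler, GammaUnknownError

    # stdout/stderr as they might be captured from a GAMMA subprocess call
    out = 'processing line 1\nprocessing line 2'
    err = 'ERROR: cannot open file test.slc'

    try:
        gammaErrorHandler(out, err)
    except IOError as e:
        print('GAMMA I/O failure:', e)
    except GammaUnknownError as e:
        print('unmapped GAMMA error:', e)
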
/pyroSAR/install/download_egm96_15.gtx.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | # download EGM96 geoid model to convert heights with GDAL
3 | cd /usr/share/proj
4 | sudo wget https://download.osgeo.org/proj/vdatum/egm96_15/egm96_15.gtx
5 | sudo chmod 644 egm96_15.gtx
6 |
--------------------------------------------------------------------------------
/pyroSAR/install/download_testdata.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 |
3 |
4 | mkdir -p $TESTDATA_DIR
5 |
6 | #cd $TESTDATA_DIR
7 |
8 | echo "Start Download forest_brazil"
9 | wget --quiet -P $TESTDATA_DIR 'ftp://ftp.eorc.jaxa.jp/pub/ALOS-2/1501sample/310_forestbrazil/0000022708_001001_ALOS2015976960-140909.zip'
10 | echo "End download forest_brazil"
11 |
--------------------------------------------------------------------------------
/pyroSAR/install/install_deps.sh:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env bash
2 | ##############################################################
3 | # manual installation of pyroSAR dependencies
4 | # GDAL, GEOS, PROJ, SpatiaLite
5 | # John Truckenbrodt, Rhys Kidd 2017-2019
6 | ##############################################################
7 |
8 |
9 | # define a root directory for downloading packages
10 | root=$HOME/test
11 |
12 | # define a directory for download and unpacked packages
13 | downloaddir=${root}/originals
14 | packagedir=${root}/packages
15 |
16 | # define the installation directory; This needs to be outside of the root directory so that the latter can be deleted in the end.
17 | # In case installdir is set to a location outside of /usr/*, the following installation commands do not need to be run with
18 | # administration rights (sudo)
19 | #installdir=/usr/local
20 | installdir=$HOME/local
21 |
22 | # the version of GDAL and its dependencies
23 | GDALVERSION=3.0.1
24 |
25 | # these versions are not quite as important. If you have already installed them, you might need to define their
26 | # locations for the configuration of GDAL
27 | geos_version=3.7.2
28 | proj_version=6.1.1
29 | spatialite_version=4.3.0
30 |
31 | # define the number of threads for compilation
32 | threads=2
33 | ########################################################################################################################
34 | # setup environment variables and create directories
35 |
36 | if [[ -d "${root}" ]]; then
37 | if [[ "$(ls -A ${root})" ]]; then
38 | echo "Error! root already exists. Please choose a fresh directory which can be deleted once finished" 1>&2
39 | #exit 64
40 | fi
41 | fi
42 |
43 | export PATH=${installdir}/bin:$PATH
44 | export LD_LIBRARY_PATH=${installdir}/lib:$LD_LIBRARY_PATH
45 |
46 |
47 | for dir in ${root} ${downloaddir} ${packagedir} ${installdir}; do
48 | mkdir -p ${dir}
49 | done
50 | ########################################################################################################################
51 | # download GDAL and its dependencies
52 |
53 | declare -a remotes=(
54 | "https://download.osgeo.org/gdal/$GDALVERSION/gdal-$GDALVERSION.tar.gz"
55 | "https://download.osgeo.org/geos/geos-$geos_version.tar.bz2"
56 | "https://download.osgeo.org/proj/proj-$proj_version.tar.gz"
57 | "https://www.gaia-gis.it/gaia-sins/libspatialite-sources/libspatialite-$spatialite_version.tar.gz"
58 | )
59 |
60 | for package in "${remotes[@]}"; do
61 | wget ${package} -nc -P ${downloaddir}
62 | done
63 | ########################################################################################################################
64 | # unpack downloaded archives
65 |
66 | for package in ${downloaddir}/*tar.gz; do
67 | tar xfvz ${package} -C ${packagedir}
68 | done
69 | for package in ${downloaddir}/*tar.bz2; do
70 | tar xfvj ${package} -C ${packagedir}
71 | done
72 | ########################################################################################################################
73 | # install GEOS
74 |
75 | cd ${packagedir}/geos*
76 | ./configure --prefix ${installdir}
77 | make -j${threads}
78 | sudo make install
79 | ########################################################################################################################
80 | # install PROJ
81 |
82 | cd ${packagedir}/proj*
83 | ./configure --prefix ${installdir}
84 | make -j${threads}
85 | sudo make install
86 | ########################################################################################################################
87 | # install spatialite
88 |
89 | cd ${packagedir}/libspatialite*
90 |
91 | # PROJ now uses a new API, using the old deprecated one (as done by spatialite) needs to be indicated explicitly
92 | ./configure --prefix=${installdir} \
93 | CFLAGS=-DACCEPT_USE_OF_DEPRECATED_PROJ_API_H
94 |
95 | make -j${threads}
96 | sudo make install
97 | ########################################################################################################################
98 | # install GDAL
99 |
100 | # please check the output of configure to make sure that the GEOS and PROJ drivers are enabled
101 | # otherwise you might need to define the locations of the packages
102 |
103 | python_bin=/usr/bin/python3.6
104 |
105 | cd ${packagedir}/gdal*
106 | ./configure --prefix ${installdir} \
107 | --with-python=${python_bin} \
108 | --with-geos=${installdir}/bin/geos-config \
109 | --with-proj=${installdir} \
110 | --with-spatialite=${installdir}
111 |
112 | make -j${threads}
113 | sudo make install
114 | ########################################################################################################################
115 | # install the GDAL Python binding for the current user (note the --user flag below)
116 |
117 | python -m pip install gdal==$GDALVERSION --global-option=build_ext --user --global-option="-I$installdir/include"
118 | ########################################################################################################################
119 | ########################################################################################################################
120 | # install pysqlite2 python package with static sqlite3 build
121 | # this needs git to be installed
122 |
123 | cd ${packagedir}
124 | git clone https://github.com/ghaering/pysqlite.git
125 | cd pysqlite
126 |
127 | wget https://sqlite.org/2019/sqlite-amalgamation-3290000.zip
128 |
129 | unzip sqlite-amalgamation-3290000.zip
130 | cp sqlite-amalgamation-3290000/* .
131 |
132 | sudo python setup.py build_static install --prefix=${installdir}
133 | ########################################################################################################################
134 | ########################################################################################################################
135 | # finishing the process
136 |
137 | echo depending on your choice of installdir and Python version you might need to add the following lines to your .bashrc:
138 | echo "export PATH=${installdir}/bin:$"PATH
139 | echo "export LD_LIBRARY_PATH=${installdir}/lib:$"LD_LIBRARY_PATH
140 | echo "export PYTHONPATH=${installdir}/lib64/python3.6/site-packages:$"PYTHONPATH
141 | echo "done"
142 |
143 | # deleting the root directory which is no longer needed
144 | sudo rm -rf ${root}
145 |
--------------------------------------------------------------------------------
/pyroSAR/patterns.py:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # Reading and Organizing system for SAR images
3 | # Copyright (c) 2016-2023, the pyroSAR Developers.
4 |
5 | # This file is part of the pyroSAR Project. It is subject to the
6 | # license terms in the LICENSE.txt file found in the top-level
7 | # directory of this distribution and at
8 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
9 | # No part of the pyroSAR project, including this file, may be
10 | # copied, modified, propagated, or distributed except according
11 | # to the terms contained in the LICENSE.txt file.
12 | ###############################################################################
13 | """
14 | This file contains regular expressions to identify SAR products.
15 | The pattern 'pyrosar' identifies products in pyroSAR's unified naming scheme.
16 | The names of all other expressions correspond to the classes found in pyroSAR.drivers.
17 | """
18 | pyrosar = r'(?:.*[/\\]|)' \
19 |           r'(?P<outname_base>' \
20 |           r'(?P<sensor>[A-Z0-9]{1,4})_+' \
21 |           r'(?P<acquisition_mode>[A-Z0-9]{1,4})_+' \
22 |           r'(?P<orbit>[AD])_' \
23 |           r'(?P<start>[0-9T]{15})' \
24 |           r'(?:_(?P<extensions>\w*?)|)' \
25 |           r')_*' \
26 |           r'(?:(?P<polarization>[HV]{2})_' \
27 |           r'(?P<proc_steps>[\w-]*)|)' \
28 |           r'(?P<filetype>(?:.tif|.nc|))$'
29 | 
30 | ceos_ers = r'(?P<product_id>(?:SAR|ASA)_(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_))_[012B][CP])' \
31 |            r'(?P<processing_stage_flag>[A-Z])' \
32 |            r'(?P<originator_ID>[A-Z\-]{3})' \
33 |            r'(?P<start_day>[0-9]{8})_' \
34 |            r'(?P<start_time>[0-9]{6})_' \
35 |            r'(?P<duration>[0-9]{8})' \
36 |            r'(?P<phase>[0-9A-Z]{1})' \
37 |            r'(?P<cycle>[0-9]{3})_' \
38 |            r'(?P<relative_orbit>[0-9]{5})_' \
39 |            r'(?P<absolute_orbit>[0-9]{5})_' \
40 |            r'(?P<counter>[0-9]{4,})\.' \
41 |            r'(?P<satellite_ID>[EN][12])' \
42 |            r'(?P<extension>(?:\.zip|\.tar\.gz|\.PS|))$'
43 | 
44 | ceos_psr1 = r'^LED-ALPSR' \
45 |             r'(?P<sub>P|S)' \
46 |             r'(?P<orbit>[0-9]{5})' \
47 |             r'(?P<frame>[0-9]{4})-' \
48 |             r'(?P<mode>[HWDPC])' \
49 |             r'(?P<level>1\.[015])' \
50 |             r'(?P<proc>G|_)' \
51 |             r'(?P<proj>[UPML_])' \
52 |             r'(?P<orbit_dir>A|D)$'
53 | 
54 | ceos_psr2 = r'^LED-ALOS2' \
55 |             r'(?P<orbit>[0-9]{5})' \
56 |             r'(?P<frame>[0-9]{4})-' \
57 |             r'(?P<date>[0-9]{6})-' \
58 |             r'(?P<mode>SBS|UBS|UBD|HBS|HBD|HBQ|FBS|FBD|FBQ|WBS|WBD|WWS|WWD|VBS|VBD)' \
59 |             r'(?P<look_dir>L|R)' \
60 |             r'(?P<level>1\.0|1\.1|1\.5|2\.1|3\.1)' \
61 |             r'(?P<proc>[GR_])' \
62 |             r'(?P<proj>[UPML_])' \
63 |             r'(?P<orbit_dir>A|D)$'
64 | 
65 | eorc_psr = r'^PSR2-' \
66 |            r'(?P<prod>SLTR)_' \
67 |            r'(?P<path>RSP[0-9]{3})_' \
68 |            r'(?P<date>[0-9]{8})' \
69 |            r'(?P<mode>FBD|WBD)' \
70 |            r'(?P<look>[0-9]{2})' \
71 |            r'(?P<orbit_dir>A|D)' \
72 |            r'(?P<look_dir>L|R)_' \
73 |            r'(?P<scene_id>[0-9A-Z]{16})-' \
74 |            r'(?P<product_id>[0-9A-Z]{5})_' \
75 |            r'(?P<frame>[0-9]{3})_' \
76 |            r'HDR$'
77 | 
78 | esa = r'(?P<product_id>(?:SAR|ASA)_(?:IM(?:S|P|G|M|_)|AP(?:S|P|G|M|_)|WV(?:I|S|W|_)|WS(?:M|S|_))_[012B][CP])' \
79 |       r'(?P<processing_stage_flag>[A-Z])' \
80 |       r'(?P<originator_ID>[A-Z\-]{3})' \
81 |       r'(?P<start_day>[0-9]{8})_' \
82 |       r'(?P<start_time>[0-9]{6})_' \
83 |       r'(?P<duration>[0-9]{8})' \
84 |       r'(?P<phase>[0-9A-Z]{1})' \
85 |       r'(?P<cycle>[0-9]{3})_' \
86 |       r'(?P<relative_orbit>[0-9]{5})_' \
87 |       r'(?P<absolute_orbit>[0-9]{5})_' \
88 |       r'(?P<counter>[0-9]{4,})\.' \
89 |       r'(?P<satellite_ID>[EN][12])'
90 | 
91 | safe = r'^(?P<sensor>S1[ABCD])_' \
92 |        r'(?P<beam>S1|S2|S3|S4|S5|S6|IW|EW|WV|EN|N1|N2|N3|N4|N5|N6|IM)_' \
93 |        r'(?P<product>SLC|GRD|OCN)' \
94 |        r'(?P<resolution>F|H|M|_)_' \
95 |        r'(?P<processingLevel>1|2)' \
96 |        r'(?P<category>S|A)' \
97 |        r'(?P<pols>SH|SV|DH|DV|VV|HH|HV|VH)_' \
98 |        r'(?P<start>[0-9]{8}T[0-9]{6})_' \
99 |        r'(?P<stop>[0-9]{8}T[0-9]{6})_' \
100 |        r'(?P<orbitNumber>[0-9]{6})_' \
101 |        r'(?P<dataTakeID>[0-9A-F]{6})_' \
102 |        r'(?P<productIdentifier>[0-9A-F]{4})' \
103 |        r'\.SAFE$'
104 | 
105 | tsx = r'^(?P<sat>T[DS]X1)_SAR__' \
106 |       r'(?P<prod>SSC|MGD|GEC|EEC)_' \
107 |       r'(?P<var>____|SE__|RE__|MON1|MON2|BTX1|BRX2)_' \
108 |       r'(?P<mode>SM|SL|HS|HS300|ST|SC)_' \
109 |       r'(?P<pols>[SDTQ])_' \
110 |       r'(?:SRA|DRA)_' \
111 |       r'(?P<start>[0-9]{8}T[0-9]{6})_' \
112 |       r'(?P<stop>[0-9]{8}T[0-9]{6})(?:\.xml|)$'
113 | 
114 | tdm = r'^(?P<sat>T[D]M1)_SAR__' \
115 |       r'(?P<prod>COS)_' \
116 |       r'(?P<var>____|MONO|BIST|ALT1|ALT2)_' \
117 |       r'(?P<mode>SM|SL|HS)_' \
118 |       r'(?P<pols>[SDQ])_' \
119 |       r'(?:SRA|DRA)_' \
120 |       r'(?P<start>[0-9]{8}T[0-9]{6})_' \
121 |       r'(?P<stop>[0-9]{8}T[0-9]{6})(?:\.xml|)$'
122 |
--------------------------------------------------------------------------------
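The named groups of the pyrosar pattern correspond to the metadata keys returned by pyroSAR.ancillary.parse_datasetname (compare tests/test_ancillary.py). A quick sketch of matching a dataset name directly, assuming pyroSAR is installed:

    import re
    from pyroSAR import patterns

    fname = 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'
    match = re.match(patterns.pyrosar, fname)
    if match:
        meta = match.groupdict()
        print(meta['sensor'])        # 'S1A'
        print(meta['start'])         # '20150309T173017'
        print(meta['polarization'])  # 'VV'
        print(meta['proc_steps'])    # 'grd_mli_geo_norm_db'
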
/pyroSAR/snap/__init__.py:
--------------------------------------------------------------------------------
1 | from .util import geocode, noise_power
2 | from .auxil import gpt
3 |
--------------------------------------------------------------------------------
/pyroSAR/snap/data/collect_suffices.py:
--------------------------------------------------------------------------------
1 | ##############################################################
2 | # SNAP source code scan for retrieving operator suffices
3 |
4 | # Copyright (c) 2020-2024, the pyroSAR Developers.
5 |
6 | # This file is part of the pyroSAR Project. It is subject to the
7 | # license terms in the LICENSE.txt file found in the top-level
8 | # directory of this distribution and at
9 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
10 | # No part of the pyroSAR project, including this file, may be
11 | # copied, modified, propagated, or distributed except according
12 | # to the terms contained in the LICENSE.txt file.
13 | ##############################################################
14 | import os
15 | import re
16 | import subprocess as sp
17 | from spatialist.ancillary import finder
18 |
19 | """
20 | This script clones the SNAP source code from GitHub and reads the suffices for SNAP operators.
21 | For example, the operator Terrain-Flattening has the suffix TF. If Terrain-Flattening is added to a workflow
22 | in SNAP's graph builder, this suffix is appended to the automatically created output file name.
23 | As pyroSAR also automatically creates file names with processing step suffices, it is convenient to just
24 | use those defined by SNAP.
25 | Currently I am not aware of any way to retrieve them directly from a SNAP installation.
26 | A question has been asked in the STEP forum: https://forum.step.esa.int/t/snappy-get-operator-product-suffix/22885
27 |
28 | Feel free to contact me if you have ideas on how to improve this!
29 | """
30 |
31 |
32 | def main():
33 | # some arbitrary directory for the source code
34 | workdir = os.path.join(os.path.expanduser('~'), '.pyrosar', 'snap_code')
35 | os.makedirs(workdir, exist_ok=True)
36 |
37 | # the name of the Java properties file containing the operator-suffix lookup
38 | outfile = 'snap.suffices.properties'
39 |
40 | # clone all relevant toolboxes
41 | for tbx in ['snap-engine', 'snap-desktop', 'microwave-toolbox']:
42 | print(tbx)
43 | target = os.path.join(workdir, tbx)
44 | if not os.path.isdir(target):
45 | url = 'https://github.com/senbox-org/{}'.format(tbx)
46 | sp.check_call(['git', 'clone', '--depth', '1', url], cwd=workdir)
47 | else:
48 | sp.check_call(['git', 'pull', '--depth', '1'], cwd=target)
49 |
50 | # search patterns for relevant files
51 | # Usually files containing operator classes are named Op.java but without dashes
52 | # e.g. TerrainFlatteningOp.java for the Terrain-Flattening operator
53 | # One exception is Calibration for which there is a subclass for each SAR sensor
54 | operators = finder(workdir, ['*Op.java', 'BaseCalibrator.java'])
55 |
56 |     # a list for collecting the suffices
57 | collect = []
58 |
59 | for op in operators:
60 | print(op)
61 | with open(op, encoding='utf8') as infile:
62 | content = infile.read()
63 |
64 | # the suffix is defined as a class attribute PRODUCT_SUFFIX
65 | pattern = 'String PRODUCT_SUFFIX = \"_([a-zA-Z]*)\"'
66 | match = re.search(pattern, content)
67 | if match:
68 | suffix = match.groups()[0]
69 | else:
70 | suffix = ''
71 |
72 | # the name of the operator as available in the UI
73 | pattern = r'\@OperatorMetadata\(alias = \"([a-zA-Z-]*)\"'
74 | match = re.search(pattern, content)
75 | if match:
76 | alias = match.groups()[0]
77 | else:
78 | alias = None
79 |
80 | if suffix == 'Cal':
81 | alias = 'Calibration'
82 |
83 | # only collect operators for which an alias exists, i.e. which are exposed in the UI,
84 | # and for which a suffix is defined. In the UI, all operators for which no suffix exists
85 | # will just get no suffix in any written file.
86 | if alias is not None and suffix != '':
87 | print(alias, suffix)
88 | collect.append('{0}={1}'.format(alias, suffix))
89 |
90 | print('found {} matching operators'.format(len(collect)))
91 |
92 | with open(outfile, 'w') as out:
93 | out.write('\n'.join(sorted(collect, key=str.lower)))
94 |
95 |
96 | if __name__ == '__main__':
97 | main()
98 |
--------------------------------------------------------------------------------
/pyroSAR/snap/data/snap.auxdata.properties:
--------------------------------------------------------------------------------
1 | ####################################################################################
2 | # Configuration for the Auxdata paths
3 | # All properties described here can also be passed to the VM as system
4 | # properties using the standard Java
5 | #   -D<property-name>=<property-value>
6 | # syntax. In addition, it is possible to use macros of the form
7 | #   ${<property-name>}
8 | # within a value. Macros will expand to the value of the referred file property,
9 | # system property, or environment variable.
10 | ####################################################################################
11 |
12 | #AuxDataPath = c:\\AuxData2
13 | demPath = ${AuxDataPath}/dem
14 |
15 | DEM.aceDEMDataPath = ${demPath}/ACE_DEM/ACE
16 | DEM.aceDEM_HTTP = http://step.esa.int/auxdata/dem/ACE30/
17 | DEM.ace2_5MinDEMDataPath = ${demPath}/ACE2/5M_HEIGHTS
18 | DEM.ace2_5MinDEM_HTTP = http://step.esa.int/auxdata/dem/ACE2/5M/
19 | DEM.CDEM_HTTP = http://step.esa.int/auxdata/dem/cdem/
20 | DEM.egm96_HTTP = http://step.esa.int/auxdata/dem/egm96/
21 | DEM.gtopo30DEMDataPath = ${demPath}/GTOPO30/dem
22 | DEM.AsterDEMDataPath = ${demPath}/ASTER
23 | DEM.Getasse30DEMDataPath = ${demPath}/GETASSE30
24 | DEM.srtm3GeoTiffDEMDataPath = ${demPath}/SRTM_DEM/tiff
25 | DEM.srtm3GeoTiffDEM_FTP = xftp.jrc.it
26 | DEM.srtm3GeoTiffDEM_remotePath = /pub/srtmV4/tiff/
27 | DEM.srtm3GeoTiffDEM_HTTP = https://download.esa.int/step/auxdata/dem/SRTM90/tiff/
28 | DEM.srtm1HgtDEM_HTTP = http://step.esa.int/auxdata/dem/SRTMGL1/
29 | DEM.srtm1GridDEMDataPath =
30 |
31 | landCoverPath = ${AuxDataPath}/LandCover
32 | LandCover.glc2000DataPath = ${landCoverPath}/glc2000
33 | LandCover.globcoverDataPath = ${landCoverPath}/globcover
34 |
35 | OrbitFiles.dorisHTTP_vor_remotePath = http://step.esa.int/auxdata/orbits/Doris/vor
36 | OrbitFiles.dorisVOROrbitPath = ${AuxDataPath}/Orbits/Doris/vor
37 | OrbitFiles.dorisPOROrbitPath = ${AuxDataPath}/Orbits/Doris/por
38 |
39 | OrbitFiles.delftEnvisatOrbitPath = ${AuxDataPath}/Orbits/Delft Precise Orbits/ODR.ENVISAT1/eigen-cg03c
40 | OrbitFiles.delftERS1OrbitPath = ${AuxDataPath}/Orbits/Delft Precise Orbits/ODR.ERS-1/dgm-e04
41 | OrbitFiles.delftERS2OrbitPath = ${AuxDataPath}/Orbits/Delft Precise Orbits/ODR.ERS-2/dgm-e04
42 |
43 | OrbitFiles.delftFTP = dutlru2.lr.tudelft.nl
44 | OrbitFiles.delftFTP_ENVISAT_precise_remotePath = /pub/orbits/ODR.ENVISAT1/eigen-cg03c/
45 | OrbitFiles.delftFTP_ERS1_precise_remotePath = /pub/orbits/ODR.ERS-1/dgm-e04/
46 | OrbitFiles.delftFTP_ERS2_precise_remotePath = /pub/orbits/ODR.ERS-2/dgm-e04/
47 |
48 | OrbitFiles.prareHTTP_ERS1_remotePath = http://step.esa.int/auxdata/orbits/ers_precise_orb/ERS1
49 | OrbitFiles.prareHTTP_ERS2_remotePath = http://step.esa.int/auxdata/orbits/ers_precise_orb/ERS2
50 | OrbitFiles.prareERS1OrbitPath = ${AuxDataPath}/Orbits/ers_precise_orb/ERS1
51 | OrbitFiles.prareERS2OrbitPath = ${AuxDataPath}/Orbits/ers_precise_orb/ERS2
52 |
53 | OrbitFiles.sentinel1POEOrbitPath = ${AuxDataPath}/Orbits/Sentinel-1/POEORB
54 | OrbitFiles.sentinel1RESOrbitPath = ${AuxDataPath}/Orbits/Sentinel-1/RESORB
55 | OrbitFiles.sentinel1POEOrbit_remotePath = http://step.esa.int/auxdata/orbits/Sentinel-1/POEORB/
56 | OrbitFiles.sentinel1RESOrbit_remotePath = http://step.esa.int/auxdata/orbits/Sentinel-1/RESORB/
57 |
58 | AuxCal.Sentinel1.remotePath = http://step.esa.int/auxdata/auxcal/S1/
59 | AuxCal.ENVISAT.remotePath = http://step.esa.int/auxdata/auxcal/ENVISAT/
60 | AuxCal.ERS.remotePath = http://step.esa.int/auxdata/auxcal/ERS/
61 |
--------------------------------------------------------------------------------
/pyroSAR/snap/data/snap.suffices.properties:
--------------------------------------------------------------------------------
1 | AdaptiveThresholding=THR
2 | ALOS-Deskewing=DSk
3 | Apply-Orbit-File=Orb
4 | Back-Geocoding=Stack
5 | Calibration=Cal
6 | Coherence=Coh
7 | CP-Stokes-Parameters=Stokes
8 | CreateStack=Stack
9 | DEM-Assisted-Coregistration=Stack
10 | Demodulate=Demod
11 | Ellipsoid-Correction-GG=EC
12 | Ellipsoid-Correction-RD=EC
13 | Flip=Flip
14 | GLCM=GLCM
15 | GoldsteinPhaseFiltering=Flt
16 | HorizontalVerticalMotion=hvm
17 | Interferogram=Ifg
18 | IonosphericCorrection=iono
19 | KNN-Classifier=KNNClass
20 | LinearToFromdB=dB
21 | Maximum-Likelihood-Classifier=MLClass
22 | Minimum-Distance-Classifier=MDClass
23 | Multi-Temporal-Speckle-Filter=Spk
24 | Multilook=ML
25 | MultiMasterInSAR=mmifg
26 | Multitemporal-Compositing=MC
27 | Object-Discrimination=SHP
28 | Offset-Tracking=Vel
29 | Oversample=Ovr
30 | PhaseFilter=Flt
31 | PhaseToDisplacement=Disp
32 | PhaseToElevation=Hgt
33 | PhaseToHeight=Hgt
34 | Polarimetric-Classification=Class
35 | Polarimetric-Parameters=PP
36 | Polarimetric-Speckle-Filter=Spk
37 | Random-Forest-Classifier=RF
38 | Remodulate=Remod
39 | SARSim-Terrain-Correction=TC
40 | SliceAssembly=Asm
41 | SM-Dielectric-Modeling=SM
42 | Speckle-Divergence=SpkDiv
43 | Speckle-Filter=Spk
44 | SRGR=SRGR
45 | Supervised-Wishart-Classification=Class
46 | SVM-Classifier=SVMClass
47 | Terrain-Correction=TC
48 | Terrain-Flattening=TF
49 | ThermalNoiseRemoval=NR
50 | Three-passDInSAR=DInSAR
51 | TopoPhaseRemoval=DInSAR
52 | TOPSAR-Deburst=Deb
53 | TOPSAR-Merge=mrg
54 | Undersample=Udr
--------------------------------------------------------------------------------
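Since the lookup above is a plain Java properties file (one key=value pair per line), it can be loaded with a few lines of Python; a minimal sketch:

    # minimal sketch for reading the operator-suffix lookup shown above
    suffices = {}
    with open('snap.suffices.properties') as properties:
        for line in properties:
            line = line.strip()
            if line and not line.startswith('#'):
                operator, suffix = line.split('=', 1)
                suffices[operator] = suffix

    print(suffices['Terrain-Flattening'])  # 'TF'
    print(suffices['Terrain-Correction'])  # 'TC'
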
/pyroSAR/snap/recipes/base.xml:
--------------------------------------------------------------------------------
1 | <graph id="Graph">
2 |   <version>1.0</version>
3 |   <node id="Read">
4 |     <operator>Read</operator>
5 |     <sources/>
6 |     <parameters class="com.bc.ceres.binding.dom.XppDomElement">
7 |       <file>S1A_IW_GRDH_1SDV_20141220T155633_20141220T155658_003805_0048BB_CE9B.zip</file>
8 |       <formatName>SENTINEL-1</formatName>
9 |     </parameters>
10 |   </node>
11 |   <node id="Apply-Orbit-File">
12 |     <operator>Apply-Orbit-File</operator>
13 |     <sources>
14 |       <sourceProduct refid="Read"/>
15 |     </sources>
16 |     <parameters class="com.bc.ceres.binding.dom.XppDomElement">
17 |       <orbitType>Sentinel Precise (Auto Download)</orbitType>
18 |       <polyDegree>3</polyDegree>
19 |       <continueOnFail>false</continueOnFail>
20 |     </parameters>
21 |   </node>
22 |   <node id="Calibration">
23 |     <operator>Calibration</operator>
24 |     <sources>
25 |       <sourceProduct refid="Apply-Orbit-File"/>
26 |     </sources>
27 |     <parameters class="com.bc.ceres.binding.dom.XppDomElement">
28 |       <sourceBands/>
29 |       <auxFile>Product Auxiliary File</auxFile>
30 |       <externalAuxFile/>
31 |       <outputImageInComplex>false</outputImageInComplex>
32 |       <outputImageScaleInDb>false</outputImageScaleInDb>
33 |       <createGammaBand>false</createGammaBand>
34 |       <createBetaBand>false</createBetaBand>
35 |       <selectedPolarisations>VH,VV</selectedPolarisations>
36 |       <outputSigmaBand>false</outputSigmaBand>
37 |       <outputGammaBand>false</outputGammaBand>
38 |       <outputBetaBand>false</outputBetaBand>
39 |     </parameters>
40 |   </node>
41 |   <node id="Write">
42 |     <operator>Write</operator>
43 |     <sources>
44 |       <sourceProduct refid="Calibration"/>
45 |     </sources>
46 |     <parameters class="com.bc.ceres.binding.dom.XppDomElement">
47 |       <file>E:\DATA\SWOS\SNAP\test\output\S1A_IW_GRDH_1SDV_20141005T052515_20141005T052540_002690_003012_763E.tif</file>
48 |       <formatName>GeoTIFF</formatName>
49 |     </parameters>
50 |   </node>
51 |   <!-- applicationData "Presentation" block (graph builder display positions) not recoverable from this dump -->
52 | </graph>
--------------------------------------------------------------------------------
/pyroSAR/snap/recipes/blank.xml:
--------------------------------------------------------------------------------
1 | <graph id="Graph">
2 |   <version>1.0</version>
3 |   <!-- remaining elements (likely an empty applicationData block) not recoverable from this dump -->
4 | </graph>
--------------------------------------------------------------------------------
/pyroSAR/xml_util.py:
--------------------------------------------------------------------------------
1 | ###############################################################################
2 | # utility collection for xml file handling
3 |
4 | # Copyright (c) 2016-2018, the pyroSAR Developers.
5 |
6 | # This file is part of the pyroSAR Project. It is subject to the
7 | # license terms in the LICENSE.txt file found in the top-level
8 | # directory of this distribution and at
9 | # https://github.com/johntruckenbrodt/pyroSAR/blob/master/LICENSE.txt.
10 | # No part of the pyroSAR project, including this file, may be
11 | # copied, modified, propagated, or distributed except according
12 | # to the terms contained in the LICENSE.txt file.
13 | ###############################################################################
14 |
15 | import os
16 | import re
17 | import ast
18 | import xml.etree.ElementTree as ET
19 |
20 |
21 | class XMLHandler(object):
22 | def __init__(self, xml):
23 | errormessage = 'xmlfile must be a string pointing to an existing file, ' \
24 | 'a string or bytes object from which an xml can be parsed or a file object'
25 | if 'readline' in dir(xml):
26 | self.infile = xml.name if hasattr(xml, 'name') else None
27 | xml.seek(0)
28 | self.text = xml.read()
29 | xml.seek(0)
30 | elif isinstance(xml, (bytes, str)):
31 | try:
32 | isfile = os.path.isfile(xml)
33 | except ValueError:
34 | isfile = False
35 | if isfile:
36 | self.infile = xml
37 | with open(xml, 'r') as infile:
38 | self.text = infile.read()
39 | else:
40 | try:
41 | tree = ET.fromstring(xml)
42 | self.infile = None
43 | self.text = str(xml)
44 | del tree
45 | except ET.ParseError:
46 | raise RuntimeError(errormessage)
47 | else:
48 | raise RuntimeError(errormessage)
49 | defs = re.findall('xmlns:[a-z0-9]+="[^"]*"', self.text)
50 | dictstring = '{{{}}}'.format(re.sub(r'xmlns:([a-z0-9]*)=', r'"\1":', ', '.join(defs)))
51 | self.namespaces = ast.literal_eval(dictstring)
52 |
53 | def restoreNamespaces(self):
54 | for key, val in self.namespaces.items():
55 | val_new = val.split('/')[-1]
56 | self.text = self.text.replace(key, val_new)
57 |
58 | def write(self, outname, mode):
59 | with open(outname, mode) as out:
60 | out.write(self.text)
61 |
62 | def __enter__(self):
63 | return self
64 |
65 | def __exit__(self, exc_type, exc_val, exc_tb):
66 | return
67 |
68 |
69 | def getNamespaces(xmlfile):
70 | with XMLHandler(xmlfile) as xml:
71 | return xml.namespaces
72 |
--------------------------------------------------------------------------------
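A short sketch of the namespace extraction implemented by XMLHandler, applied to an inline XML string (the namespace definition is a made-up example):

    from pyroSAR.xml_util import getNamespaces

    # a made-up document with one namespace definition
    doc = '<root xmlns:safe="http://www.esa.int/safe/sentinel-1.0">' \
          '<safe:item>value</safe:item></root>'

    print(getNamespaces(doc))
    # {'safe': 'http://www.esa.int/safe/sentinel-1.0'}
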
/readthedocs.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 |
3 | build:
4 | os: "ubuntu-24.04"
5 | tools:
6 | python: "mambaforge-latest"
7 |
8 | conda:
9 | environment: environment-doc.yml
10 |
11 | python:
12 | install:
13 | - method: pip
14 | path: .
15 |
16 | formats:
17 | - epub
18 | - pdf
19 |
20 | sphinx:
21 | builder: html
22 | configuration: docs/source/conf.py
23 |
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | -r requirements.txt
2 |
3 | #Testing requirements
4 | pytest
5 |
6 | #Documentation requirements
7 | sphinx
8 | sphinxcontrib-bibtex
9 | sphinxcontrib-svg2pdfconverter
10 | cairosvg
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | geoalchemy2<0.14.0
2 | lxml
3 | numpy<2.0
4 | packaging
5 | pillow
6 | progressbar2
7 | psycopg2
8 | pyyaml
9 | requests
10 | shapely
11 | spatialist>=0.15.2
12 | sqlalchemy>=1.4,<2.0
13 | sqlalchemy-utils>=0.37
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 | import platform
4 |
5 |
6 | @pytest.fixture
7 | def travis():
8 | return 'TRAVIS' in os.environ.keys()
9 |
10 |
11 | @pytest.fixture
12 | def appveyor():
13 | return 'APPVEYOR' in os.environ.keys()
14 |
15 |
16 | @pytest.fixture
17 | def testdir():
18 | return os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data')
19 |
20 |
21 | @pytest.fixture
22 | def testdata(testdir):
23 | out = {
24 | # ASAR_IMS__A_20040703T205338, product: SLC, driver: ESA
25 | 'asar': os.path.join(testdir,
26 | 'ASA_IMS_1PNESA20040703_205338_000000182028_00172_12250_00001672562030318361237.N1'),
27 | # ERS1_IMP__A_19960808T205906, product: PRI, driver: ESA
28 | 'ers1_esa': os.path.join(testdir, 'SAR_IMP_1PXESA19960808_205906_00000017G158_00458_26498_2615.E1'),
29 | # ERS1_IMS__D_19951220T024320, product: SLC, driver: CEOS_ERS
30 | 'ers1_ceos': os.path.join(testdir, 'SAR_IMS_1PXESA19951220_024320_00000015G152_00132_23166_0252.E1.zip'),
31 | # PSR2_FBD__A_20140909T043342, product: 1.5, driver: CEOS_PSR
32 | 'psr2': os.path.join(testdir, '0000022708_001001_ALOS2015976960-140909.zip'),
33 | # main scene for testing Sentinel-1 metadata reading and database ingestion
34 | 's1': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip'),
35 | # for test_snap.test_slice_assembly
36 | 's1_2': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150222T170725_20150222T170750_004739_005DD8_CEAB.zip'),
37 | # for testing database duplicate handling
38 | 's1_3': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_6D00.zip'),
39 | # for testing database duplicate handling
40 | 's1_4': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_FEC3.zip'),
41 | # used in test_osv
42 | 's1_orbit': os.path.join(testdir, 'S1A_IW_GRDH_1SDV_20210119T031653_20210119T031718_036201_043ED0_8255.zip'),
43 | 'tif': os.path.join(testdir, 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'),
44 | 'archive_old_csv': os.path.join(testdir, 'archive_outdated.csv'),
45 | 'archive_old_bbox': os.path.join(testdir, 'archive_outdated_bbox.db'),
46 | 'dempar': os.path.join(testdir, 'dem.par'),
47 | 'mlipar': os.path.join(testdir, 'mli.par')
48 | }
49 | return out
50 |
51 |
52 | @pytest.fixture
53 | def auxdata_dem_cases():
54 | cases = [('AW3D30', ['N050E010/N051E011.tar.gz']),
55 | ('SRTM 1Sec HGT', ['N51E011.SRTMGL1.hgt.zip']),
56 | ('SRTM 3Sec', ['srtm_39_02.zip']),
57 | # ('TDX90m', ['DEM/N51/E010/TDM1_DEM__30_N51E011.zip'])
58 | ]
59 | return cases
60 |
61 |
62 | @pytest.fixture
63 | def tmp_home(monkeypatch, tmp_path):
64 | home = tmp_path / 'tmp_home'
65 | home.mkdir()
66 | var = 'USERPROFILE' if platform.system() == 'Windows' else 'HOME'
67 | monkeypatch.setenv(var, str(home))
68 | assert os.path.expanduser('~') == str(home)
69 | yield home
70 |
--------------------------------------------------------------------------------
/tests/data/0000022708_001001_ALOS2015976960-140909.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/0000022708_001001_ALOS2015976960-140909.zip
--------------------------------------------------------------------------------
/tests/data/ASA_IMS_1PNESA20040703_205338_000000182028_00172_12250_00001672562030318361237.N1:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/ASA_IMS_1PNESA20040703_205338_000000182028_00172_12250_00001672562030318361237.N1
--------------------------------------------------------------------------------
/tests/data/S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_6D00.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_6D00.zip
--------------------------------------------------------------------------------
/tests/data/S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_FEC3.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/S1A_IW_GRDH_1SDV_20150203T043109_20150203T043134_004454_00574F_FEC3.zip
--------------------------------------------------------------------------------
/tests/data/S1A_IW_GRDH_1SDV_20150222T170725_20150222T170750_004739_005DD8_CEAB.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/S1A_IW_GRDH_1SDV_20150222T170725_20150222T170750_004739_005DD8_CEAB.zip
--------------------------------------------------------------------------------
/tests/data/S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/S1A_IW_GRDH_1SDV_20150222T170750_20150222T170815_004739_005DD8_3768.zip
--------------------------------------------------------------------------------
/tests/data/S1A_IW_GRDH_1SDV_20210119T031653_20210119T031718_036201_043ED0_8255.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/S1A_IW_GRDH_1SDV_20210119T031653_20210119T031718_036201_043ED0_8255.zip
--------------------------------------------------------------------------------
/tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif
--------------------------------------------------------------------------------
/tests/data/S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif.aux.xml:
--------------------------------------------------------------------------------
1 | <PAMDataset>
2 |   <PAMRasterBand band="1">
3 |     <Metadata>
4 |       <MDI key="STATISTICS_MAXIMUM">1.4325850009918</MDI>
5 |       <MDI key="STATISTICS_MEAN">-12.12492953445</MDI>
6 |       <MDI key="STATISTICS_MINIMUM">-26.654710769653</MDI>
7 |       <MDI key="STATISTICS_STDDEV">4.7382735947383</MDI>
8 |     </Metadata>
9 |   </PAMRasterBand>
10 | </PAMDataset>
11 | 
--------------------------------------------------------------------------------
/tests/data/SAR_IMP_1PXESA19960808_205906_00000017G158_00458_26498_2615.E1:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/SAR_IMP_1PXESA19960808_205906_00000017G158_00458_26498_2615.E1
--------------------------------------------------------------------------------
/tests/data/SAR_IMS_1PXESA19951220_024320_00000015G152_00132_23166_0252.E1.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/SAR_IMS_1PXESA19951220_024320_00000015G152_00132_23166_0252.E1.zip
--------------------------------------------------------------------------------
/tests/data/archive_outdated_bbox.db:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/johntruckenbrodt/pyroSAR/95b79cce7daac625acf44d6dff488285e29cfda8/tests/data/archive_outdated_bbox.db
--------------------------------------------------------------------------------
/tests/data/dem.par:
--------------------------------------------------------------------------------
1 | Gamma DIFF&GEO DEM/MAP parameter file
2 | title: alps_dem_gamma_SRTM-1Sec-HGT
3 | DEM_projection: UTM
4 | data_format: REAL*4
5 | DEM_hgt_offset: 0.00000
6 | DEM_scale: 1.00000
7 | width: 5927
8 | nlines: 6455
9 | corner_north: 5235158.873 m
10 | corner_east: 515363.565 m
11 | post_north: -20.0000000 m
12 | post_east: 20.0000000 m
13 |
14 | ellipsoid_name: WGS 84
15 | ellipsoid_ra: 6378137.000 m
16 | ellipsoid_reciprocal_flattening: 298.2572236
17 |
18 | datum_name: WGS 1984
19 | datum_shift_dx: 0.000 m
20 | datum_shift_dy: 0.000 m
21 | datum_shift_dz: 0.000 m
22 | datum_scale_m: 0.00000e+00
23 | datum_rotation_alpha: 0.00000e+00 arc-sec
24 | datum_rotation_beta: 0.00000e+00 arc-sec
25 | datum_rotation_gamma: 0.00000e+00 arc-sec
26 | datum_country_list: Global Definition, WGS84, World
27 |
28 | projection_name: UTM
29 | projection_zone: 32
30 | false_easting: 500000.000 m
31 | false_northing: 0.000 m
32 | projection_k0: 0.9996000
33 | center_longitude: 9.0000000 decimal degrees
34 | center_latitude: 0.0000000 decimal degrees
35 |
36 |
--------------------------------------------------------------------------------
/tests/data/mli.par:
--------------------------------------------------------------------------------
1 | Gamma Interferometric SAR Processor (ISP) - Image Parameter File
2 |
3 | title: S1A-IW-IW-VV-3296 (software: Sentinel-1 IPF 002.36)
4 | sensor: S1A IW IW VV
5 | date: 2014 11 15 18 18 1.3091
6 | start_time: 65881.309050 s
7 | center_time: 65893.808110 s
8 | end_time: 65906.307169 s
9 | azimuth_line_time: 1.4954606e-03 s
10 | line_header_size: 0
11 | range_samples: 31897
12 | azimuth_lines: 16717
13 | range_looks: 5
14 | azimuth_looks: 1
15 | image_format: FLOAT
16 | image_geometry: SLANT_RANGE
17 | range_scale_factor: 1.0000000e+00
18 | azimuth_scale_factor: 1.0000000e+00
19 | center_latitude: 37.1843743 degrees
20 | center_longitude: -4.8856155 degrees
21 | heading: -13.1840654 degrees
22 | range_pixel_spacing: 5.000000 m
23 | azimuth_pixel_spacing: 10.003560 m
24 | near_range_slc: 800286.7867 m
25 | center_range_slc: 880026.7867 m
26 | far_range_slc: 959766.7867 m
27 | first_slant_range_polynomial: 0.00000 0.00000 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 s m 1 m^-1 m^-2 m^-3
28 | center_slant_range_polynomial: 0.00000 0.00000 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 s m 1 m^-1 m^-2 m^-3
29 | last_slant_range_polynomial: 0.00000 0.00000 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 s m 1 m^-1 m^-2 m^-3
30 | incidence_angle: 38.9988 degrees
31 | azimuth_deskew: ON
32 | azimuth_angle: 90.0000 degrees
33 | radar_frequency: 5.4050005e+09 Hz
34 | adc_sampling_rate: 6.4345238e+07 Hz
35 | chirp_bandwidth: 5.6500000e+07 Hz
36 | prf: 668.6903123 Hz
37 | azimuth_proc_bandwidth: 327.00000 Hz
38 | doppler_polynomial: -11.16383 -6.56197e-05 7.59030e-10 0.00000e+00 Hz Hz/m Hz/m^2 Hz/m^3
39 | doppler_poly_dot: 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 Hz/s Hz/s/m Hz/s/m^2 Hz/s/m^3
40 | doppler_poly_ddot: 0.00000e+00 0.00000e+00 0.00000e+00 0.00000e+00 Hz/s^2 Hz/s^2/m Hz/s^2/m^2 Hz/s^2/m^3
41 | receiver_gain: 0.0000 dB
42 | calibration_gain: 0.0000 dB
43 | sar_to_earth_center: 7070852.9213 m
44 | earth_radius_below_sensor: 6370467.2353 m
45 | earth_semi_major_axis: 6378137.0000 m
46 | earth_semi_minor_axis: 6356752.3141 m
47 | number_of_state_vectors: 15
48 | time_of_first_state_vector: 65824.000000 s
49 | state_vector_interval: 10.000000 s
50 | state_vector_position_1: 5924711.4180 -966420.2665 3737790.5072 m m m
51 | state_vector_velocity_1: -4144.02916 -1043.28697 6279.00570 m/s m/s m/s
52 | state_vector_position_2: 5882932.1764 -976768.3769 3800368.2816 m m m
53 | state_vector_velocity_2: -4211.73676 -1026.29936 6236.43151 m/s m/s m/s
54 | state_vector_position_3: 5840478.3441 -986945.5426 3862516.7916 m m m
55 | state_vector_velocity_3: -4278.94597 -1009.09848 6193.15365 m/s m/s m/s
56 | state_vector_position_4: 5797354.9443 -996949.6443 3924229.0253 m m m
57 | state_vector_velocity_4: -4345.64895 -991.68701 6149.17708 m/s m/s m/s
58 | state_vector_position_5: 5753567.0788 -1006778.5897 3985498.0206 m m m
59 | state_vector_velocity_5: -4411.83787 -974.06767 6104.50682 m/s m/s m/s
60 | state_vector_position_6: 5709119.9266 -1016430.3138 4046316.8661 m m m
61 | state_vector_velocity_6: -4477.50500 -956.24320 6059.14797 m/s m/s m/s
62 | state_vector_position_7: 5664018.7442 -1025902.7791 4106678.7017 m m m
63 | state_vector_velocity_7: -4542.64266 -938.21637 6013.10570 m/s m/s m/s
64 | state_vector_position_8: 5618268.8641 -1035193.9760 4166576.7197 m m m
65 | state_vector_velocity_8: -4607.24325 -919.99000 5966.38528 m/s m/s m/s
66 | state_vector_position_9: 5571875.6949 -1044301.9234 4226004.1649 m m m
67 | state_vector_velocity_9: -4671.29921 -901.56693 5918.99202 m/s m/s m/s
68 | state_vector_position_10: 5524844.7202 -1053224.6686 4284954.3358 m m m
69 | state_vector_velocity_10: -4734.80307 -882.95004 5870.93135 m/s m/s m/s
70 | state_vector_position_11: 5477181.4982 -1061960.2877 4343420.5855 m m m
71 | state_vector_velocity_11: -4797.74742 -864.14223 5822.20873 m/s m/s m/s
72 | state_vector_position_12: 5428891.6610 -1070506.8864 4401396.3223 m m m
73 | state_vector_velocity_12: -4860.12490 -845.14645 5772.82972 m/s m/s m/s
74 | state_vector_position_13: 5379980.9136 -1078862.5998 4458875.0103 m m m
75 | state_vector_velocity_13: -4921.92827 -825.96568 5722.79995 m/s m/s m/s
76 | state_vector_position_14: 5330455.0334 -1087025.5930 4515850.1705 m m m
77 | state_vector_velocity_14: -4983.15030 -806.60292 5672.12514 m/s m/s m/s
78 | state_vector_position_15: 5280319.8694 -1094994.0613 4572315.3816 m m m
79 | state_vector_velocity_15: -5043.78388 -787.06120 5620.81106 m/s m/s m/s
80 |
81 |
--------------------------------------------------------------------------------
/tests/installtest_gdal_geos.py:
--------------------------------------------------------------------------------
1 | from osgeo import ogr
2 |
3 |
4 | # test whether GDAL was successfully built with GEOS support
5 | wkt1 = 'POLYGON ((' \
6 | '1208064.271243039 624154.6783778917, ' \
7 | '1208064.271243039 601260.9785661874, ' \
8 | '1231345.9998651114 601260.9785661874, ' \
9 | '1231345.9998651114 624154.6783778917, ' \
10 | '1208064.271243039 624154.6783778917' \
11 | '))'
12 |
13 | wkt2 = 'POLYGON ((' \
14 | '1199915.6662253144 633079.3410163528, ' \
15 | '1199915.6662253144 614453.958118695, ' \
16 | '1219317.1067437078 614453.958118695, ' \
17 | '1219317.1067437078 633079.3410163528, ' \
18 | '1199915.6662253144 633079.3410163528' \
19 | '))'
20 |
21 | poly1 = ogr.CreateGeometryFromWkt(wkt1)
22 | poly2 = ogr.CreateGeometryFromWkt(wkt2)
23 |
24 | intersection = poly1.Intersection(poly2)
25 |
--------------------------------------------------------------------------------
/tests/installtest_ogr_sqlite.py:
--------------------------------------------------------------------------------
1 | from osgeo import ogr
2 |
3 | driver = ogr.GetDriverByName('SQLite')
4 |
5 | if driver is None:
6 | raise RuntimeError('OGR was built without SQLite driver')
7 |
--------------------------------------------------------------------------------
/tests/installtest_spatialite.py:
--------------------------------------------------------------------------------
1 |
2 | try:
3 | from pysqlite2 import dbapi2 as sqlite3
4 | except ImportError:
5 | import sqlite3
6 |
7 | print(sqlite3.__file__)
8 |
9 | con = sqlite3.connect(':memory:')
10 |
11 | con.enable_load_extension(True)
12 |
13 | try:
14 | con.load_extension('mod_spatialite')
15 | except sqlite3.OperationalError:
16 | con.load_extension('libspatialite')
17 |
--------------------------------------------------------------------------------
/tests/test_ancillary.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 | import datetime
4 | from pathlib import Path
5 | from pyroSAR.ancillary import (seconds, groupbyTime, groupby,
6 | parse_datasetname, find_datasets,
7 | Lock, LockCollection)
8 |
9 |
10 | def test_seconds():
11 | assert seconds('test_20151212T234411') == 3658952651.0
12 |
13 |
14 | def test_groupby():
15 | """
16 | Test correct grouping of filenames by their attributes
17 | Methodology is to provide a list of partially overlapping filenames
18 | and ensure the resultant list of lists contains the correct entry numbers
19 | """
20 | filenames = ['S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif',
21 | 'S1A__IW___A_20150309T173017_HH_grd_mli_geo_norm_db.tif',
22 | 'S2A__IW___A_20180309T173017_HH_grd_mli_geo_norm_db.tif']
23 | sensor_groups = groupby(filenames, 'sensor')
24 | print(sensor_groups)
25 | assert len(sensor_groups) == 2
26 | assert isinstance(sensor_groups[0], list)
27 | assert len(sensor_groups[0]) == 2
28 |
29 | filenames += ['S2A__IW___A_20180309T173017_VV_grd_mli_geo_norm_db.tif']
30 |
31 | polarization_groups = groupby(filenames, 'polarization')
32 | print(polarization_groups)
33 | assert len(polarization_groups) == 2
34 | assert isinstance(polarization_groups[0], list)
35 | assert isinstance(polarization_groups[1], list)
36 | assert len(polarization_groups[0]) == 2
37 | assert len(polarization_groups[1]) == 2
38 |
39 | filenames += ['S2A__IW___A_20180309T173017_HV_grd_mli_geo_norm_db.tif']
40 |
41 | polarization_groups = groupby(filenames, 'polarization')
42 | print(polarization_groups)
43 | assert len(polarization_groups) == 3
44 | assert isinstance(polarization_groups[0], list)
45 | assert isinstance(polarization_groups[1], list)
46 | assert isinstance(polarization_groups[2], list)
47 | assert len(polarization_groups[0]) == 2
48 | assert len(polarization_groups[1]) == 1
49 | assert len(polarization_groups[2]) == 2
50 |
51 |
52 | def test_groupbyTime():
53 | filenames = ['S1__IW___A_20151212T120000',
54 | 'S1__IW___A_20151212T120100',
55 | 'S1__IW___A_20151212T120300']
56 | groups = groupbyTime(filenames, seconds, 60)
57 | print(groups)
58 | assert len(groups) == 2
59 | assert isinstance(groups[0], list)
60 | assert len(groups[0]) == 2
61 |
62 | filenames = ['S1__IW___A_20151212T120000',
63 | 'S1__IW___A_20151212T120100',
64 | 'S1__IW___A_20151212T120200']
65 | groups = groupbyTime(filenames, seconds, 60)
66 | print(groups)
67 | assert len(groups[0]) == 3
68 |
69 |
70 | def test_parse_datasetname():
71 | assert parse_datasetname('foobar') is None
72 | filename = 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'
73 | meta = parse_datasetname(filename, parse_date=True)
74 | assert sorted(meta.keys()) == ['acquisition_mode', 'extensions', 'filename',
75 | 'filetype', 'orbit', 'outname_base',
76 | 'polarization', 'proc_steps', 'sensor', 'start']
77 | assert meta['acquisition_mode'] == 'IW'
78 | assert meta['extensions'] is None
79 | assert meta['filename'] == filename
80 | assert meta['orbit'] == 'A'
81 | assert meta['outname_base'] == 'S1A__IW___A_20150309T173017'
82 | assert meta['polarization'] == 'VV'
83 | assert meta['proc_steps'] == ['grd', 'mli', 'geo', 'norm', 'db']
84 | assert meta['sensor'] == 'S1A'
85 | assert meta['start'] == datetime.datetime(2015, 3, 9, 17, 30, 17)
86 | meta = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd.tif')
87 | assert meta['proc_steps'] == ['grd']
88 |
89 | meta1 = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif')
90 | meta2 = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db')
91 | meta3 = parse_datasetname('S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.nc')
92 |
93 | assert meta1['filetype'] == '.tif'
94 | assert meta2['filetype'] == ''
95 | assert meta3['filetype'] == '.nc'
96 |
97 | for k in meta1.keys():
98 | if k not in ['filename', 'filetype']:
99 | assert meta1[k] == meta2[k]
100 | assert meta1[k] == meta3[k]
101 |
102 | filename = 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'
103 | expectation = {'outname_base': 'S1A__IW___A_20150309T173017', 'sensor': 'S1A', 'acquisition_mode': 'IW',
104 | 'orbit': 'A', 'start': '20150309T173017', 'extensions': None, 'polarization': 'VV',
105 | 'proc_steps': ['grd', 'mli', 'geo', 'norm', 'db'], 'filetype': '.tif',
106 | 'filename': 'S1A__IW___A_20150309T173017_VV_grd_mli_geo_norm_db.tif'}
107 | assert parse_datasetname(filename) == expectation
108 |
109 | filename = 'S1A__IW___A_20150309T173017_149_abc_VV_grd_mli_geo_norm_db.tif'
110 | expectation = {'outname_base': 'S1A__IW___A_20150309T173017_149_abc', 'sensor': 'S1A', 'acquisition_mode': 'IW',
111 | 'orbit': 'A', 'start': '20150309T173017', 'extensions': '149_abc', 'polarization': 'VV',
112 | 'proc_steps': ['grd', 'mli', 'geo', 'norm', 'db'], 'filetype': '.tif',
113 | 'filename': 'S1A__IW___A_20150309T173017_149_abc_VV_grd_mli_geo_norm_db.tif'}
114 | assert parse_datasetname(filename) == expectation
115 |
116 | filename = 'S1A__IW___A_20150309T173017_149_inc_geo.tif'
117 | expectation = {'outname_base': 'S1A__IW___A_20150309T173017_149_inc_geo', 'sensor': 'S1A', 'acquisition_mode': 'IW',
118 | 'orbit': 'A', 'start': '20150309T173017', 'extensions': '149_inc_geo', 'polarization': None,
119 | 'proc_steps': None, 'filetype': '.tif', 'filename': 'S1A__IW___A_20150309T173017_149_inc_geo.tif'}
120 | assert parse_datasetname(filename) == expectation
121 |
122 |
123 | def test_find_datasets(testdir):
124 | assert len(find_datasets(testdir, sensor='S1A')) == 1
125 | assert len(find_datasets(testdir, sensor='S1B')) == 0
126 |
127 |
128 | def test_lock(tmpdir):
129 | f1 = str(tmpdir / 'test1.txt')
130 | f2 = str(tmpdir / 'test2.txt')
131 | Path(f1).touch()
132 | Path(f2).touch()
133 |
134 | # simple nested write-locking
135 | with Lock(f1):
136 | with Lock(f1):
137 | assert os.path.isfile(f1 + '.lock')
138 | assert os.path.isfile(f1 + '.lock')
139 | assert not os.path.isfile(f1 + '.lock')
140 |
141 | # simple nested read-locking
142 | with Lock(f1, soft=True) as l1:
143 | used = l1.used
144 | with Lock(f1, soft=True):
145 | assert os.path.isfile(used)
146 | assert os.path.isfile(used)
147 | assert not os.path.isfile(used)
148 |
149 | # separate instances for different files
150 | with Lock(f1):
151 | with Lock(f2):
152 | assert os.path.isfile(f2 + '.lock')
153 | assert os.path.isfile(f1 + '.lock')
154 |
155 | # combination of nested locking, multiple instances, and LockCollection
156 | with LockCollection([f1, f2]):
157 | with LockCollection([f1, f2]):
158 | assert os.path.isfile(f1 + '.lock')
159 | assert os.path.isfile(f2 + '.lock')
160 | with Lock(f2):
161 | assert os.path.isfile(f1 + '.lock')
162 | assert os.path.isfile(f2 + '.lock')
163 | assert os.path.isfile(f1 + '.lock')
164 | assert os.path.isfile(f2 + '.lock')
165 | assert not os.path.isfile(f1 + '.lock')
166 | assert not os.path.isfile(f2 + '.lock')
167 |
168 | # nested locking does not work if the `soft` argument changes
169 | with Lock(f1):
170 | with pytest.raises(RuntimeError):
171 | with Lock(f1, soft=True):
172 | assert os.path.isfile(f1 + '.lock')
173 |
174 | with Lock(f1, soft=True):
175 | with pytest.raises(RuntimeError):
176 | with Lock(f1):
177 | assert os.path.isfile(f1 + '.lock')
178 |
179 | # not using the context manager requires manual lock removal
180 | lock = Lock(f1)
181 | try:
182 | raise RuntimeError
183 | except RuntimeError as e:
184 | lock.remove(exc_type=type(e))
185 | assert os.path.isfile(f1 + '.error')
186 |
--------------------------------------------------------------------------------
/tests/test_auxdata.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 | from pyroSAR.auxdata import dem_autoload, DEMHandler, dem_create
4 |
5 | from spatialist import bbox
6 |
7 |
8 | def test_handler(auxdata_dem_cases):
9 | with bbox({'xmin': 11.5, 'xmax': 11.9, 'ymin': 51.1, 'ymax': 51.5}, crs=4326) as box:
10 | with DEMHandler([box]) as handler:
11 | for demType, reference in auxdata_dem_cases:
12 | result = handler.remote_ids(dem_type=demType, extent=box.extent)
13 | assert result == reference
14 |
15 | with bbox({'xmin': -11.9, 'xmax': -11.5, 'ymin': -51.5, 'ymax': -51.1}, crs=4326) as box:
16 | with DEMHandler([box]) as handler:
17 | cases = [('AW3D30', ['S055W015/S052W012.tar.gz']),
18 | ('SRTM 1Sec HGT', ['S52W012.SRTMGL1.hgt.zip']),
19 | ('SRTM 3Sec', ['srtm_34_23.zip']),
20 | # ('TDX90m', ['DEM/S52/W010/TDM1_DEM__30_S52W012.zip'])
21 | ]
22 | for demType, reference in cases:
23 | result = handler.remote_ids(dem_type=demType, extent=box.extent)
24 | assert result == reference
25 | with pytest.raises(RuntimeError):
26 | test = DEMHandler('foobar')
27 | ext_utm = {'xmin': -955867, 'xmax': -915536, 'ymin': -5915518, 'ymax': -5863678}
28 | with bbox(ext_utm, crs=32632) as box:
29 | with pytest.raises(RuntimeError):
30 | test = DEMHandler([box])
31 |
32 |
33 | def test_autoload(auxdata_dem_cases, travis):
34 | # delete all target files to test downloading them again
35 | home = os.path.expanduser('~')
36 | demdir = os.path.join(home, '.snap', 'auxdata', 'dem')
37 |     local_files = [os.path.join(demdir, x, os.path.basename(y[0])) for x, y in auxdata_dem_cases]
38 |     for item in local_files:
39 | if os.path.isfile(item):
40 | os.remove(item)
41 | with bbox({'xmin': 11.5, 'xmax': 11.9, 'ymin': 51, 'ymax': 51.5}, crs=4326) as box:
42 | # if the following is run in a loop, it is not possible to see which demType failed
43 | # Travis CI does not support ftp access;
44 | # see https://blog.travis-ci.com/2018-07-23-the-tale-of-ftp-at-travis-ci
45 | if not travis:
46 | files = dem_autoload([box], 'AW3D30')
47 | assert len(files) == 1
48 | files = dem_autoload([box], 'AW3D30', product='stk')
49 | assert len(files) == 1
50 | files = dem_autoload([box], 'SRTM 1Sec HGT')
51 | assert len(files) == 1
52 | files = dem_autoload([box], 'SRTM 3Sec')
53 | assert len(files) == 1
54 | with pytest.raises(RuntimeError):
55 | files = dem_autoload([box], 'TDX90m')
56 | with pytest.raises(RuntimeError):
57 | dem_autoload([box], 'AW3D30', product='foobar')
58 |
59 |
60 | def test_dem_create(tmpdir):
61 | with bbox({'xmin': 11.5, 'xmax': 11.9, 'ymin': 51, 'ymax': 51.5}, crs=4326) as box:
62 | with pytest.raises(RuntimeError):
63 | files = dem_autoload([box], 'foobar')
64 | vrt = '/vsimem/test.vrt'
65 | dem_autoload([box], 'SRTM 3Sec', vrt=vrt)
66 | out = os.path.join(str(tmpdir), 'srtm.tif')
67 | dem_create(src=vrt, dst=out, t_srs=32632, tr=(90, 90), nodata=-32767)
68 | assert os.path.isfile(out)
69 |
70 |
71 | def test_remote_ids():
72 | ext = {'xmin': 11, 'xmax': 12,
73 | 'ymin': 51, 'ymax': 51.5}
74 | with bbox(ext, 4326) as box:
75 | with DEMHandler([box]) as dem:
76 | ref1 = range(51, 52), range(11, 12)
77 | ref5 = range(50, 55, 5), range(10, 15, 5)
78 | ref15 = range(45, 60, 15), range(0, 15, 15)
79 | assert dem.intrange(box.extent, 1) == ref1
80 | assert dem.intrange(box.extent, 5) == ref5
81 | assert dem.intrange(box.extent, 15) == ref15
82 |
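83 |
84 | def test_intrange_demo():
85 |     # Illustrative sketch of the tiling logic exercised above: snapping a
86 |     # geographic extent outward to a regular tile grid. `intrange_demo` is
87 |     # a hypothetical stand-in, not the pyroSAR implementation; it merely
88 |     # reproduces the reference values from test_remote_ids.
89 |     import math
90 |
91 |     def intrange_demo(extent, step):
92 |         lat = range(int(math.floor(extent['ymin'] / step)) * step,
93 |                     int(math.ceil(extent['ymax'] / step)) * step, step)
94 |         lon = range(int(math.floor(extent['xmin'] / step)) * step,
95 |                     int(math.ceil(extent['xmax'] / step)) * step, step)
96 |         return lat, lon
97 |
98 |     ext = {'xmin': 11, 'xmax': 12, 'ymin': 51, 'ymax': 51.5}
99 |     assert intrange_demo(ext, 1) == (range(51, 52), range(11, 12))
100 |     assert intrange_demo(ext, 5) == (range(50, 55, 5), range(10, 15, 5))
101 |     assert intrange_demo(ext, 15) == (range(45, 60, 15), range(0, 15, 15))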
--------------------------------------------------------------------------------
/tests/test_config.py:
--------------------------------------------------------------------------------
1 | from pyroSAR.config import ConfigHandler
2 | import os
3 | import pytest
4 |
5 |
6 | class TestConfigHandler:
7 |
8 | def test_make_dir_and_config(self, tmpdir):
9 | conf = ConfigHandler()
10 |
11 | path_pyrosar = os.path.exists(conf._ConfigHandler__GLOBAL['path'])
12 | path_config = os.path.isfile(conf._ConfigHandler__GLOBAL['config'])
13 |
14 | assert path_pyrosar is True
15 | assert path_config is True
16 |
17 | def test_add_section(self):
18 | conf = ConfigHandler()
19 | conf.add_section('FOO')
20 |
21 | assert 'FOO' in conf.sections
22 |
23 | def test_options(self, tmpdir):
24 | conf = ConfigHandler()
25 | conf.set('FOO', 'bar', 'foobar')
26 |
27 | # cannot set attribute for section that does not exist
28 | with pytest.raises(AttributeError):
29 | conf.set('SNAPp', 'etc', 'temp/dir')
30 |
31 | assert conf['FOO']['bar'] == 'foobar'
32 | assert conf['FOO'] == {'bar': 'foobar'}
33 |
34 | def test_overwrite(self, tmpdir):
35 | conf = ConfigHandler()
36 |
37 | with pytest.raises(RuntimeError):
38 | conf.set('FOO', 'bar', 'loremipsum')
39 |
40 | conf.set('FOO', 'bar', 'loremipsum', overwrite=True)
41 | assert conf['FOO']['bar'] == 'loremipsum'
42 |
43 | def test_remove(self, tmpdir):
44 | conf = ConfigHandler()
45 |
46 | with pytest.raises(AttributeError):
47 | conf.remove_option('SNAP', 'kex')
48 |
49 | with pytest.raises(AttributeError):
50 | conf.remove_option('SNApP', 'etc')
51 |
52 | conf.remove_option('FOO', 'bar')
53 | assert list(conf['FOO'].keys()) == []
54 |
55 | conf.remove_section('FOO')
56 |
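57 |     def test_section_removed(self):
58 |         # Illustrative sketch, not part of the original suite: after
59 |         # remove_section('FOO') above, the section should no longer be
60 |         # listed, assuming ConfigHandler persists state across instances
61 |         # as the preceding tests rely on.
62 |         conf = ConfigHandler()
63 |         assert 'FOO' not in conf.sections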
--------------------------------------------------------------------------------
/tests/test_drivers.py:
--------------------------------------------------------------------------------
1 | import pyroSAR
2 |
3 | import pytest
4 | import platform
5 | import tarfile as tf
6 | import os
7 | from datetime import datetime
8 | from spatialist import Vector
9 | from sqlalchemy import Table, MetaData, Column, Integer, String
10 | from geoalchemy2 import Geometry
11 | from shapely import wkt
12 |
13 | metadata = MetaData()
14 |
15 | mytable = Table('mytable', metadata,
16 | Column('mytable_id', Integer, primary_key=True),
17 | Column('value', String(50)),
18 | Column('shape', Geometry('POLYGON', management=True, srid=4326)))
19 |
20 |
21 | @pytest.fixture()
22 | def testcases():
23 | cases = {
24 | 's1': {
25 | 'acquisition_mode': 'IW',
26 | 'bbox_area': 7.573045244595988,
27 | 'compression': 'zip',
28 | 'corners': {'ymax': 52.183979, 'ymin': 50.295261, 'xmin': 8.017178, 'xmax': 12.0268},
29 | 'hgt_len': 15,
30 | 'lines': 16685,
31 | 'orbit': 'A',
32 | 'outname': 'S1A__IW___A_20150222T170750',
33 | 'polarizations': ['VV', 'VH'],
34 | 'product': 'GRD',
35 | 'samples': 25368,
36 | 'sensor': 'S1A',
37 | 'spacing': (10.0, 9.998647),
38 | 'start': '20150222T170750',
39 | 'stop': '20150222T170815'
40 | },
41 | 'psr2': {
42 | 'acquisition_mode': 'FBD',
43 | 'compression': 'zip',
44 | 'corners': {'xmin': -62.9005207, 'xmax': -62.1629744, 'ymin': -11.4233051, 'ymax': -10.6783401},
45 | 'hgt_len': 2,
46 | 'lines': 13160,
47 | 'orbit': 'A',
48 | 'outname': 'PSR2_FBD__A_20140909T043342',
49 | 'polarizations': ['HH', 'HV'],
50 | 'product': '1.5',
51 | 'samples': 12870,
52 | 'sensor': 'PSR2',
53 | 'spacing': (6.25, 6.25),
54 | 'start': '20140909T043342',
55 | 'stop': '20140909T043352'
56 | }
57 | }
58 | return cases
59 |
60 |
61 | @pytest.fixture
62 | def scene(testcases, testdata, request):
63 | case = testcases[request.param]
64 | case['pyro'] = pyroSAR.identify(testdata[request.param])
65 | return case
66 |
67 |
68 | class Test_Metadata():
69 | @pytest.mark.parametrize('scene', ['s1', 'psr2'], indirect=True)
70 | def test_attributes(self, scene):
71 | assert scene['pyro'].acquisition_mode == scene['acquisition_mode']
72 | assert scene['pyro'].compression == scene['compression']
73 | assert scene['pyro'].getCorners() == scene['corners']
74 | assert scene['pyro'].lines == scene['lines']
75 | assert scene['pyro'].outname_base() == scene['outname']
76 | assert scene['pyro'].orbit == scene['orbit']
77 | assert scene['pyro'].polarizations == scene['polarizations']
78 | assert scene['pyro'].product == scene['product']
79 | assert scene['pyro'].samples == scene['samples']
80 | assert scene['pyro'].start == scene['start']
81 | assert scene['pyro'].stop == scene['stop']
82 | assert scene['pyro'].sensor == scene['sensor']
83 | assert scene['pyro'].spacing == scene['spacing']
84 | assert len(scene['pyro'].getHGT()) == scene['hgt_len']
85 |
86 |
87 | def test_identify_fail(testdir, testdata):
88 | with pytest.raises(OSError):
89 | pyroSAR.identify(os.path.join(testdir, 'foobar'))
90 | with pytest.raises(RuntimeError):
91 | pyroSAR.identify(testdata['tif'])
92 |
93 |
94 | def test_identify_many_fail(testdata):
95 | assert pyroSAR.identify_many([testdata['tif']]) == []
96 |
97 |
98 | def test_filter_processed(tmpdir, testdata):
99 | scene = pyroSAR.identify(testdata['s1'])
100 | assert len(pyroSAR.filter_processed([scene], str(tmpdir))) == 1
101 |
102 |
103 | def test_parse_date():
104 | with pytest.raises(ValueError):
105 | print(pyroSAR.parse_date(1))
106 | with pytest.raises(ValueError):
107 | print(pyroSAR.parse_date('foobar'))
108 | assert pyroSAR.parse_date(datetime(2006, 11, 21)) == '20061121T000000'
109 |
110 |
111 | def test_export2dict():
112 | pass
113 |
114 |
115 | def test_getFileObj(tmpdir, testdata):
116 | scene = pyroSAR.identify(testdata['s1'])
117 | if platform.system() == 'Windows':
118 | directory = u'\\\\?\\' + str(tmpdir)
119 | else:
120 | directory = str(tmpdir)
121 | scene.unpack(directory)
122 | scene = pyroSAR.identify(scene.scene)
123 | item = scene.findfiles('manifest.safe')[0]
124 | assert os.path.basename(item) == 'manifest.safe'
125 | assert isinstance(scene.getFileObj(item).read(), (bytes, str))
126 |
127 | filename = os.path.join(str(tmpdir), os.path.basename(testdata['s1'].replace('zip', 'tar.gz')))
128 | with tf.open(filename, 'w:gz') as tar:
129 | tar.add(scene.scene, arcname=os.path.basename(scene.scene))
130 | # test error if scene is not a directory, zip or tar
131 | with pytest.raises(RuntimeError):
132 | pyroSAR.getFileObj(scene=os.path.join(scene.scene, 'manifest.safe'), filename='bar')
133 | scene = pyroSAR.identify(filename)
134 | assert scene.compression == 'tar'
135 | item = scene.findfiles('manifest.safe')[0]
136 | assert isinstance(scene.getFileObj(item).read(), (bytes, str))
137 | with pytest.raises(RuntimeError):
138 | pyroSAR.getFileObj('foo', 'bar')
139 |
140 |
141 | def test_scene(tmpdir, testdata):
142 | dbfile = os.path.join(str(tmpdir), 'scenes.db')
143 | id = pyroSAR.identify(testdata['s1'])
144 | assert isinstance(id.export2dict(), dict)
145 | with pytest.raises(RuntimeError):
146 | assert isinstance(id.gdalinfo(), dict)
147 | id.summary()
148 | id.bbox(outname=os.path.join(str(tmpdir), 'bbox_test.shp'), overwrite=True)
149 | assert id.is_processed(str(tmpdir)) is False
150 | id.unpack(str(tmpdir), overwrite=True)
151 | assert id.compression is None
152 | id.export2sqlite(dbfile)
153 | with pytest.raises(RuntimeError):
154 | id.getGammaImages()
155 | assert id.getGammaImages(id.scene) == []
156 | id = pyroSAR.identify(testdata['psr2'])
157 | assert id.getCorners() == {'xmax': -62.1629744, 'xmin': -62.9005207,
158 | 'ymax': -10.6783401, 'ymin': -11.4233051}
159 |
160 |
161 | def test_archive(tmpdir, testdata):
162 | id = pyroSAR.identify(testdata['s1'])
163 | dbfile = os.path.join(str(tmpdir), 'scenes.db')
164 | db = pyroSAR.Archive(dbfile)
165 | db.insert(testdata['s1'])
166 | assert all(isinstance(x, str) for x in db.get_tablenames())
167 | assert all(isinstance(x, str) for x in db.get_colnames())
168 | assert db.is_registered(testdata['s1']) is True
169 | assert len(db.get_unique_directories()) == 1
170 | assert db.select_duplicates() == []
171 | assert db.select_duplicates(outname_base='S1A__IW___A_20150222T170750', scene='scene.zip') == []
172 | assert len(db.select(mindate='20141001T192312', maxdate='20201001T192312')) == 1
173 | assert len(db.select(polarizations=['VV'])) == 1
174 | assert len(db.select(vectorobject=id.bbox())) == 1
175 | assert len(db.select(sensor='S1A', vectorobject='foo', processdir=str(tmpdir))) == 1
176 | assert len(db.select(sensor='S1A', mindate='foo', maxdate='bar', foobar='foobar')) == 1
177 | out = db.select(vv=1, acquisition_mode=('IW', 'EW'))
178 | assert len(out) == 1
179 | assert isinstance(out[0], str)
180 |
181 | out = db.select(vv=1, return_value=['mindate', 'geometry_wkt', 'geometry_wkb'])
182 | assert len(out) == 1
183 | assert isinstance(out[0], tuple)
184 | assert out[0][0] == '20150222T170750'
185 | geom = wkt.loads('POLYGON(('
186 | '8.505644 50.295261, 12.0268 50.688881, '
187 | '11.653832 52.183979, 8.017178 51.788181, '
188 | '8.505644 50.295261))')
189 | assert wkt.loads(out[0][1]) == geom
190 | assert out[0][2] == geom.wkb
191 |
192 | with pytest.raises(ValueError):
193 | out = db.select(vv=1, return_value=['foobar'])
194 |
195 | db.insert(testdata['s1_3'])
196 | db.insert(testdata['s1_4'])
197 | db.drop_element(testdata['s1_3'])
198 | assert db.size == (2, 0)
199 | db.drop_element(testdata['s1_4'])
200 |
201 | db.add_tables(mytable)
202 | assert 'mytable' in db.get_tablenames()
203 | with pytest.raises(TypeError):
204 | db.filter_scenelist([1])
205 | db.close()
206 |
207 |
208 | def test_archive2(tmpdir, testdata):
209 | dbfile = os.path.join(str(tmpdir), 'scenes.db')
210 | with pyroSAR.Archive(dbfile) as db:
211 | db.insert(testdata['s1'])
212 | assert db.size == (1, 0)
213 | shp = os.path.join(str(tmpdir), 'db.shp')
214 | db.export2shp(shp)
215 |
216 | os.remove(dbfile)
217 | assert not os.path.isfile(dbfile)
218 | assert Vector(shp).nfeatures == 1
219 |
220 | with pyroSAR.Archive(dbfile) as db:
221 | with pytest.raises(OSError):
222 | db.import_outdated(testdata['archive_old_csv'])
223 | with pytest.raises(RuntimeError):
224 | db.import_outdated('foobar')
225 |
226 | # the archive_old_bbox database contains a relative file name for the scene
227 | # so that it can be reimported into the new database. The working directory
228 | # is changed temporarily so that the scene can be found.
229 | cwd = os.getcwd()
230 | folder = os.path.dirname(os.path.realpath(__file__))
231 | os.chdir(os.path.join(folder, 'data'))
232 | with pyroSAR.Archive(dbfile) as db:
233 | with pyroSAR.Archive(testdata['archive_old_bbox'], legacy=True) as db_old:
234 | db.import_outdated(db_old)
235 | os.chdir(cwd)
236 |
237 | with pytest.raises(RuntimeError):
238 | db = pyroSAR.Archive(testdata['archive_old_csv'])
239 | with pytest.raises(RuntimeError):
240 | db = pyroSAR.Archive(testdata['archive_old_bbox'])
241 |
242 |
243 | def test_archive_postgres(tmpdir, testdata):
244 | pguser = os.environ.get('PGUSER')
245 | pgpassword = os.environ.get('PGPASSWORD')
246 | pgport = os.environ.get('PGPORT')
247 | if pgport is not None:
248 | pgport = int(pgport)
249 | else:
250 | pgport = 5432
251 |
252 | id = pyroSAR.identify(testdata['s1'])
253 | db = pyroSAR.Archive('test', postgres=True, port=pgport, user=pguser, password=pgpassword)
254 | db.insert(testdata['s1'])
255 | assert all(isinstance(x, str) for x in db.get_tablenames())
256 | assert all(isinstance(x, str) for x in db.get_colnames())
257 | assert db.is_registered(testdata['s1']) is True
258 | assert len(db.get_unique_directories()) == 1
259 | assert db.select_duplicates() == []
260 | assert db.select_duplicates(outname_base='S1A__IW___A_20150222T170750', scene='scene.zip') == []
261 | assert len(db.select(mindate='20141001T192312', maxdate='20201001T192312')) == 1
262 | assert len(db.select(polarizations=['VV'])) == 1
263 | assert len(db.select(vectorobject=id.bbox())) == 1
264 | assert len(db.select(sensor='S1A', vectorobject='foo', processdir=str(tmpdir))) == 1
265 | assert len(db.select(sensor='S1A', mindate='foo', maxdate='bar', foobar='foobar')) == 1
266 | out = db.select(vv=1, acquisition_mode=('IW', 'EW'))
267 | assert len(out) == 1
268 | assert isinstance(out[0], str)
269 |
270 | out = db.select(vv=1, return_value=['scene', 'start'])
271 | assert len(out) == 1
272 | assert isinstance(out[0], tuple)
273 | assert out[0][1] == '20150222T170750'
274 |
275 | with pytest.raises(ValueError):
276 | out = db.select(vv=1, return_value=['foobar'])
277 |
278 | db.add_tables(mytable)
279 | assert 'mytable' in db.get_tablenames()
280 | with pytest.raises(TypeError):
281 | db.filter_scenelist([1])
282 | db.close()
283 | with pyroSAR.Archive('test', postgres=True, port=pgport,
284 | user=pguser, password=pgpassword) as db:
285 | assert db.size == (1, 0)
286 | shp = os.path.join(str(tmpdir), 'db.shp')
287 | db.export2shp(shp)
288 | pyroSAR.drop_archive(db)
289 | assert Vector(shp).nfeatures == 1
290 |
291 | with pyroSAR.Archive('test', postgres=True, port=pgport,
292 | user=pguser, password=pgpassword) as db:
293 | with pytest.raises(OSError):
294 | db.import_outdated(testdata['archive_old_csv'])
295 | pyroSAR.drop_archive(db)
296 |
297 | # the archive_old_bbox database contains a relative file name for the scene
298 | # so that it can be reimported into the new database. The working directory
299 | # is changed temporarily so that the scene can be found.
300 | cwd = os.getcwd()
301 | folder = os.path.dirname(os.path.realpath(__file__))
302 | os.chdir(os.path.join(folder, 'data'))
303 | with pyroSAR.Archive('test', postgres=True, port=pgport,
304 | user=pguser, password=pgpassword) as db:
305 | with pyroSAR.Archive(testdata['archive_old_bbox'], legacy=True) as db_old:
306 | db.import_outdated(db_old)
307 | pyroSAR.drop_archive(db)
308 | os.chdir(cwd)
309 |
310 | dbfile = os.path.join(str(tmpdir), 'scenes.db')
311 | with pyroSAR.Archive('test', postgres=True, port=pgport,
312 | user=pguser, password=pgpassword) as db:
313 | with pyroSAR.Archive(dbfile, legacy=True) as db_sqlite:
314 | db.import_outdated(db_sqlite)
315 | pyroSAR.drop_archive(db)
316 |
317 | with pytest.raises(SystemExit) as pytest_wrapped_e:
318 | pyroSAR.Archive('test', postgres=True, user='hello_world', port=7080)
319 | assert pytest_wrapped_e.type == SystemExit
320 |
321 |
322 | datasets = ['asar', 'ers1_esa', 'ers1_ceos', 'psr2', 's1']
323 |
324 |
325 | @pytest.mark.parametrize('dataset', datasets)
326 | def test_geometry(testdata, dataset):
327 | scene = pyroSAR.identify(testdata[dataset])
328 | with scene.geometry() as geom:
329 | assert isinstance(geom, Vector)
330 |
331 |
332 | def test_geo_grid(tmpdir, testdata):
333 | scene = pyroSAR.identify(testdata['s1'])
334 | with scene.geo_grid() as geom:
335 | assert isinstance(geom, Vector)
336 | out = tmpdir / "geogrid.gpkg"
337 | scene.geo_grid(outname=str(out))
338 | assert out.exists()
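339 |
340 |
341 | def test_identify_many(testdata):
342 |     # Illustrative sketch, not part of the original suite: identify_many
343 |     # should return an ID object for each readable scene, complementing
344 |     # the failure case in test_identify_many_fail above.
345 |     scenes = pyroSAR.identify_many([testdata['s1']])
346 |     assert len(scenes) == 1
347 |     assert scenes[0].sensor == 'S1A'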
339 |
--------------------------------------------------------------------------------
/tests/test_examine.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 | from pyroSAR.examine import ExamineSnap, SnapProperties
4 |
5 |
6 | def test_snap_config(tmpdir, tmp_home):
7 | conf_snap = ExamineSnap()
8 | conf = SnapProperties(path=os.path.dirname(conf_snap.etc))
9 | path = os.path.join(os.path.expanduser('~'), '.snap', 'etc', 'snap.properties')
10 | assert conf.userpath_properties == path
11 | conf.userpath = tmpdir
12 | assert conf.userpath == tmpdir
13 | with pytest.raises(KeyError):
14 | conf['foobar'] = tmpdir
15 | ###########################################################################
16 | # check that the type is preserved when setting values
17 | conf['snap.jai.tileCacheSize'] = 2048
18 | assert conf['snap.jai.tileCacheSize'] == 2048
19 | assert isinstance(conf['snap.jai.tileCacheSize'], int)
20 |
21 | conf = SnapProperties(path=os.path.dirname(conf_snap.etc))
22 | assert conf['snap.jai.tileCacheSize'] == 2048
23 | assert isinstance(conf['snap.jai.tileCacheSize'], int)
24 |
25 | conf['snap.jai.tileCacheSize'] = 2048.
26 | assert isinstance(conf['snap.jai.tileCacheSize'], float)
27 |
28 | conf = SnapProperties(path=os.path.dirname(conf_snap.etc))
29 | assert conf['snap.jai.tileCacheSize'] == 2048.
30 | assert isinstance(conf['snap.jai.tileCacheSize'], float)
31 |
32 | conf['snap.jai.tileCacheSize'] = None
33 | assert conf['snap.jai.tileCacheSize'] is None
34 |
35 | conf = SnapProperties(path=os.path.dirname(conf_snap.etc))
36 | assert conf['snap.jai.tileCacheSize'] is None
37 |
38 | conf['snap.jai.tileCacheSize'] = True
39 | assert conf['snap.jai.tileCacheSize'] is True
40 |
41 | conf = SnapProperties(path=os.path.dirname(conf_snap.etc))
42 | assert conf['snap.jai.tileCacheSize'] is True
43 | ###########################################################################
44 | # check that a path can correctly be written and read
45 | conf = SnapProperties(path=os.path.dirname(conf_snap.etc))
46 | conf['snap.userdir'] = str(tmpdir / '.snap')
47 |
48 | conf = SnapProperties(path=os.path.dirname(conf_snap.etc))
49 | assert conf['snap.userdir'] == str(tmpdir / '.snap')
50 |
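51 |
52 | def test_property_coercion_demo():
53 |     # Illustrative sketch of the type preservation asserted above: how a
54 |     # string read back from snap.properties might be coerced to a Python
55 |     # type. `coerce_demo` is hypothetical, not the pyroSAR implementation.
56 |     def coerce_demo(value):
57 |         if value in ('true', 'false'):
58 |             return value == 'true'
59 |         for cast in (int, float):
60 |             try:
61 |                 return cast(value)
62 |             except ValueError:
63 |                 continue
64 |         return value
65 |
66 |     assert coerce_demo('2048') == 2048
67 |     assert isinstance(coerce_demo('2048.'), float)
68 |     assert coerce_demo('true') is True
69 |     assert coerce_demo('foobar') == 'foobar'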
--------------------------------------------------------------------------------
/tests/test_gamma.py:
--------------------------------------------------------------------------------
1 | import os
2 | import pytest
3 | from pyroSAR.gamma import ISPPar, par2hdr, Namespace, slc_corners, api
4 |
5 |
6 | def test_par(testdata, tmpdir):
7 | with ISPPar(testdata['dempar']) as par:
8 | envi = par.envidict()
9 | assert envi['map_info'] == ['UTM', '1.0000', '1.0000', 515353.565, 5235168.873, '20.0', '20.0',
10 | 32, 'North', 'WGS-84', 'units=Meters']
11 | assert envi['lines'] == 6455
12 | assert envi['samples'] == 5927
13 | assert envi['interleave'] == 'bsq'
14 | assert envi['bands'] == 1
15 | assert envi['byte_order'] == 1
16 | assert envi['data_type'] == 4
17 | assert envi['file_type'] == 'ENVI Standard'
18 | hdrfile = os.path.join(str(tmpdir), 'dem.hdr')
19 | par2hdr(testdata['dempar'], hdrfile=hdrfile, modifications={'band_names': ['band1']}, nodata=0)
20 | assert os.path.isfile(hdrfile)
21 | with ISPPar(testdata['mlipar']) as par:
22 | assert par.date == '2014-11-15T18:18:1.309100'
23 | assert par.envidict()['acquisition_time'] == '2014-11-15T18:18:1.309100Z'
24 | print(par)
25 |
26 |
27 | def test_namespace():
28 | n = Namespace(directory='/test', basename='S1A__IW___A_20180829T170656')
29 | n.appreciate(['inc_geo', 'ls_map'])
30 | assert n.isregistered('inc_geo')
31 | assert n.isfile('inc_geo') is False
32 | assert n.isappreciated('inc_geo') is True
33 | exp1 = os.path.join('/test', 'S1A__IW___A_20180829T170656_inc_geo')
34 | exp2 = os.path.join('/test', 'S1A__IW___A_20180829T170656_ls_map')
35 | assert n['inc_geo'] == exp1
36 | assert n.get('ls_map') == exp2
37 | n.depreciate(['inc_geo'])
38 | assert n.isappreciated('inc_geo') is False
39 | assert n['inc_geo'] == '-'
40 | assert n.getall() == {'inc_geo': '-', 'ls_map': exp2}
41 | assert n.select(['inc_geo', 'ls_map']) == ['-', exp2]
42 | n.depreciate(['dem_seg'])
43 | assert n['dem_seg'] == '-'
44 |
45 |
46 | @pytest.mark.skipif('isp' not in dir(api), reason='requires GAMMA installation with module ISP')
47 | def test_slc_corners(testdata):
48 |     pts = slc_corners(testdata['mlipar'])
49 |     assert pts == {'ymin': 36.20859758,
50 |                    'ymax': 38.11058293,
51 |                    'xmin': -6.59346425,
52 |                    'xmax': -3.42811204}
53 |
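54 |
55 | def test_namespace_placeholder(tmpdir):
56 |     # Illustrative sketch, not part of the original suite: depreciated
57 |     # keys resolve to '-', the conventional GAMMA placeholder for unused
58 |     # file arguments, so they can be passed straight to GAMMA commands.
59 |     n = Namespace(directory=str(tmpdir), basename='scene')
60 |     n.depreciate(['ls_map'])
61 |     assert n.select(['ls_map']) == ['-']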
--------------------------------------------------------------------------------
/tests/test_gamma_args.py:
--------------------------------------------------------------------------------
1 | import pytest
2 | from pyroSAR.ancillary import getargs
3 | from pyroSAR.gamma import api
4 |
5 |
6 | @pytest.mark.skipif('diff' not in dir(api), reason='requires GAMMA installation with module DIFF')
7 | def test_args_diff():
8 | from pyroSAR.gamma.api import diff
9 | assert getargs(diff.gc_map) == ['DEM', 'DEM_par', 'DEM_seg', 'DEM_seg_par', 'MLI_par', 'OFF_par', 'frame',
10 | 'inc', 'lat_ovr', 'logpath', 'lon_ovr', 'lookup_table', 'ls_map', 'ls_mode',
11 | 'outdir', 'pix', 'psi', 'r_ovr', 'shellscript', 'sim_sar', 'u', 'v']
12 | assert getargs(diff.gc_map_grd) == ['DEM', 'DEM_par', 'DEM_seg', 'DEM_seg_par', 'GRD_par', 'frame', 'inc',
13 | 'lat_ovr', 'logpath', 'lon_ovr', 'lookup_table', 'ls_map', 'ls_mode', 'outdir',
14 | 'pix', 'psi', 'r_ovr', 'shellscript', 'sim_sar', 'u', 'v']
15 |
16 | args = getargs(diff.geocode_back)
17 | args_ref = ['data_in', 'data_out', 'dtype', 'interp_mode', 'logpath', 'lookup_table',
18 | 'lr_in', 'lr_out', 'nlines_out', 'order', 'outdir', 'shellscript',
19 | 'width_in', 'width_out']
20 | comp = [x in args for x in args_ref]
21 | assert sum(comp) == len(args_ref)
22 |
23 | assert getargs(diff.par_EORC_PALSAR_geo) == ['CEOS_data', 'CEOS_leader', 'DEM_par', 'MLI',
24 | 'MLI_par', 'cal', 'logpath', 'outdir', 'shellscript']
25 | assert getargs(diff.par_TX_geo) == ['DEM_par', 'GEO', 'GeoTIFF', 'MLI_par', 'annotation_XML',
26 | 'logpath', 'outdir', 'pol', 'shellscript']
27 |
28 | args = getargs(diff.pixel_area)
29 | args_ref = ['DEM', 'DEM_par', 'MLI_par', 'area_fact', 'inc_map', 'logpath', 'lookup_table',
30 | 'ls_map', 'nstep', 'outdir', 'pix_gamma0', 'pix_sigma0', 'shellscript']
31 | comp = [x in args for x in args_ref]
32 | assert sum(comp) == len(args_ref)
33 |
34 |
35 | @pytest.mark.skipif('disp' not in dir(api), reason='requires GAMMA installation with module DISP')
36 | def test_args_disp():
37 | from pyroSAR.gamma.api import disp
38 |
39 | args = getargs(disp.data2geotiff)
40 | args_ref = ['DEM_par', 'GeoTIFF', 'data', 'logpath', 'no_data', 'outdir', 'shellscript',
41 | 'type']
42 | comp = [x in args for x in args_ref]
43 | assert sum(comp) == len(args_ref)
44 |
45 |
46 | @pytest.mark.skipif('isp' not in dir(api), reason='requires GAMMA installation with module ISP')
47 | def test_args_isp():
48 | from pyroSAR.gamma.api import isp
49 | assert getargs(isp.multi_look) == ['MLI', 'MLI_par', 'SLC', 'SLC_par', 'azlks', 'exp', 'loff', 'logpath',
50 | 'nlines', 'outdir', 'rlks', 'scale', 'shellscript']
51 |
52 | args = getargs(isp.multi_look_MLI)
53 | args_ref = ['MLI_in', 'MLI_in_par', 'MLI_out', 'MLI_out_par', 'azlks', 'loff',
54 | 'logpath', 'nlines', 'outdir', 'rlks', 'scale', 'shellscript']
55 | comp = [x in args for x in args_ref]
56 | assert sum(comp) == len(args_ref)
57 |
58 | assert getargs(isp.par_ASAR) == ['ASAR_ERS_file', 'K_dB', 'logpath', 'outdir', 'output_name', 'shellscript']
59 | assert getargs(isp.par_EORC_PALSAR) == ['CEOS_data', 'CEOS_leader', 'SLC', 'SLC_par', 'dtype',
60 | 'logpath', 'outdir', 'sc_dB', 'shellscript']
61 | assert getargs(isp.par_ESA_ERS) == ['CEOS_DAT', 'CEOS_SAR_leader', 'SLC', 'SLC_par', 'inlist',
62 | 'logpath', 'outdir', 'shellscript']
63 |
64 | args = getargs(isp.par_S1_GRD)
65 | args_ref = ['GRD', 'GRD_par', 'GeoTIFF', 'MLI', 'MLI_par', 'annotation_XML',
66 | 'calibration_XML', 'eflg', 'logpath', 'noise_XML', 'noise_pwr',
67 | 'outdir', 'rps', 'shellscript']
68 | comp = [x in args for x in args_ref]
69 | assert sum(comp) == len(args_ref)
70 |
71 | assert getargs(isp.par_S1_SLC) == ['GeoTIFF', 'SLC', 'SLC_par', 'TOPS_par', 'annotation_XML', 'calibration_XML',
72 | 'dtype', 'logpath', 'noise_XML', 'noise_pwr', 'outdir', 'sc_dB', 'shellscript']
73 | assert getargs(isp.par_TX_GRD) == ['GRD', 'GRD_par', 'GeoTIFF', 'annotation_XML', 'logpath',
74 | 'outdir', 'pol', 'shellscript']
75 | assert getargs(isp.par_TX_SLC) == ['COSAR', 'SLC', 'SLC_par', 'annotation_XML', 'dtype',
76 | 'logpath', 'outdir', 'pol', 'shellscript']
77 | assert getargs(isp.radcal_MLI) == ['CMLI', 'K_dB', 'MLI', 'MLI_par', 'OFF_par', 'ant_flag', 'antenna', 'logpath',
78 | 'outdir', 'pix_area', 'refarea_flag', 'rloss_flag', 'sc_dB', 'shellscript']
79 | assert getargs(isp.radcal_PRI) == ['GRD', 'GRD_par', 'K_dB', 'PRI', 'PRI_par',
80 | 'inc_ref', 'loff', 'logpath', 'nl', 'nr',
81 | 'outdir', 'roff', 'shellscript']
82 | assert getargs(isp.radcal_SLC) == ['CSLC', 'CSLC_par', 'K_dB', 'SLC', 'SLC_par',
83 | 'ant_flag', 'antenna', 'fcase', 'logpath', 'outdir',
84 | 'pix_area', 'refarea_flag', 'rloss_flag', 'sc_dB', 'shellscript']
85 | assert getargs(isp.S1_OPOD_vec) == ['OPOD', 'SLC_par', 'logpath', 'nstate', 'outdir', 'shellscript']
86 |
87 | args = getargs(isp.SLC_deramp_ScanSAR)
88 | args_ref = ['SLC1_tab', 'SLC2_tab', 'logpath', 'mode', 'outdir',
89 | 'phflg', 'shellscript']
90 | comp = [x in args for x in args_ref]
91 | assert sum(comp) == len(args_ref)
92 |
93 | args_ref = ['SLC', 'SLCR_tab', 'SLC_par', 'SLC_tab', 'azlks', 'logpath',
94 | 'outdir', 'rlks', 'shellscript', 'bflg']
95 | args = getargs(isp.SLC_mosaic_S1_TOPS)
96 | comp = [x in args for x in args_ref]
97 | assert sum(comp) == len(args_ref)
98 |
99 |
100 | @pytest.mark.skipif('lat' not in dir(api), reason='requires GAMMA installation with module LAT')
101 | def test_args_lat():
102 | from pyroSAR.gamma.api import lat
103 | assert getargs(lat.linear_to_dB) == ['data_in', 'data_out', 'inverse_flag', 'logpath', 'null_value', 'outdir',
104 | 'shellscript', 'width']
105 | assert getargs(lat.product) == ['bx', 'by', 'data_1', 'data_2', 'logpath', 'outdir', 'product',
106 | 'shellscript', 'wgt_flag', 'width']
107 | assert getargs(lat.ratio) == ['bx', 'by', 'd1', 'd2', 'logpath', 'outdir', 'ratio',
108 | 'shellscript', 'wgt_flag', 'width']
109 |
110 | args = getargs(lat.sigma2gamma)
111 | args_ref = ['gamma0', 'inc', 'logpath', 'outdir', 'sigma0', 'shellscript', 'width']
112 | comp = [x in args for x in args_ref]
113 | assert sum(comp) == len(args_ref)
114 |
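115 |
116 | def assert_contains(args, args_ref):
117 |     # Illustrative helper, not used above: an idiomatic alternative to
118 |     # the recurring `sum(comp) == len(args_ref)` subset check, with a
119 |     # more informative failure message.
120 |     missing = sorted(set(args_ref) - set(args))
121 |     assert not missing, 'missing arguments: {}'.format(missing)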
--------------------------------------------------------------------------------
/tests/test_license.py:
--------------------------------------------------------------------------------
1 | import os
2 | import re
3 | from datetime import datetime
4 |
5 | def test_license_year():
6 | dir_current = os.path.dirname(os.path.abspath(__file__))
7 | license = os.path.join(dir_current, '..', 'LICENSE.txt')
8 | assert os.path.isfile(license)
9 | with open(license, 'r') as f:
10 | content = f.read()
11 |     match = re.search('([0-9]{4})-([0-9]{4})', content)
12 |     assert match is not None
13 |     start, end = match.groups()
14 |     year_current = datetime.now().year
15 |     assert int(start) == 2014
16 |     assert int(end) == year_current
17 |
--------------------------------------------------------------------------------
/tests/test_osv.py:
--------------------------------------------------------------------------------
1 | import os
2 | import time
3 | import pytest
4 | from pyroSAR import identify
5 | from pyroSAR.S1 import OSV
6 | from datetime import datetime, timedelta
7 |
8 |
9 | def test_osv_cleanres(tmpdir):
10 | with OSV(str(tmpdir)) as osv:
11 | assert osv.getLocals('POE') == []
12 | assert osv.getLocals('RES') == []
13 | now = (datetime.now() - timedelta(hours=10)).strftime('%Y%m%dT%H%M%S')
14 | res = osv.catch(sensor='S1A', osvtype='RES', start=now)
15 | nfiles = len(res)
16 | osv.retrieve(res)
17 | osv.clean_res()
18 | assert len(osv.getLocals('RES')) == nfiles
19 |
20 |
21 | def test_scene_osv(tmpdir, testdata):
22 | id = identify(testdata['s1_orbit'])
23 | osvdir = os.path.join(str(tmpdir), 'osv')
24 | id.getOSV(osvdir)
25 | with OSV(osvdir) as osv:
26 | with pytest.raises(RuntimeError):
27 | osv.catch(sensor='S1A', osvtype='XYZ')
28 | res = osv.catch(sensor='S1A', osvtype='RES', start=osv.mindate('POE'), stop=osv.maxdate('POE'))
29 | assert len(res) == 0
30 |
31 | assert len(osv.getLocals('POE')) == 1
32 | assert len(osv.getLocals('RES')) == 0
33 | assert osv.match(sensor=id.sensor, timestamp=id.start, osvtype='POE') is not None
34 | assert osv.match(sensor=id.sensor, timestamp=id.start, osvtype=['POE', 'RES']) is not None
35 | assert osv.match(sensor=id.sensor, timestamp=id.start, osvtype='RES') is None
36 | for item in osv.getLocals('POE')[1:3]:
37 | os.remove(item)
38 | assert len(osv.getLocals('POE')) == 1
39 |         res = osv.catch(sensor='S1A', osvtype='RES', start='20210201T000000', stop='20210201T150000', url_option=1)
40 | assert len(res) == 11
41 | osv.retrieve(res[0:3])
42 | assert len(osv.getLocals('RES')) == 3
43 | # check retrieving files for the current day (e.g. to ensure that search is not extended to the future)
44 | poe = osv.catch(sensor='S1A', osvtype='POE', start=time.strftime('%Y%m%dT%H%M%S'))
45 | assert len(poe) == 0
46 | # check retrieving files whose start is in the previous month of the search start
47 | poe = osv.catch(sensor='S1A', osvtype='POE', start='20220201T163644', stop='20220201T163709')
48 | assert len(poe) == 1
49 |
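50 |
51 | def test_timestamp_format():
52 |     # Illustrative sketch, not part of the original suite: timestamps
53 |     # passed to OSV.catch() follow the '%Y%m%dT%H%M%S' convention used
54 |     # throughout this module.
55 |     stamp = datetime(2021, 2, 1).strftime('%Y%m%dT%H%M%S')
56 |     assert stamp == '20210201T000000'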
--------------------------------------------------------------------------------
/tests/test_snap.py:
--------------------------------------------------------------------------------
1 | #####################################################################
2 | # Module for testing the functionality of the SNAP processing module
3 | #####################################################################
4 | import os
5 | import pytest
6 | from pyroSAR import identify
7 | from pyroSAR.snap import geocode
8 | from spatialist import bbox
9 | from spatialist.ancillary import finder
10 | from pyroSAR.snap.auxil import is_consistent, split, groupbyWorkers, Workflow, parse_recipe
11 | from pyroSAR.examine import ExamineSnap
12 |
13 |
14 | def test_installation():
15 | reg = ExamineSnap()
16 | assert os.path.isfile(reg.gpt)
17 |
18 |
19 | def test_consistency():
20 | with parse_recipe('base') as wf:
21 | assert is_consistent(wf)
22 |
23 |
24 | def test_geocode(tmpdir, testdata):
25 | scene = testdata['s1']
26 | geocode(scene, str(tmpdir), test=True)
27 | xmlfile = finder(str(tmpdir), ['*.xml'])[0]
28 | tree = Workflow(xmlfile)
29 | assert is_consistent(tree) is True
30 | groups = groupbyWorkers(xmlfile, 2)
31 | assert len(groups) == 4
32 | groups2 = groupbyWorkers(xmlfile, 100)
33 | assert len(groups2) == 1
34 | split(xmlfile, groups)
35 | id = identify(scene)
36 | basename = '{}_{}'.format(id.outname_base(), tree.suffix())
37 | procdir = os.path.join(str(tmpdir), basename)
38 | assert os.path.isdir(procdir)
39 | tempdir = os.path.join(procdir, 'tmp')
40 | assert os.path.isdir(tempdir)
41 | parts = finder(tempdir, ['*.xml'])
42 | assert len(parts) == 4
43 |
44 |
45 | class Test_geocode_opts():
46 | def test_infile_type(self, tmpdir, testdata):
47 | scene = testdata['s1']
48 | with pytest.raises(TypeError):
49 | geocode(infile=123, outdir=str(tmpdir), test=True)
50 | id = identify(scene)
51 | geocode(infile=id, outdir=str(tmpdir), test=True)
52 |
53 | def test_pol(self, tmpdir, testdata):
54 | scene = testdata['s1']
55 | with pytest.raises(RuntimeError):
56 | geocode(scene, str(tmpdir), polarizations=1, test=True)
57 | with pytest.raises(RuntimeError):
58 | geocode(scene, str(tmpdir), polarizations='foobar', test=True)
59 | geocode(scene, str(tmpdir), polarizations='VV', test=True)
60 |
61 | def test_pol_list(self, tmpdir, testdata):
62 | scene = testdata['s1']
63 | geocode(scene, str(tmpdir), polarizations=['VV', 'VH'], test=True)
64 |
65 | def test_geotype(self, tmpdir, testdata):
66 | scene = testdata['s1']
67 | with pytest.raises(RuntimeError):
68 | geocode(scene, str(tmpdir), geocoding_type='foobar', test=True)
69 | geocode(scene, str(tmpdir), test=True,
70 | geocoding_type='SAR simulation cross correlation')
71 |
72 | def test_srs(self, tmpdir, testdata):
73 | scene = testdata['s1']
74 | with pytest.raises(RuntimeError):
75 | geocode(scene, str(tmpdir), t_srs='foobar', test=True)
76 | geocode(scene, str(tmpdir), t_srs=32632, test=True)
77 |
78 | def test_scaling(self, tmpdir, testdata):
79 | scene = testdata['s1']
80 | with pytest.raises(RuntimeError):
81 | geocode(scene, str(tmpdir), scaling='foobar', test=True)
82 |
83 | def test_shp(self, tmpdir, testdata):
84 | scene = testdata['s1']
85 | ext = {'xmin': 12, 'xmax': 13, 'ymin': 53, 'ymax': 54}
86 | with bbox(ext, 4326) as new:
87 | with pytest.raises(RuntimeError):
88 | geocode(scene, str(tmpdir), shapefile=new, test=True)
89 |
90 | with identify(scene).bbox() as box:
91 | ext = box.extent
92 | ext['xmax'] -= 1
93 | with bbox(ext, 4326) as new:
94 | geocode(scene, str(tmpdir), shapefile=new, test=True)
95 |
96 | def test_offset(self, tmpdir, testdata):
97 | scene = testdata['s1']
98 | geocode(scene, str(tmpdir), offset=(100, 100, 0, 0), test=True)
99 |
100 | def test_export_extra(self, tmpdir, testdata):
101 | scene = testdata['s1']
102 | with pytest.raises(RuntimeError):
103 | geocode(scene, str(tmpdir), test=True,
104 | export_extra=['foobar'])
105 | geocode(scene, str(tmpdir), test=True,
106 | export_extra=['localIncidenceAngle'])
107 |
108 | def test_externalDEM(self, tmpdir, testdata):
109 | scene = testdata['s1']
110 | dem_dummy = testdata['tif']
111 | with pytest.raises(RuntimeError):
112 | geocode(scene, str(tmpdir), externalDEMFile='foobar', test=True)
113 | geocode(scene, str(tmpdir), externalDEMFile=dem_dummy, test=True)
114 |
115 | def test_speckleFilter(self, tmpdir, testdata):
116 | scene = testdata['s1']
117 | with pytest.raises(ValueError):
118 | geocode(scene, str(tmpdir), speckleFilter='foobar', test=True)
119 | geocode(scene, str(tmpdir), speckleFilter='Refined Lee', test=True)
120 |
121 | def test_refarea(self, tmpdir, testdata):
122 | scene = testdata['s1']
123 | with pytest.raises(ValueError):
124 | geocode(scene, str(tmpdir), terrainFlattening=False, refarea='foobar', test=True)
125 | geocode(scene, str(tmpdir), terrainFlattening=True, refarea='gamma0', test=True)
126 |
127 | def test_sliceassembly(self, tmpdir, testdata):
128 | scene1 = testdata['s1']
129 | scene2 = testdata['s1_2']
130 | wf = geocode([scene1, scene2], str(tmpdir), test=True, returnWF=True)
131 | for n in range(1, 4):
132 | groups = groupbyWorkers(wf, n=n)
133 | split(wf, groups)
134 |
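135 |
136 | def test_recipe_suffix():
137 |     # Illustrative sketch, assuming the object returned by parse_recipe
138 |     # provides the same suffix() method as Workflow (used in test_geocode
139 |     # above): the base recipe's suffix is a non-empty string used for
140 |     # output naming.
141 |     with parse_recipe('base') as wf:
142 |         suffix = wf.suffix()
143 |         assert isinstance(suffix, str)
144 |         assert len(suffix) > 0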
--------------------------------------------------------------------------------
/tests/test_snap_exe.py:
--------------------------------------------------------------------------------
1 | from contextlib import contextmanager
2 |
3 |
4 | @contextmanager
5 | def not_raises(ExpectedException):
6 | try:
7 | yield
8 |
9 | except ExpectedException:
10 | raise AssertionError(
11 |             "Raised exception {0} when it should not have!".format(
12 | repr(ExpectedException)
13 | )
14 | )
15 |
16 |     except Exception as err:
17 |         raise AssertionError(
18 |             "An unexpected exception {0} was raised.".format(repr(err))
19 | )
20 |
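21 |
22 | def test_not_raises_demo():
23 |     # Illustrative usage sketch for the helper above: code that does not
24 |     # raise passes the check silently.
25 |     with not_raises(RuntimeError):
26 |         pass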
--------------------------------------------------------------------------------
/tests/test_xml_util.py:
--------------------------------------------------------------------------------
1 |
2 | import os
3 | import pytest
4 | from pyroSAR import SAFE
5 | from pyroSAR.xml_util import XMLHandler
6 |
7 |
8 | def test_handler(tmpdir, testdata):
9 | id = SAFE(testdata['s1'])
10 | id.unpack(str(tmpdir))
11 | testfile = os.path.join(id.scene, 'manifest.safe')
12 | xml = XMLHandler(testfile)
13 | xml.restoreNamespaces()
14 | xml.write(os.path.join(str(tmpdir), 'test.xml'), 'w')
15 | with pytest.raises(RuntimeError):
16 | xml = XMLHandler(1)
17 | with pytest.raises(RuntimeError):
18 | xml = XMLHandler('foobar')
19 | with open(testfile, 'r') as infile:
20 | xml = XMLHandler(infile)
21 |
--------------------------------------------------------------------------------