├── .circleci └── config.yml ├── .github ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ └── feature_request.yml ├── environment.yml ├── lockfile.lock └── workflows │ └── test.yml ├── .gitignore ├── .pre-commit-config.yaml ├── Docker ├── Dockerfile ├── environment.yml ├── lockfile.lock └── requirements.txt ├── LICENSE ├── README.md ├── app ├── rtc_compare.py ├── rtc_s1.py └── rtc_s1_single_job.py ├── build_docker_image.sh ├── setup.py ├── src └── rtc │ ├── __init__.py │ ├── core.py │ ├── defaults │ ├── rtc_s1.yaml │ └── rtc_s1_static.yaml │ ├── extern │ ├── __init__.py │ └── validate_cloud_optimized_geotiff.py │ ├── geogrid.py │ ├── h5_prep.py │ ├── helpers.py │ ├── mosaic_geobursts.py │ ├── radar_grid.py │ ├── rtc_s1.py │ ├── rtc_s1_single_job.py │ ├── runconfig.py │ ├── schemas │ └── rtc_s1.yaml │ ├── version.py │ └── wrap_namespace.py └── tests ├── runconfigs ├── s1b_los_angeles_mask_off.yaml ├── s1b_los_angeles_mask_off_h5.yaml ├── s1b_los_angeles_mask_on.yaml └── s1b_los_angeles_mask_on_h5.yaml └── test_rtc_s1_workflow.py /.circleci/config.yml: -------------------------------------------------------------------------------- 1 | version: 2.1 2 | 3 | jobs: 4 | build: 5 | machine: 6 | image: ubuntu-2004:current 7 | resource_class: medium 8 | steps: 9 | - checkout 10 | - run: 11 | name: "Build RTC docker image" 12 | command: | 13 | docker build . -f docker/Dockerfile 14 | 15 | workflows: 16 | build-workflow: 17 | jobs: 18 | - build 19 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: Bug Report 2 | description: Create a report to help us improve 3 | title: "[Bug]: " 4 | labels: ["bug", "needs triage"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | > _Thanks for filing a bug ticket. We appreciate your time and effort. Please answer a few questions._ 10 | - type: dropdown 11 | id: checked-for-duplicates 12 | attributes: 13 | label: Checked for duplicates 14 | description: Have you checked for duplicate issue tickets? 15 | multiple: false 16 | options: 17 | - "Yes - I've already checked" 18 | - "No - I haven't checked" 19 | validations: 20 | required: yes 21 | - type: textarea 22 | id: description 23 | attributes: 24 | label: Describe the bug 25 | description: A clear and concise description of what the bug is. Plain-text snippets preferred but screenshots welcome. 26 | placeholder: Tell us what you saw 27 | value: "When I did [...] action, I noticed [...]" 28 | validations: 29 | required: true 30 | - type: textarea 31 | id: expected-behavior 32 | attributes: 33 | label: What did you expect? 34 | description: A clear and concise description of what you expect to happen 35 | placeholder: Tell us what you expected 36 | value: "I expected [...]" 37 | validations: 38 | required: true 39 | - type: textarea 40 | id: reproduction 41 | attributes: 42 | label: Reproducible steps 43 | description: "How would we reproduce this bug? Please walk us through it step by step. Plain-text snippets preferred but screenshots welcome." 44 | value: | 45 | 1. 46 | 2. 47 | 3. 48 | ... 49 | render: bash 50 | - type: textarea 51 | id: environment 52 | attributes: 53 | label: Environment 54 | description: "What is your environment?" 55 | value: | 56 | - Version of this software [e.g. vX.Y.Z] 57 | - Operating System: [e.g. MacOSX with Docker Desktop vX.Y] 58 | ... 59 | render: bash 60 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: New Feature 2 | description: Submit a new feature request 3 | title: "[New Feature]: " 4 | labels: ["enhancement", "needs triage"] 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: | 9 | > _Thanks for filing a new feature request. We appreciate your time and effort. Please answer a few questions._ 10 | - type: dropdown 11 | id: checked-for-duplicates 12 | attributes: 13 | label: Checked for duplicates 14 | description: Have you checked for duplicate issue tickets? 15 | multiple: false 16 | options: 17 | - "Yes - I've already checked" 18 | - "No - I haven't checked" 19 | validations: 20 | required: yes 21 | - type: dropdown 22 | id: checked-alternatives 23 | attributes: 24 | label: Alternatives considered 25 | description: Have you considered alternative solutions to your feature request? 26 | options: 27 | - "Yes - and alternatives don't suffice" 28 | - "No - I haven't considered" 29 | validations: 30 | required: yes 31 | - type: textarea 32 | id: related-problems 33 | attributes: 34 | label: Related problems 35 | description: Is your feature request related to any problems? Please help us understand if so, including linking to any other issue tickets. 36 | placeholder: Tell us the problems 37 | value: "I'm frustrated when [...] happens as documented in issue-XYZ" 38 | validations: 39 | required: false 40 | - type: textarea 41 | id: description 42 | attributes: 43 | label: Describe the feature request 44 | description: A clear and concise description of your request. 45 | placeholder: Tell us what you want 46 | value: "I need or want [...]" 47 | validations: 48 | required: true 49 | -------------------------------------------------------------------------------- /.github/environment.yml: -------------------------------------------------------------------------------- 1 | ../Docker/environment.yml -------------------------------------------------------------------------------- /.github/lockfile.lock: -------------------------------------------------------------------------------- 1 | ../Docker/lockfile.lock -------------------------------------------------------------------------------- /.github/workflows/test.yml: -------------------------------------------------------------------------------- 1 | name: Tests 2 | 3 | on: [pull_request, push] 4 | 5 | concurrency: 6 | group: ${{ github.workflow }}-${{ github.head_ref }} 7 | cancel-in-progress: true 8 | 9 | jobs: 10 | install_and_test: 11 | 12 | defaults: 13 | run: 14 | shell: bash -l {0} 15 | 16 | strategy: 17 | fail-fast: false 18 | matrix: 19 | os: 20 | - label: Linux 21 | runner: ubuntu-latest 22 | env-file: 23 | - label: Lock File 24 | file: .github/lockfile.lock 25 | - label: Environment File 26 | file: .github/environment.yml 27 | 28 | name: Install And Test - ${{ matrix.os.label }} ${{ matrix.env-file.label }} 29 | runs-on: ${{ matrix.os.runner }} 30 | steps: 31 | # Typical github repo checkout step. 32 | - name: Github Repo Checkout 33 | uses: actions/checkout@v3 34 | 35 | # Install Python on the runner. 36 | - name: Provision with Python 37 | uses: actions/setup-python@v4 38 | with: 39 | python-version: '3.9' 40 | cache: 'pip' 41 | 42 | # Set the conda environment up using Mamba and install dependencies 43 | - name: Setup Environment 44 | uses: mamba-org/setup-micromamba@v2 45 | with: 46 | environment-file: ${{ matrix.env-file.file }} 47 | environment-name: RTC 48 | init-shell: bash # let the action touch ~/.bash_profile 49 | log-level: debug # shows solver problems early 50 | post-cleanup: none 51 | 52 | # Install the S1-Reader OPERA-ADT project. 53 | - name: Install S1-Reader 54 | run: | 55 | curl -sSL \ 56 | https://github.com/opera-adt/s1-reader/archive/refs/tags/v0.2.5.tar.gz \ 57 | -o s1_reader_src.tar.gz \ 58 | && tar -xvf s1_reader_src.tar.gz \ 59 | && ln -s s1-reader-0.2.5 s1-reader \ 60 | && rm s1_reader_src.tar.gz \ 61 | && python -m pip install -e ./s1-reader 62 | 63 | # Setup the project 64 | - name: Install Project 65 | run: python -m pip install . 66 | 67 | # # Test the project. 68 | - name: Test Project 69 | run: | 70 | pytest -vrpP tests/ 71 | 72 | build_docker: 73 | 74 | defaults: 75 | run: 76 | shell: bash -l {0} 77 | 78 | strategy: 79 | fail-fast: false 80 | matrix: 81 | os: 82 | - label: Linux 83 | runner: ubuntu-latest 84 | 85 | name: Docker Build Test - ${{ matrix.os.label }} 86 | runs-on: ${{ matrix.os.runner }} 87 | steps: 88 | # Typical github repo checkout step. 89 | - name: Github Repo Checkout 90 | uses: actions/checkout@v3 91 | 92 | # Install Python on the runner. 93 | - name: Provision with Python 94 | uses: actions/setup-python@v4 95 | with: 96 | python-version: '3.8' 97 | cache: 'pip' 98 | 99 | # Install the setuptools dependency for build_docker_image.sh. 100 | - name: Setup Python dependencies 101 | run: pip install setuptools 102 | 103 | # Build the image. 104 | - name: Build docker image 105 | run: | 106 | bash build_docker_image.sh 107 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | share/python-wheels/ 24 | *.egg-info/ 25 | .installed.cfg 26 | *.egg 27 | MANIFEST 28 | 29 | # PyInstaller 30 | # Usually these files are written by a python script from a template 31 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 32 | *.manifest 33 | *.spec 34 | 35 | # Installer logs 36 | pip-log.txt 37 | pip-delete-this-directory.txt 38 | 39 | # Unit test / coverage reports 40 | htmlcov/ 41 | .tox/ 42 | .nox/ 43 | .coverage 44 | .coverage.* 45 | .cache 46 | nosetests.xml 47 | coverage.xml 48 | *.cover 49 | *.py,cover 50 | .hypothesis/ 51 | .pytest_cache/ 52 | cover/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | docs/_build/ 73 | 74 | # PyBuilder 75 | .pybuilder/ 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | # For a library or package, you might want to ignore these files since the code is 87 | # intended to run in multiple environments; otherwise, check them in: 88 | # .python-version 89 | 90 | # pipenv 91 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 92 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 93 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 94 | # install all needed dependencies. 95 | #Pipfile.lock 96 | 97 | # poetry 98 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. 99 | # This is especially recommended for binary packages to ensure reproducibility, and is more 100 | # commonly ignored for libraries. 101 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control 102 | #poetry.lock 103 | 104 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow 105 | __pypackages__/ 106 | 107 | # Celery stuff 108 | celerybeat-schedule 109 | celerybeat.pid 110 | 111 | # SageMath parsed files 112 | *.sage.py 113 | 114 | # Environments 115 | .env 116 | .venv 117 | env/ 118 | venv/ 119 | ENV/ 120 | env.bak/ 121 | venv.bak/ 122 | 123 | # Spyder project settings 124 | .spyderproject 125 | .spyproject 126 | 127 | # Rope project settings 128 | .ropeproject 129 | 130 | # mkdocs documentation 131 | /site 132 | 133 | # mypy 134 | .mypy_cache/ 135 | .dmypy.json 136 | dmypy.json 137 | 138 | # Pyre type checker 139 | .pyre/ 140 | 141 | # pytype static type analyzer 142 | .pytype/ 143 | 144 | # Cython debug symbols 145 | cython_debug/ 146 | 147 | # PyCharm 148 | # JetBrains specific template is maintainted in a separate JetBrains.gitignore that can 149 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore 150 | # and can be added to the global gitignore or merged into this file. For a more nuclear 151 | # option (not recommended) you can uncomment the following to ignore the entire idea folder. 152 | #.idea/ 153 | 154 | 155 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | # See https://pre-commit.com for more information 2 | # See https://pre-commit.com/hooks.html for more hooks 3 | repos: 4 | - repo: https://github.com/pre-commit/pre-commit-hooks 5 | rev: v4.6.0 6 | hooks: 7 | - id: check-added-large-files 8 | -------------------------------------------------------------------------------- /Docker/Dockerfile: -------------------------------------------------------------------------------- 1 | ARG BASE_IMAGE=oraclelinux:8.8 2 | 3 | FROM ${BASE_IMAGE} 4 | 5 | # ARG is out of scope here unless re-declared 6 | ARG BASE_IMAGE 7 | 8 | LABEL author="OPERA ADT" \ 9 | description="RTC cal/val release R4" \ 10 | version="1.0.4-final" 11 | 12 | RUN yum -y update &&\ 13 | yum -y install curl &&\ 14 | adduser rtc_user 15 | 16 | RUN mkdir -p /home/rtc_user/OPERA/RTC 17 | 18 | RUN chmod -R 755 /home/rtc_user &&\ 19 | chown -R rtc_user:rtc_user /home/rtc_user/OPERA 20 | 21 | USER rtc_user 22 | 23 | ENV CONDA_PREFIX=/home/rtc_user/miniconda3 24 | 25 | # install Miniconda 26 | WORKDIR /home/rtc_user 27 | RUN curl -sSL https://repo.continuum.io/miniconda/Miniconda3-latest-Linux-x86_64.sh -o miniconda.sh &&\ 28 | bash miniconda.sh -b -p ${CONDA_PREFIX} &&\ 29 | rm $HOME/miniconda.sh 30 | 31 | ENV PATH=${CONDA_PREFIX}/bin:${PATH} 32 | RUN ${CONDA_PREFIX}/bin/conda init bash 33 | 34 | # copy RTC source code and set rtc_user as owner 35 | COPY --chown=rtc_user:rtc_user . /home/rtc_user/OPERA/RTC 36 | 37 | # create CONDA environment 38 | RUN conda create --name "RTC" --file /home/rtc_user/OPERA/RTC/Docker/lockfile.lock && conda clean -afy 39 | 40 | SHELL ["conda", "run", "-n", "RTC", "/bin/bash", "-c"] 41 | 42 | WORKDIR /home/rtc_user/OPERA 43 | 44 | # installing OPERA s1-reader 45 | RUN curl -sSL https://github.com/isce-framework/s1-reader/archive/refs/tags/v0.2.5.tar.gz -o s1_reader_src.tar.gz &&\ 46 | tar -xvf s1_reader_src.tar.gz &&\ 47 | ln -s s1-reader-0.2.5 s1-reader &&\ 48 | rm s1_reader_src.tar.gz &&\ 49 | python -m pip install ./s1-reader 50 | 51 | # installing OPERA RTC 52 | RUN python -m pip install ./RTC &&\ 53 | echo "conda activate RTC" >> /home/rtc_user/.bashrc 54 | 55 | WORKDIR /home/rtc_user/scratch 56 | 57 | ENTRYPOINT ["conda", "run", "--no-capture-output", "-n", "RTC"] 58 | -------------------------------------------------------------------------------- /Docker/environment.yml: -------------------------------------------------------------------------------- 1 | name: rtc_s1_sas_final 2 | channels: 3 | - conda-forge 4 | - nodefaults 5 | dependencies: 6 | - python>=3.9,<3.10 7 | - gdal>=3.0 8 | - s1reader>=0.2.5 9 | - numpy>=1.20 10 | - pybind11>=2.5 11 | - pyre>=1.11.2 12 | - scipy!=1.10.0 13 | - isce3==0.15.0 14 | # Workaround for the issue with `libabseil` (09/11/2023) 15 | - libabseil=20230125.3 16 | -------------------------------------------------------------------------------- /Docker/lockfile.lock: -------------------------------------------------------------------------------- 1 | # This file may be used to create an environment using: 2 | # $ conda create --name --file 3 | # platform: linux-64 4 | @EXPLICIT 5 | https://conda.anaconda.org/conda-forge/linux-64/_libgcc_mutex-0.1-conda_forge.tar.bz2 6 | https://conda.anaconda.org/conda-forge/linux-64/ca-certificates-2023.7.22-hbcca054_0.conda 7 | https://conda.anaconda.org/conda-forge/noarch/font-ttf-dejavu-sans-mono-2.37-hab24e00_0.tar.bz2 8 | https://conda.anaconda.org/conda-forge/noarch/font-ttf-inconsolata-3.000-h77eed37_0.tar.bz2 9 | https://conda.anaconda.org/conda-forge/noarch/font-ttf-source-code-pro-2.038-h77eed37_0.tar.bz2 10 | https://conda.anaconda.org/conda-forge/noarch/font-ttf-ubuntu-0.83-hab24e00_0.tar.bz2 11 | https://conda.anaconda.org/conda-forge/linux-64/ld_impl_linux-64-2.40-h41732ed_0.conda 12 | https://conda.anaconda.org/conda-forge/linux-64/libgfortran5-13.1.0-h15d22d2_0.conda 13 | https://conda.anaconda.org/conda-forge/linux-64/libstdcxx-ng-13.1.0-hfd8a6a1_0.conda 14 | https://conda.anaconda.org/conda-forge/noarch/poppler-data-0.4.12-hd8ed1ab_0.conda 15 | https://conda.anaconda.org/conda-forge/linux-64/python_abi-3.9-3_cp39.conda 16 | https://conda.anaconda.org/conda-forge/noarch/tzdata-2023c-h71feb2d_0.conda 17 | https://conda.anaconda.org/conda-forge/noarch/fonts-conda-forge-1-0.tar.bz2 18 | https://conda.anaconda.org/conda-forge/linux-64/libgfortran-ng-13.1.0-h69a702a_0.conda 19 | https://conda.anaconda.org/conda-forge/linux-64/libgomp-13.1.0-he5830b7_0.conda 20 | https://conda.anaconda.org/conda-forge/linux-64/_openmp_mutex-4.5-2_gnu.tar.bz2 21 | https://conda.anaconda.org/conda-forge/noarch/fonts-conda-ecosystem-1-0.tar.bz2 22 | https://conda.anaconda.org/conda-forge/linux-64/libgcc-ng-13.1.0-he5830b7_0.conda 23 | https://conda.anaconda.org/conda-forge/linux-64/bzip2-1.0.8-h7f98852_4.tar.bz2 24 | https://conda.anaconda.org/conda-forge/linux-64/c-ares-1.19.1-hd590300_0.conda 25 | https://conda.anaconda.org/conda-forge/linux-64/fftw-3.3.10-nompi_hc118613_108.conda 26 | https://conda.anaconda.org/conda-forge/linux-64/freexl-1.0.6-h166bdaf_1.tar.bz2 27 | https://conda.anaconda.org/conda-forge/linux-64/geos-3.12.0-h59595ed_0.conda 28 | https://conda.anaconda.org/conda-forge/linux-64/gettext-0.21.1-h27087fc_0.tar.bz2 29 | https://conda.anaconda.org/conda-forge/linux-64/giflib-5.2.1-h0b41bf4_3.conda 30 | https://conda.anaconda.org/conda-forge/linux-64/gtest-1.14.0-h00ab1b0_1.conda 31 | https://conda.anaconda.org/conda-forge/linux-64/icu-73.2-h59595ed_0.conda 32 | https://conda.anaconda.org/conda-forge/linux-64/json-c-0.17-h7ab15ed_0.conda 33 | https://conda.anaconda.org/conda-forge/linux-64/keyutils-1.6.1-h166bdaf_0.tar.bz2 34 | https://conda.anaconda.org/conda-forge/linux-64/lerc-4.0.0-h27087fc_0.tar.bz2 35 | https://conda.anaconda.org/conda-forge/linux-64/libabseil-20230125.3-cxx17_h59595ed_0.conda 36 | https://conda.anaconda.org/conda-forge/linux-64/libaec-1.0.6-hcb278e6_1.conda 37 | https://conda.anaconda.org/conda-forge/linux-64/libcrc32c-1.1.2-h9c3ff4c_0.tar.bz2 38 | https://conda.anaconda.org/conda-forge/linux-64/libdeflate-1.18-h0b41bf4_0.conda 39 | https://conda.anaconda.org/conda-forge/linux-64/libev-4.33-h516909a_1.tar.bz2 40 | https://conda.anaconda.org/conda-forge/linux-64/libexpat-2.5.0-hcb278e6_1.conda 41 | https://conda.anaconda.org/conda-forge/linux-64/libffi-3.4.2-h7f98852_5.tar.bz2 42 | https://conda.anaconda.org/conda-forge/linux-64/libiconv-1.17-h166bdaf_0.tar.bz2 43 | https://conda.anaconda.org/conda-forge/linux-64/libjpeg-turbo-2.1.5.1-h0b41bf4_0.conda 44 | https://conda.anaconda.org/conda-forge/linux-64/libnsl-2.0.0-h7f98852_0.tar.bz2 45 | https://conda.anaconda.org/conda-forge/linux-64/libopenblas-0.3.24-pthreads_h413a1c8_0.conda 46 | https://conda.anaconda.org/conda-forge/linux-64/libuuid-2.38.1-h0b41bf4_0.conda 47 | https://conda.anaconda.org/conda-forge/linux-64/libwebp-base-1.3.1-hd590300_0.conda 48 | https://conda.anaconda.org/conda-forge/linux-64/libzlib-1.2.13-hd590300_5.conda 49 | https://conda.anaconda.org/conda-forge/linux-64/lz4-c-1.9.4-hcb278e6_0.conda 50 | https://conda.anaconda.org/conda-forge/linux-64/lzo-2.10-h516909a_1000.tar.bz2 51 | https://conda.anaconda.org/conda-forge/linux-64/ncurses-6.4-hcb278e6_0.conda 52 | https://conda.anaconda.org/conda-forge/linux-64/nspr-4.35-h27087fc_0.conda 53 | https://conda.anaconda.org/conda-forge/linux-64/openssl-3.1.2-hd590300_0.conda 54 | https://conda.anaconda.org/conda-forge/linux-64/pixman-0.40.0-h36c2ea0_0.tar.bz2 55 | https://conda.anaconda.org/conda-forge/linux-64/pthread-stubs-0.4-h36c2ea0_1001.tar.bz2 56 | https://conda.anaconda.org/conda-forge/linux-64/re2-2023.03.02-h8c504da_0.conda 57 | https://conda.anaconda.org/conda-forge/linux-64/snappy-1.1.10-h9fff704_0.conda 58 | https://conda.anaconda.org/conda-forge/linux-64/tzcode-2023c-h0b41bf4_0.conda 59 | https://conda.anaconda.org/conda-forge/linux-64/xorg-kbproto-1.0.7-h7f98852_1002.tar.bz2 60 | https://conda.anaconda.org/conda-forge/linux-64/xorg-libice-1.1.1-hd590300_0.conda 61 | https://conda.anaconda.org/conda-forge/linux-64/xorg-libxau-1.0.11-hd590300_0.conda 62 | https://conda.anaconda.org/conda-forge/linux-64/xorg-libxdmcp-1.1.3-h7f98852_0.tar.bz2 63 | https://conda.anaconda.org/conda-forge/linux-64/xorg-renderproto-0.11.1-h7f98852_1002.tar.bz2 64 | https://conda.anaconda.org/conda-forge/linux-64/xorg-xextproto-7.3.0-h0b41bf4_1003.conda 65 | https://conda.anaconda.org/conda-forge/linux-64/xorg-xproto-7.0.31-h7f98852_1007.tar.bz2 66 | https://conda.anaconda.org/conda-forge/linux-64/xz-5.2.6-h166bdaf_0.tar.bz2 67 | https://conda.anaconda.org/conda-forge/linux-64/yaml-0.2.5-h7f98852_2.tar.bz2 68 | https://conda.anaconda.org/conda-forge/linux-64/expat-2.5.0-hcb278e6_1.conda 69 | https://conda.anaconda.org/conda-forge/linux-64/hdf4-4.2.15-h501b40f_6.conda 70 | https://conda.anaconda.org/conda-forge/linux-64/libblas-3.9.0-18_linux64_openblas.conda 71 | https://conda.anaconda.org/conda-forge/linux-64/libedit-3.1.20191231-he28a2e2_2.tar.bz2 72 | https://conda.anaconda.org/conda-forge/linux-64/libnghttp2-1.52.0-h61bc06f_0.conda 73 | https://conda.anaconda.org/conda-forge/linux-64/libpng-1.6.39-h753d276_0.conda 74 | https://conda.anaconda.org/conda-forge/linux-64/libprotobuf-4.23.3-hd1fb520_1.conda 75 | https://conda.anaconda.org/conda-forge/linux-64/librttopo-1.1.0-hb58d41b_14.conda 76 | https://conda.anaconda.org/conda-forge/linux-64/libsqlite-3.43.0-h2797004_0.conda 77 | https://conda.anaconda.org/conda-forge/linux-64/libssh2-1.11.0-h0841786_0.conda 78 | https://conda.anaconda.org/conda-forge/linux-64/libxcb-1.15-h0b41bf4_0.conda 79 | https://conda.anaconda.org/conda-forge/linux-64/libxml2-2.11.5-h232c23b_1.conda 80 | https://conda.anaconda.org/conda-forge/linux-64/libzip-1.10.1-h2629f0a_2.conda 81 | https://conda.anaconda.org/conda-forge/linux-64/pcre2-10.40-hc3806b6_0.tar.bz2 82 | https://conda.anaconda.org/conda-forge/linux-64/readline-8.2-h8228510_1.conda 83 | https://conda.anaconda.org/conda-forge/linux-64/tk-8.6.12-h27826a3_0.tar.bz2 84 | https://conda.anaconda.org/conda-forge/linux-64/xorg-libsm-1.2.4-h7391055_0.conda 85 | https://conda.anaconda.org/conda-forge/linux-64/zlib-1.2.13-hd590300_5.conda 86 | https://conda.anaconda.org/conda-forge/linux-64/zstd-1.5.5-hfc55251_0.conda 87 | https://conda.anaconda.org/conda-forge/linux-64/blosc-1.21.5-h0f2a231_0.conda 88 | https://conda.anaconda.org/conda-forge/linux-64/boost-cpp-1.78.0-h2c5509c_4.conda 89 | https://conda.anaconda.org/conda-forge/linux-64/freetype-2.12.1-hca18f0e_1.conda 90 | https://conda.anaconda.org/conda-forge/linux-64/krb5-1.21.2-h659d440_0.conda 91 | https://conda.anaconda.org/conda-forge/linux-64/libarchive-3.6.2-h039dbb9_1.conda 92 | https://conda.anaconda.org/conda-forge/linux-64/libcblas-3.9.0-18_linux64_openblas.conda 93 | https://conda.anaconda.org/conda-forge/linux-64/libglib-2.76.4-hebfc3b9_0.conda 94 | https://conda.anaconda.org/conda-forge/linux-64/libgrpc-1.56.2-h3905398_1.conda 95 | https://conda.anaconda.org/conda-forge/linux-64/liblapack-3.9.0-18_linux64_openblas.conda 96 | https://conda.anaconda.org/conda-forge/linux-64/libtiff-4.5.1-h8b53f26_1.conda 97 | https://conda.anaconda.org/conda-forge/linux-64/libxslt-1.1.37-h0054252_1.conda 98 | https://conda.anaconda.org/conda-forge/linux-64/nss-3.92-h1d7d5a4_0.conda 99 | https://conda.anaconda.org/conda-forge/linux-64/python-3.9.18-h0755675_0_cpython.conda 100 | https://conda.anaconda.org/conda-forge/linux-64/sqlite-3.43.0-h2c6b66d_0.conda 101 | https://conda.anaconda.org/conda-forge/linux-64/xorg-libx11-1.8.6-h8ee46fc_0.conda 102 | https://conda.anaconda.org/conda-forge/noarch/backoff-2.2.1-pyhd8ed1ab_0.tar.bz2 103 | https://conda.anaconda.org/conda-forge/linux-64/brotli-python-1.1.0-py39h3d6467e_0.conda 104 | https://conda.anaconda.org/conda-forge/noarch/cached_property-1.5.2-pyha770c72_1.tar.bz2 105 | https://conda.anaconda.org/conda-forge/noarch/certifi-2023.7.22-pyhd8ed1ab_0.conda 106 | https://conda.anaconda.org/conda-forge/noarch/charset-normalizer-3.2.0-pyhd8ed1ab_0.conda 107 | https://conda.anaconda.org/conda-forge/linux-64/fontconfig-2.14.2-h14ed4e7_0.conda 108 | https://conda.anaconda.org/conda-forge/noarch/idna-3.4-pyhd8ed1ab_0.tar.bz2 109 | https://conda.anaconda.org/conda-forge/linux-64/lcms2-2.15-haa2dc70_1.conda 110 | https://conda.anaconda.org/conda-forge/linux-64/libcurl-8.2.1-hca28451_0.conda 111 | https://conda.anaconda.org/conda-forge/linux-64/libkml-1.3.0-h37653c0_1015.tar.bz2 112 | https://conda.anaconda.org/conda-forge/linux-64/libpq-15.4-hfc447b1_0.conda 113 | https://conda.anaconda.org/conda-forge/linux-64/lxml-4.9.3-py39hed45dcc_0.conda 114 | https://conda.anaconda.org/conda-forge/linux-64/numpy-1.25.2-py39h6183b62_0.conda 115 | https://conda.anaconda.org/conda-forge/linux-64/openjpeg-2.5.0-hfec8fc6_2.conda 116 | https://conda.anaconda.org/conda-forge/noarch/packaging-23.1-pyhd8ed1ab_0.conda 117 | https://conda.anaconda.org/conda-forge/linux-64/pybind11-global-2.11.1-py39h7633fee_0.conda 118 | https://conda.anaconda.org/conda-forge/noarch/pysocks-1.7.1-pyha2e5f31_6.tar.bz2 119 | https://conda.anaconda.org/conda-forge/linux-64/pyyaml-6.0.1-py39hd1e30aa_0.conda 120 | https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml.clib-0.2.7-py39h72bdee0_1.conda 121 | https://conda.anaconda.org/conda-forge/noarch/setuptools-68.1.2-pyhd8ed1ab_0.conda 122 | https://conda.anaconda.org/conda-forge/noarch/typing_extensions-4.7.1-pyha770c72_0.conda 123 | https://conda.anaconda.org/conda-forge/noarch/wheel-0.41.2-pyhd8ed1ab_0.conda 124 | https://conda.anaconda.org/conda-forge/linux-64/xorg-libxext-1.3.4-h0b41bf4_2.conda 125 | https://conda.anaconda.org/conda-forge/linux-64/xorg-libxrender-0.9.11-hd590300_0.conda 126 | https://conda.anaconda.org/conda-forge/noarch/zipp-3.16.2-pyhd8ed1ab_0.conda 127 | https://conda.anaconda.org/conda-forge/noarch/cached-property-1.5.2-hd8ed1ab_1.tar.bz2 128 | https://conda.anaconda.org/conda-forge/linux-64/cairo-1.16.0-h0c91306_1017.conda 129 | https://conda.anaconda.org/conda-forge/linux-64/cfitsio-4.3.0-hbdc6101_0.conda 130 | https://conda.anaconda.org/conda-forge/linux-64/hdf5-1.14.2-nompi_h4f84152_100.conda 131 | https://conda.anaconda.org/conda-forge/noarch/importlib_resources-6.0.1-pyhd8ed1ab_0.conda 132 | https://conda.anaconda.org/conda-forge/linux-64/libgoogle-cloud-2.12.0-h840a212_1.conda 133 | https://conda.anaconda.org/conda-forge/noarch/pip-23.2.1-pyhd8ed1ab_0.conda 134 | https://conda.anaconda.org/conda-forge/linux-64/postgresql-15.4-h8972f4a_0.conda 135 | https://conda.anaconda.org/conda-forge/linux-64/proj-9.2.1-ha643af7_0.conda 136 | https://conda.anaconda.org/conda-forge/linux-64/pybind11-2.11.1-py39h7633fee_0.conda 137 | https://conda.anaconda.org/conda-forge/linux-64/ruamel.yaml-0.17.32-py39hd1e30aa_0.conda 138 | https://conda.anaconda.org/conda-forge/linux-64/shapely-2.0.1-py39h1bc45ef_2.conda 139 | https://conda.anaconda.org/conda-forge/noarch/typing-extensions-4.7.1-hd8ed1ab_0.conda 140 | https://conda.anaconda.org/conda-forge/noarch/urllib3-2.0.4-pyhd8ed1ab_0.conda 141 | https://conda.anaconda.org/conda-forge/linux-64/xerces-c-3.2.4-hac6953d_3.conda 142 | https://conda.anaconda.org/conda-forge/noarch/yamale-4.0.4-pyh6c4a22f_0.tar.bz2 143 | https://conda.anaconda.org/conda-forge/linux-64/geotiff-1.7.1-h22adcc9_11.conda 144 | https://conda.anaconda.org/conda-forge/linux-64/h5py-3.9.0-nompi_py39h87cadad_102.conda 145 | https://conda.anaconda.org/conda-forge/noarch/importlib-resources-6.0.1-pyhd8ed1ab_0.conda 146 | https://conda.anaconda.org/conda-forge/linux-64/kealib-1.5.1-hcd42e92_5.conda 147 | https://conda.anaconda.org/conda-forge/linux-64/libnetcdf-4.9.2-nompi_h80fb2b6_112.conda 148 | https://conda.anaconda.org/conda-forge/linux-64/libspatialite-5.0.1-h15f6e67_28.conda 149 | https://conda.anaconda.org/conda-forge/noarch/platformdirs-3.10.0-pyhd8ed1ab_0.conda 150 | https://conda.anaconda.org/conda-forge/linux-64/poppler-23.08.0-hd18248d_0.conda 151 | https://conda.anaconda.org/conda-forge/linux-64/pyre-1.12.1-py39ha0dfafb_2.conda 152 | https://conda.anaconda.org/conda-forge/noarch/requests-2.31.0-pyhd8ed1ab_0.conda 153 | https://conda.anaconda.org/conda-forge/linux-64/tiledb-2.16.3-h84d19f0_1.conda 154 | https://conda.anaconda.org/conda-forge/linux-64/libgdal-3.7.1-h880a63b_9.conda 155 | https://conda.anaconda.org/conda-forge/noarch/pooch-1.7.0-pyha770c72_3.conda 156 | https://conda.anaconda.org/conda-forge/linux-64/gdal-3.7.1-py39h41b90d8_9.conda 157 | https://conda.anaconda.org/conda-forge/linux-64/scipy-1.11.2-py39h6183b62_0.conda 158 | https://conda.anaconda.org/conda-forge/linux-64/isce3-0.15.0-py39h431996e_0.conda -------------------------------------------------------------------------------- /Docker/requirements.txt: -------------------------------------------------------------------------------- 1 | python 2 | cmake>=3.18 3 | eigen>=3.3 4 | fftw>=3.3 5 | gdal>=3.0 6 | gmock>=1.10 7 | gtest>=1.10 8 | h5py>=3.0 9 | hdf5>=1.10.2 10 | libgcc-ng 11 | libstdcxx-ng 12 | lxml 13 | numpy>=1.20 14 | pybind11>=2.5 15 | pyre>=1.11.2 16 | pytest 17 | cython 18 | ruamel.yaml 19 | scipy 20 | setuptools 21 | shapely 22 | yamale 23 | backoff 24 | isce3 25 | libnetcdf 26 | libgdal-hdf5 27 | libgdal-netcdf 28 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | Apache License 2 | Version 2.0, January 2004 3 | http://www.apache.org/licenses/ 4 | 5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 6 | 7 | 1. Definitions. 8 | 9 | "License" shall mean the terms and conditions for use, reproduction, 10 | and distribution as defined by Sections 1 through 9 of this document. 11 | 12 | "Licensor" shall mean the copyright owner or entity authorized by 13 | the copyright owner that is granting the License. 14 | 15 | "Legal Entity" shall mean the union of the acting entity and all 16 | other entities that control, are controlled by, or are under common 17 | control with that entity. For the purposes of this definition, 18 | "control" means (i) the power, direct or indirect, to cause the 19 | direction or management of such entity, whether by contract or 20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the 21 | outstanding shares, or (iii) beneficial ownership of such entity. 22 | 23 | "You" (or "Your") shall mean an individual or Legal Entity 24 | exercising permissions granted by this License. 25 | 26 | "Source" form shall mean the preferred form for making modifications, 27 | including but not limited to software source code, documentation 28 | source, and configuration files. 29 | 30 | "Object" form shall mean any form resulting from mechanical 31 | transformation or translation of a Source form, including but 32 | not limited to compiled object code, generated documentation, 33 | and conversions to other media types. 34 | 35 | "Work" shall mean the work of authorship, whether in Source or 36 | Object form, made available under the License, as indicated by a 37 | copyright notice that is included in or attached to the work 38 | (an example is provided in the Appendix below). 39 | 40 | "Derivative Works" shall mean any work, whether in Source or Object 41 | form, that is based on (or derived from) the Work and for which the 42 | editorial revisions, annotations, elaborations, or other modifications 43 | represent, as a whole, an original work of authorship. For the purposes 44 | of this License, Derivative Works shall not include works that remain 45 | separable from, or merely link (or bind by name) to the interfaces of, 46 | the Work and Derivative Works thereof. 47 | 48 | "Contribution" shall mean any work of authorship, including 49 | the original version of the Work and any modifications or additions 50 | to that Work or Derivative Works thereof, that is intentionally 51 | submitted to Licensor for inclusion in the Work by the copyright owner 52 | or by an individual or Legal Entity authorized to submit on behalf of 53 | the copyright owner. For the purposes of this definition, "submitted" 54 | means any form of electronic, verbal, or written communication sent 55 | to the Licensor or its representatives, including but not limited to 56 | communication on electronic mailing lists, source code control systems, 57 | and issue tracking systems that are managed by, or on behalf of, the 58 | Licensor for the purpose of discussing and improving the Work, but 59 | excluding communication that is conspicuously marked or otherwise 60 | designated in writing by the copyright owner as "Not a Contribution." 61 | 62 | "Contributor" shall mean Licensor and any individual or Legal Entity 63 | on behalf of whom a Contribution has been received by Licensor and 64 | subsequently incorporated within the Work. 65 | 66 | 2. Grant of Copyright License. Subject to the terms and conditions of 67 | this License, each Contributor hereby grants to You a perpetual, 68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 69 | copyright license to reproduce, prepare Derivative Works of, 70 | publicly display, publicly perform, sublicense, and distribute the 71 | Work and such Derivative Works in Source or Object form. 72 | 73 | 3. Grant of Patent License. Subject to the terms and conditions of 74 | this License, each Contributor hereby grants to You a perpetual, 75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable 76 | (except as stated in this section) patent license to make, have made, 77 | use, offer to sell, sell, import, and otherwise transfer the Work, 78 | where such license applies only to those patent claims licensable 79 | by such Contributor that are necessarily infringed by their 80 | Contribution(s) alone or by combination of their Contribution(s) 81 | with the Work to which such Contribution(s) was submitted. If You 82 | institute patent litigation against any entity (including a 83 | cross-claim or counterclaim in a lawsuit) alleging that the Work 84 | or a Contribution incorporated within the Work constitutes direct 85 | or contributory patent infringement, then any patent licenses 86 | granted to You under this License for that Work shall terminate 87 | as of the date such litigation is filed. 88 | 89 | 4. Redistribution. You may reproduce and distribute copies of the 90 | Work or Derivative Works thereof in any medium, with or without 91 | modifications, and in Source or Object form, provided that You 92 | meet the following conditions: 93 | 94 | (a) You must give any other recipients of the Work or 95 | Derivative Works a copy of this License; and 96 | 97 | (b) You must cause any modified files to carry prominent notices 98 | stating that You changed the files; and 99 | 100 | (c) You must retain, in the Source form of any Derivative Works 101 | that You distribute, all copyright, patent, trademark, and 102 | attribution notices from the Source form of the Work, 103 | excluding those notices that do not pertain to any part of 104 | the Derivative Works; and 105 | 106 | (d) If the Work includes a "NOTICE" text file as part of its 107 | distribution, then any Derivative Works that You distribute must 108 | include a readable copy of the attribution notices contained 109 | within such NOTICE file, excluding those notices that do not 110 | pertain to any part of the Derivative Works, in at least one 111 | of the following places: within a NOTICE text file distributed 112 | as part of the Derivative Works; within the Source form or 113 | documentation, if provided along with the Derivative Works; or, 114 | within a display generated by the Derivative Works, if and 115 | wherever such third-party notices normally appear. The contents 116 | of the NOTICE file are for informational purposes only and 117 | do not modify the License. You may add Your own attribution 118 | notices within Derivative Works that You distribute, alongside 119 | or as an addendum to the NOTICE text from the Work, provided 120 | that such additional attribution notices cannot be construed 121 | as modifying the License. 122 | 123 | You may add Your own copyright statement to Your modifications and 124 | may provide additional or different license terms and conditions 125 | for use, reproduction, or distribution of Your modifications, or 126 | for any such Derivative Works as a whole, provided Your use, 127 | reproduction, and distribution of the Work otherwise complies with 128 | the conditions stated in this License. 129 | 130 | 5. Submission of Contributions. Unless You explicitly state otherwise, 131 | any Contribution intentionally submitted for inclusion in the Work 132 | by You to the Licensor shall be under the terms and conditions of 133 | this License, without any additional terms or conditions. 134 | Notwithstanding the above, nothing herein shall supersede or modify 135 | the terms of any separate license agreement you may have executed 136 | with Licensor regarding such Contributions. 137 | 138 | 6. Trademarks. This License does not grant permission to use the trade 139 | names, trademarks, service marks, or product names of the Licensor, 140 | except as required for reasonable and customary use in describing the 141 | origin of the Work and reproducing the content of the NOTICE file. 142 | 143 | 7. Disclaimer of Warranty. Unless required by applicable law or 144 | agreed to in writing, Licensor provides the Work (and each 145 | Contributor provides its Contributions) on an "AS IS" BASIS, 146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or 147 | implied, including, without limitation, any warranties or conditions 148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A 149 | PARTICULAR PURPOSE. You are solely responsible for determining the 150 | appropriateness of using or redistributing the Work and assume any 151 | risks associated with Your exercise of permissions under this License. 152 | 153 | 8. Limitation of Liability. In no event and under no legal theory, 154 | whether in tort (including negligence), contract, or otherwise, 155 | unless required by applicable law (such as deliberate and grossly 156 | negligent acts) or agreed to in writing, shall any Contributor be 157 | liable to You for damages, including any direct, indirect, special, 158 | incidental, or consequential damages of any character arising as a 159 | result of this License or out of the use or inability to use the 160 | Work (including but not limited to damages for loss of goodwill, 161 | work stoppage, computer failure or malfunction, or any and all 162 | other commercial damages or losses), even if such Contributor 163 | has been advised of the possibility of such damages. 164 | 165 | 9. Accepting Warranty or Additional Liability. While redistributing 166 | the Work or Derivative Works thereof, You may choose to offer, 167 | and charge a fee for, acceptance of support, warranty, indemnity, 168 | or other liability obligations and/or rights consistent with this 169 | License. However, in accepting such obligations, You may act only 170 | on Your own behalf and on Your sole responsibility, not on behalf 171 | of any other Contributor, and only if You agree to indemnify, 172 | defend, and hold each Contributor harmless for any liability 173 | incurred by, or claims asserted against, such Contributor by reason 174 | of your accepting any such warranty or additional liability. 175 | 176 | END OF TERMS AND CONDITIONS 177 | 178 | APPENDIX: How to apply the Apache License to your work. 179 | 180 | To apply the Apache License to your work, attach the following 181 | boilerplate notice, with the fields enclosed by brackets "[]" 182 | replaced with your own identifying information. (Don't include 183 | the brackets!) The text should be enclosed in the appropriate 184 | comment syntax for the file format. We also recommend that a 185 | file or class name and description of purpose be included on the 186 | same "printed page" as the copyright notice for easier 187 | identification within third-party archives. 188 | 189 | Copyright [yyyy] [name of copyright owner] 190 | 191 | Licensed under the Apache License, Version 2.0 (the "License"); 192 | you may not use this file except in compliance with the License. 193 | You may obtain a copy of the License at 194 | 195 | http://www.apache.org/licenses/LICENSE-2.0 196 | 197 | Unless required by applicable law or agreed to in writing, software 198 | distributed under the License is distributed on an "AS IS" BASIS, 199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 200 | See the License for the specific language governing permissions and 201 | limitations under the License. 202 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # RTC 2 | NASA's Observational Products for End-Users from Remote Sensing Analysis (OPERA) Radiometric Terrain-Corrected (RTC) SAR backscatter from Sentinel-1 (RTC-S1) Science Application Software developed by the OPERA Algoritm Development Team at NASA's Jet Propulsion Laboratory (JPL). 3 | 4 | 5 | 6 | ### Install 7 | 8 | Instructions to install RTC under a conda environment. 9 | 10 | 1. Download the source code: 11 | 12 | ```bash 13 | git clone https://github.com/opera-adt/RTC.git RTC 14 | ``` 15 | 16 | 2. Install `isce3`: 17 | 18 | ```bash 19 | conda install -c conda-forge isce3 20 | ``` 21 | 22 | 3. Install `s1-reader` via pip: 23 | ```bash 24 | git clone https://github.com/opera-adt/s1-reader.git s1-reader 25 | conda install -c conda-forge --file s1-reader/requirements.txt 26 | python -m pip install ./s1-reader 27 | ``` 28 | 29 | 4. Install `RTC` via pip: 30 | ```bash 31 | git clone https://github.com/opera-adt/RTC.git RTC 32 | python -m pip install ./RTC 33 | ``` 34 | 35 | 36 | 37 | ### Usage 38 | 39 | The command below generates the RTC product: 40 | 41 | ```bash 42 | rtc_s1.py 43 | ``` 44 | 45 | To compare the RTC-S1 products, use `rtc_compare.py`. 46 | 47 | ```bash 48 | python rtc_s1.py <1st product HDF5> <2nd product HDF5> 49 | ``` 50 | 51 | # License 52 | Copyright (c) 2021 California Institute of Technology (“Caltech”). U.S. Government sponsorship acknowledged. 53 | 54 | All rights reserved. 55 | 56 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 57 | 58 | Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 59 | Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 60 | Neither the name of Caltech nor its operating division, the Jet Propulsion Laboratory, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. 61 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 62 | -------------------------------------------------------------------------------- /app/rtc_s1.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | RTC-S1 Science Application Software 5 | ''' 6 | 7 | import logging 8 | from rtc.runconfig import RunConfig, load_parameters 9 | from rtc.core import create_logger 10 | from rtc.rtc_s1_single_job import get_rtc_s1_parser 11 | from rtc.rtc_s1 import run_parallel 12 | 13 | 14 | logger = logging.getLogger('rtc_s1') 15 | 16 | 17 | def main(): 18 | ''' 19 | Main entrypoint of the script 20 | ''' 21 | parser = get_rtc_s1_parser() 22 | # parse arguments 23 | args = parser.parse_args() 24 | 25 | # Spawn multiple processes for parallelization 26 | create_logger(args.log_file, args.full_log_formatting) 27 | 28 | # Get a runconfig dict from command line argumens 29 | cfg = RunConfig.load_from_yaml(args.run_config_path) 30 | 31 | load_parameters(cfg) 32 | 33 | # Run geocode burst workflow 34 | path_logfile_parent = args.log_file 35 | flag_full_log_formatting = args.full_log_formatting 36 | run_parallel(cfg, path_logfile_parent, flag_full_log_formatting) 37 | 38 | 39 | if __name__ == "__main__": 40 | # load arguments from command line 41 | main() 42 | -------------------------------------------------------------------------------- /app/rtc_s1_single_job.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | ''' 4 | RTC-S1 Science Application Software (single job) 5 | ''' 6 | 7 | import logging 8 | from rtc.runconfig import RunConfig, load_parameters 9 | from rtc.core import create_logger 10 | from rtc.rtc_s1_single_job import get_rtc_s1_parser, run_single_job 11 | 12 | 13 | logger = logging.getLogger('rtc_s1') 14 | 15 | if __name__ == "__main__": 16 | ''' 17 | Run geocode rtc workflow from command line 18 | ''' 19 | 20 | # load arguments from command line 21 | parser = get_rtc_s1_parser() 22 | 23 | # parse arguments 24 | args = parser.parse_args() 25 | 26 | # create logger 27 | logger = create_logger(args.log_file, args.full_log_formatting) 28 | 29 | # Get a runconfig dict from command line argumens 30 | cfg = RunConfig.load_from_yaml(args.run_config_path) 31 | 32 | load_parameters(cfg) 33 | 34 | # Run geocode burst workflow 35 | run_single_job(cfg) 36 | -------------------------------------------------------------------------------- /build_docker_image.sh: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env bash 2 | 3 | REPO=opera 4 | IMAGE=rtc 5 | TAG=final_1.0.4 6 | 7 | docker_build_args=(--rm --force-rm --network host -t $REPO/$IMAGE:$TAG -f Docker/Dockerfile) 8 | 9 | if [ $# -eq 0 ]; then 10 | echo "Base image was not specified. Using the default image specified in the Dockerfile." 11 | else 12 | echo "Using $1 as the base image." 13 | docker_build_args+=(--build-arg BASE_IMAGE=$1) 14 | fi 15 | 16 | echo "IMAGE is $REPO/$IMAGE:$TAG" 17 | 18 | # fail on any non-zero exit codes 19 | set -ex 20 | 21 | # Not sure how we are using sdist in this script... 22 | python3 setup.py sdist 23 | 24 | # build image 25 | docker build "${docker_build_args[@]}" . 26 | # run tests - to be worked on when the RTC test module is in place 27 | #docker run --rm -u "$(id -u):$(id -g)" -v "$PWD:/mnt" -w /mnt -it --network host "${IMAGE}:$t" pytest /mnt/tests/ 28 | 29 | # create image tar 30 | docker save $REPO/$IMAGE:$TAG > Docker/dockerimg_rtc_${TAG}.tar 31 | 32 | # remove image 33 | docker image rm $REPO/$IMAGE:$TAG 34 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import os 2 | import re 3 | from setuptools import setup 4 | 5 | 6 | def _get_version(): 7 | """Returns the RTC-S1 science application software version from the 8 | file `src/rtc/version.py` 9 | 10 | Returns 11 | ------- 12 | version : str 13 | RTC-S1 science application software version 14 | """ 15 | 16 | version_file = os.path.join('src', 'rtc', 'version.py') 17 | 18 | with open(version_file, 'r') as f: 19 | text = f.read() 20 | 21 | # Get first match of the version number contained in the version file 22 | # This regex should match a pattern like: VERSION = '3.2.5', but it 23 | # allows for varying spaces, number of major/minor versions, 24 | # and quotation mark styles. 25 | p = re.search("VERSION[ ]*=[ ]*['\"]\d+([.]\d+)*['\"]", text) 26 | 27 | # Check that the version file contains properly formatted text string 28 | if p is None: 29 | raise ValueError( 30 | f'Version file {version_file} not properly formatted.' 31 | " It should contain text matching e.g. VERSION = '2.3.4'") 32 | 33 | # Extract just the numeric version number from the string 34 | p = re.search("\d+([.]\d+)*", p.group(0)) 35 | 36 | return p.group(0) 37 | 38 | 39 | __version__ = version = VERSION = _get_version() 40 | 41 | print(f'RTC-S1 SAS version {version}') 42 | 43 | long_description = '' 44 | 45 | package_data_dict = {} 46 | 47 | package_data_dict['rtc'] = [ 48 | os.path.join('defaults', 'rtc_s1.yaml'), 49 | os.path.join('defaults', 'rtc_s1_static.yaml'), 50 | os.path.join('schemas', 'rtc_s1.yaml')] 51 | 52 | setup( 53 | name='rtc', 54 | version=version, 55 | description=('OPERA Radiometric Terrain-Corrected (RTC) SAR backscatter' 56 | ' from Sentinel-1 Science Application Software (SAS)'), 57 | package_dir={'rtc': 'src/rtc'}, 58 | include_package_data=True, 59 | package_data=package_data_dict, 60 | classifiers=['Programming Language :: Python'], 61 | scripts=['app/rtc_s1.py', 'app/rtc_s1_single_job.py', 62 | 'app/rtc_compare.py'], 63 | install_requires=['argparse', 'numpy', 'yamale', 64 | 'scipy', 'pytest', 'requests', 65 | 'pyproj', 'shapely', 'matplotlib'], 66 | url='https://github.com/opera-adt/RTC', 67 | license=('Copyright by the California Institute of Technology.' 68 | ' ALL RIGHTS RESERVED.'), 69 | long_description=long_description, 70 | ) 71 | -------------------------------------------------------------------------------- /src/rtc/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opera-adt/RTC/c5cc7403af16bb67fd14f43bf2aec15c6a422297/src/rtc/__init__.py -------------------------------------------------------------------------------- /src/rtc/core.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import os 4 | import sys 5 | import shutil 6 | import logging 7 | import numpy as np 8 | import tempfile 9 | 10 | from osgeo import gdal, osr, ogr 11 | 12 | # Buffer for antimeridian crossing test (33 arcsec: ~ 1km) 13 | ANTIMERIDIAN_CROSSING_RIGHT_SIDE_TEST_BUFFER = 33 * 0.0002777 14 | 15 | 16 | class Logger(object): 17 | """ 18 | Class to redirect stdout and stderr to the logger 19 | """ 20 | def __init__(self, logger, level, prefix=''): 21 | """ 22 | Class constructor 23 | """ 24 | self.logger = logger 25 | self.level = level 26 | self.prefix = prefix 27 | self.buffer = '' 28 | 29 | def write(self, message): 30 | 31 | # Add message to the buffer until "\n" is found 32 | if '\n' not in message: 33 | self.buffer += message 34 | return 35 | 36 | message = self.buffer + message 37 | 38 | # check if there is any character after the last \n 39 | # if so, move it to the buffer 40 | message_list = message.split('\n') 41 | if not message.endswith('\n'): 42 | self.buffer = message_list[-1] 43 | message_list = message_list[:-1] 44 | else: 45 | self.buffer = '' 46 | 47 | # print all characters before the last \n 48 | for line in message_list: 49 | if not line: 50 | continue 51 | self.logger.log(self.level, self.prefix + line) 52 | 53 | def flush(self): 54 | if self.buffer != '': 55 | self.logger.log(self.level, self.buffer) 56 | self.buffer = '' 57 | 58 | 59 | def save_as_cog(filename, scratch_dir='.', logger=None, 60 | flag_compress=True, ovr_resamp_algorithm=None, 61 | compression='DEFLATE', nbits=None): 62 | """Save (overwrite) a GeoTIFF file as a cloud-optimized GeoTIFF. 63 | 64 | Parameters 65 | ---------- 66 | filename: str 67 | GeoTIFF to be saved as a cloud-optimized GeoTIFF 68 | scratch_dir: str (optional) 69 | Temporary Directory 70 | ovr_resamp_algorithm: str (optional) 71 | Resampling algorithm for overviews. 72 | Options: "AVERAGE", "AVERAGE_MAGPHASE", "RMS", "BILINEAR", 73 | "CUBIC", "CUBICSPLINE", "GAUSS", "LANCZOS", "MODE", 74 | "NEAREST", or "NONE". Defaults to "NEAREST", if integer, and 75 | "CUBICSPLINE", otherwise. 76 | compression: str (optional) 77 | Compression type. 78 | Optional: "NONE", "LZW", "JPEG", "DEFLATE", "ZSTD", "WEBP", 79 | "LERC", "LERC_DEFLATE", "LERC_ZSTD", "LZMA" 80 | 81 | """ 82 | if logger is None: 83 | logger = logging.getLogger('rtc_s1') 84 | 85 | logger.info(' COG step 1: add overviews') 86 | 87 | # open GeoTIFF file in update mode 88 | try: 89 | gdal_ds = gdal.Open(filename, gdal.GA_Update) 90 | except RuntimeError: 91 | # fix for GDAL >= 3.8 92 | gdal_ds = gdal.OpenEx(filename, gdal.GA_Update, 93 | open_options=["IGNORE_COG_LAYOUT_BREAK=YES"]) 94 | 95 | gdal_dtype = gdal_ds.GetRasterBand(1).DataType 96 | dtype_name = gdal.GetDataTypeName(gdal_dtype).lower() 97 | 98 | overviews_list = [4, 16, 64, 128] 99 | 100 | is_integer = 'byte' in dtype_name or 'int' in dtype_name 101 | if ovr_resamp_algorithm is None and is_integer: 102 | ovr_resamp_algorithm = 'NEAREST' 103 | elif ovr_resamp_algorithm is None: 104 | ovr_resamp_algorithm = 'CUBICSPLINE' 105 | 106 | logger.info(' overview resampling algorithm:' 107 | f' {ovr_resamp_algorithm}') 108 | logger.info(f' overview list: {overviews_list}') 109 | 110 | gdal_ds.BuildOverviews(ovr_resamp_algorithm, overviews_list, 111 | gdal.TermProgress_nocb) 112 | 113 | del gdal_ds # close the dataset (Python object and pointers) 114 | external_overview_file = filename + '.ovr' 115 | if os.path.isfile(external_overview_file): 116 | os.remove(external_overview_file) 117 | 118 | logger.info(' COG step 2: save as COG') 119 | temp_file = tempfile.NamedTemporaryFile( 120 | dir=scratch_dir, suffix='.tif').name 121 | 122 | # Blocks of 512 x 512 => 256 KiB (UInt8) or 1MiB (Float32) 123 | tile_size = 512 124 | gdal_translate_options = ['BIGTIFF=IF_SAFER', 125 | 'MAX_Z_ERROR=0', 126 | 'TILED=YES', 127 | f'BLOCKXSIZE={tile_size}', 128 | f'BLOCKYSIZE={tile_size}', 129 | 'COPY_SRC_OVERVIEWS=YES'] 130 | 131 | if compression: 132 | gdal_translate_options += [f'COMPRESS={compression}'] 133 | 134 | if is_integer: 135 | gdal_translate_options += ['PREDICTOR=2'] 136 | else: 137 | gdal_translate_options += ['PREDICTOR=3'] 138 | 139 | if nbits is not None: 140 | gdal_translate_options += [f'NBITS={nbits}'] 141 | 142 | # suppress type casting errors 143 | gdal.SetConfigOption('CPL_LOG', '/dev/null') 144 | 145 | gdal.Translate(temp_file, filename, 146 | creationOptions=gdal_translate_options) 147 | 148 | shutil.move(temp_file, filename) 149 | 150 | logger.info(' COG step 3: validate') 151 | try: 152 | from rtc.extern.validate_cloud_optimized_geotiff import main as \ 153 | validate_cog 154 | except ModuleNotFoundError: 155 | logger.info('WARNING could not import module' 156 | ' validate_cloud_optimized_geotiff') 157 | return 158 | 159 | argv = ['--full-check=yes', filename] 160 | validate_cog_ret = validate_cog(argv) 161 | if validate_cog_ret == 0: 162 | logger.info(f' file "{filename}" is a valid cloud optimized' 163 | ' GeoTIFF') 164 | else: 165 | logger.warning(f' file "{filename}" is NOT a valid cloud' 166 | f' optimized GeoTIFF!') 167 | 168 | 169 | def _get_ogr_polygon(min_x, max_y, max_x, min_y, file_srs): 170 | file_ring = ogr.Geometry(ogr.wkbLinearRing) 171 | file_ring.AddPoint(min_x, max_y) 172 | file_ring.AddPoint(max_x, max_y) 173 | file_ring.AddPoint(max_x, min_y) 174 | file_ring.AddPoint(min_x, min_y) 175 | file_ring.AddPoint(min_x, max_y) 176 | file_polygon = ogr.Geometry(ogr.wkbPolygon) 177 | file_polygon.AddGeometry(file_ring) 178 | file_polygon.AssignSpatialReference(file_srs) 179 | assert file_polygon.IsValid() 180 | return file_polygon 181 | 182 | 183 | def get_tile_srs_bbox(tile_min_y_projected, tile_max_y_projected, 184 | tile_min_x_projected, tile_max_x_projected, 185 | tile_srs, polygon_srs, logger=None): 186 | """Get tile bounding box for a given spatial reference system (SRS) 187 | 188 | Parameters 189 | ---------- 190 | tile_min_y_projected: float 191 | Tile minimum Y-coordinate 192 | tile_max_y_projected: float 193 | Tile maximum Y-coordinate 194 | tile_min_x_projected: float 195 | Tile minimum X-coordinate 196 | tile_max_x_projected: float 197 | Tile maximum X-coordinate 198 | tile_srs: osr.SpatialReference 199 | Tile original spatial reference system (SRS). If the polygon 200 | SRS is geographic, its Axis Mapping Strategy will 201 | be updated to osr.OAMS_TRADITIONAL_GIS_ORDER 202 | polygon_srs: osr.SpatialReference 203 | Polygon spatial reference system (SRS). If the polygon 204 | SRS is geographic, its Axis Mapping Strategy will 205 | be updated to osr.OAMS_TRADITIONAL_GIS_ORDER 206 | logger : logging.Logger, optional 207 | Logger object 208 | Returns 209 | ------- 210 | tile_polygon: ogr.Geometry 211 | Rectangle representing polygon SRS bounding box 212 | tile_min_y: float 213 | Tile minimum Y-coordinate (polygon SRS) 214 | tile_max_y: float 215 | Tile maximum Y-coordinate (polygon SRS) 216 | tile_min_x: float 217 | Tile minimum X-coordinate (polygon SRS) 218 | tile_max_x: float 219 | Tile maximum X-coordinate (polygon SRS) 220 | """ 221 | if logger is None: 222 | logger = logging.getLogger('rtc_s1') 223 | 224 | # forces returned values from TransformPoint() to be (x, y, z) 225 | # rather than (y, x, z) for geographic SRS 226 | if tile_srs.IsGeographic(): 227 | try: 228 | tile_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) 229 | except AttributeError: 230 | logger.warning('WARNING Could not set the ancillary input SRS axis' 231 | ' mapping strategy (SetAxisMappingStrategy())' 232 | ' to osr.OAMS_TRADITIONAL_GIS_ORDER') 233 | if polygon_srs.IsGeographic(): 234 | try: 235 | polygon_srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) 236 | except AttributeError: 237 | logger.warning('WARNING Could not set the ancillary input SRS axis' 238 | ' mapping strategy (SetAxisMappingStrategy())' 239 | ' to osr.OAMS_TRADITIONAL_GIS_ORDER') 240 | transformation = osr.CoordinateTransformation(tile_srs, polygon_srs) 241 | 242 | elevation = 0 243 | tile_x_array = np.zeros(4) 244 | tile_y_array = np.zeros(4) 245 | tile_x_array[0], tile_y_array[0], z = transformation.TransformPoint( 246 | tile_min_x_projected, tile_max_y_projected, elevation) 247 | tile_x_array[1], tile_y_array[1], z = transformation.TransformPoint( 248 | tile_max_x_projected, tile_max_y_projected, elevation) 249 | tile_x_array[2], tile_y_array[2], z = transformation.TransformPoint( 250 | tile_max_x_projected, tile_min_y_projected, elevation) 251 | tile_x_array[3], tile_y_array[3], z = transformation.TransformPoint( 252 | tile_min_x_projected, tile_min_y_projected, elevation) 253 | tile_min_y = np.min(tile_y_array) 254 | tile_max_y = np.max(tile_y_array) 255 | tile_min_x = np.min(tile_x_array) 256 | tile_max_x = np.max(tile_x_array) 257 | 258 | # handles antimeridian: tile_max_x around +180 and tile_min_x around -180 259 | if polygon_srs.IsGeographic() and tile_max_x - tile_min_x > 180: 260 | # unwrap negative longitude values 261 | # move longitude range from [-180, 180] to [0, 360] 262 | new_tile_x_array = [x + (x < 0) * 360 for x in tile_x_array] 263 | tile_min_x = np.min(new_tile_x_array) 264 | tile_max_x = np.max(new_tile_x_array) 265 | 266 | tile_ring = ogr.Geometry(ogr.wkbLinearRing) 267 | tile_ring.AddPoint(tile_min_x, tile_max_y) 268 | tile_ring.AddPoint(tile_max_x, tile_max_y) 269 | tile_ring.AddPoint(tile_max_x, tile_min_y) 270 | tile_ring.AddPoint(tile_min_x, tile_min_y) 271 | tile_ring.AddPoint(tile_min_x, tile_max_y) 272 | tile_polygon = ogr.Geometry(ogr.wkbPolygon) 273 | tile_polygon.AddGeometry(tile_ring) 274 | tile_polygon.AssignSpatialReference(polygon_srs) 275 | return tile_polygon, tile_min_y, tile_max_y, tile_min_x, tile_max_x 276 | 277 | 278 | def _antimeridian_crossing_requires_special_handling( 279 | file_srs, ancillary_min_x, ancillary_max_x, tile_min_x, tile_max_x): 280 | ''' 281 | Check if ancillary input requires special handling due to 282 | the antimeridian crossing 283 | 284 | Parameters 285 | ---------- 286 | file_srs: osr.SpatialReference 287 | Ancillary file spatial reference system (SRS) 288 | ancillary_min_x: float 289 | Ancillary file min longitude value in degrees 290 | ancillary_max_x: float 291 | Ancillary file max longitude value in degrees 292 | tile_min_x: float 293 | Tile min longitude value in degrees 294 | tile_max_x: float 295 | Tile max longitude value in degrees 296 | 297 | Returns 298 | ------- 299 | flag_requires_special_handling : bool 300 | Flag that indicate if the ancillary input requires special handling 301 | ''' 302 | 303 | # The coordinate wrapping around the ancillary file 304 | # discontinuities such as the antimeridian only happens 305 | # if the DEM is provided in geographic coordinates 306 | if not file_srs.IsGeographic(): 307 | flag_requires_special_handling = False 308 | return flag_requires_special_handling 309 | 310 | # Check whether the ancillary file covers the entire longitude range 311 | # Mark flag as True if the longitude coordinates span completes 312 | # or is close to complete the circle (360 degrees). 313 | # We use 359 instead of 360 degrees to have some buffer 314 | # for incomplete ancillary files 315 | flag_ancillary_covers_entire_longitude_range = \ 316 | (ancillary_max_x - ancillary_min_x) > 359 317 | 318 | if not flag_ancillary_covers_entire_longitude_range: 319 | flag_requires_special_handling = False 320 | return flag_requires_special_handling 321 | 322 | # Flag to indicate if the tile crosses the ancillary file 323 | # discontinuity (ancillary_max_x). 324 | # Notice that`ancillary_max_x` refers to the eastern edge 325 | # of the ancillary file, which, for the antimeridian case, 326 | # may not lay exactly on +/- 180. For example, it may be 327 | # 179.9998611111111 328 | flag_tile_crosses_discontinuity = \ 329 | tile_min_x < ancillary_max_x < tile_max_x 330 | 331 | flag_requires_special_handling = \ 332 | flag_tile_crosses_discontinuity 333 | 334 | return flag_requires_special_handling 335 | 336 | 337 | def check_ancillary_inputs(check_ancillary_inputs_coverage, 338 | dem_file, geogrid, 339 | metadata_dict, logger=None): 340 | """Check for the existence and coverage of provided ancillary inputs 341 | (e.g., DEM) wrt. to a reference geogrid. The function 342 | also updates the product's dictionary metadata indicating the coverage 343 | of each ancillary input wrt. to the reference geogrid 344 | 345 | Parameters 346 | ---------- 347 | check_ancillary_inputs_coverage: bool 348 | Flag that enable/disable checks for all ancillary inputs 349 | excluding the shoreline shapefile 350 | check_shoreline_shapefile: bool 351 | Flag that checks for the shoreline shapefile 352 | dem_file: str 353 | DEM filename 354 | geogrid: isce3.product.GeoGridParameters 355 | Product's ISCE3 geogrid object 356 | metadata_dict: collections.OrderedDict 357 | Metadata dictionary 358 | logger : logging.Logger, optional 359 | Logger object 360 | """ 361 | if logger is None: 362 | logger = logging.getLogger('rtc_s1') 363 | logger.info("Check ancillary inputs' coverage:") 364 | 365 | # file description (to be printed to the user if an error happens) 366 | dem_file_description = 'DEM file' 367 | 368 | if not check_ancillary_inputs_coverage: 369 | 370 | # print messages to the user 371 | logger.info(f' {dem_file_description} coverage:' 372 | ' (not tested)') 373 | 374 | # update RTC-S1 product metadata 375 | metadata_dict['DEM_COVERAGE'] = 'NOT_TESTED' 376 | 377 | return 378 | 379 | rasters_to_check_dict = {'DEM': (dem_file_description, dem_file)} 380 | 381 | geogrid_x0_projected = geogrid.start_x 382 | geogrid_y0_projected = geogrid.start_y 383 | # define end (final) geogrid X/Y edge coordinates 384 | geogrid_xf_projected = (geogrid.start_x + 385 | geogrid.spacing_x * geogrid.width) 386 | geogrid_yf_projected = (geogrid.start_y + 387 | geogrid.spacing_y * geogrid.length) 388 | 389 | geogrid_srs = osr.SpatialReference() 390 | geogrid_srs.ImportFromEPSG(geogrid.epsg) 391 | 392 | for ancillary_file_type, \ 393 | (ancillary_file_description, ancillary_file_name) in \ 394 | rasters_to_check_dict.items(): 395 | 396 | # check if file was provided 397 | if not ancillary_file_name: 398 | error_msg = f'ERROR {ancillary_file_description} not provided' 399 | logger.error(error_msg) 400 | raise ValueError(error_msg) 401 | 402 | # check if file exists 403 | if not os.path.isfile(ancillary_file_name): 404 | error_msg = f'ERROR {ancillary_file_description} not found:' 405 | error_msg += f' {ancillary_file_name}' 406 | logger.error(error_msg) 407 | raise FileNotFoundError(error_msg) 408 | 409 | # test if the reference geogrid is fully covered by the ancillary input 410 | # by checking all ancillary input vertices are located 411 | # outside of the reference geogrid. 412 | ancillary_gdal_ds = gdal.Open(ancillary_file_name, gdal.GA_ReadOnly) 413 | 414 | ancillary_geotransform = ancillary_gdal_ds.GetGeoTransform() 415 | ancillary_projection = ancillary_gdal_ds.GetProjection() 416 | ancillary_x0, ancillary_dx, _, ancillary_y0, _, ancillary_dy = \ 417 | ancillary_geotransform 418 | ancillary_width = ancillary_gdal_ds.GetRasterBand(1).XSize 419 | ancillary_length = ancillary_gdal_ds.GetRasterBand(1).YSize 420 | 421 | del ancillary_gdal_ds 422 | 423 | # define end (final) ancillary input X/Y edge coordinates 424 | ancillary_xf = ancillary_x0 + ancillary_width * ancillary_dx 425 | ancillary_yf = ancillary_y0 + ancillary_length * ancillary_dy 426 | 427 | ancillary_srs = osr.SpatialReference() 428 | ancillary_srs.ImportFromProj4(ancillary_projection) 429 | 430 | ret = get_tile_srs_bbox(geogrid_yf_projected, 431 | geogrid_y0_projected, 432 | geogrid_x0_projected, 433 | geogrid_xf_projected, 434 | geogrid_srs, 435 | ancillary_srs) 436 | geogrid_polygon, geogrid_yf, geogrid_y0, geogrid_x0, geogrid_xf = ret 437 | 438 | # Create input ancillary polygon 439 | ancillary_polygon = _get_ogr_polygon(ancillary_x0, 440 | ancillary_y0, 441 | ancillary_xf, 442 | ancillary_yf, 443 | ancillary_srs) 444 | 445 | coverage_logger_str = ancillary_file_description + ' coverage' 446 | coverage_metadata_str = ancillary_file_type + '_COVERAGE' 447 | 448 | if geogrid_polygon.Within(ancillary_polygon): 449 | # print messages to the user 450 | logger.info(f' {coverage_logger_str}: Full') 451 | 452 | # update RTC-S1 product metadata 453 | metadata_dict[coverage_metadata_str] = 'FULL' 454 | continue 455 | 456 | # If needed, test for antimeridian ("dateline") crossing 457 | if _antimeridian_crossing_requires_special_handling( 458 | ancillary_srs, ancillary_x0, ancillary_xf, 459 | geogrid_x0, geogrid_xf): 460 | 461 | logger.info(f'The input RTC-S1 product crosses the antimeridian' 462 | ' (dateline). Verifying the' 463 | f' {ancillary_file_description}:' 464 | f' {ancillary_file_name}') 465 | 466 | # Left side of the antimeridian crossing: -180 -> +180 467 | ancillary_polygon_1 = _get_ogr_polygon(-180, 90, ancillary_xf, -90, 468 | ancillary_srs) 469 | intersection_1 = geogrid_polygon.Intersection(ancillary_polygon_1) 470 | flag_1_ok = intersection_1.Within(ancillary_polygon) 471 | check_1_str = 'ok' if flag_1_ok else 'fail' 472 | logger.info(f' left side (-180 -> +180): {check_1_str}') 473 | 474 | # Right side of the antimeridian crossing: +180 -> +360 475 | # 476 | # Get the intersection between the geogrid and the right side 477 | # of the antimeridian (with a litter buffer represented by 478 | # ANTIMERIDIAN_CROSSING_RIGHT_SIDE_TEST_BUFFER) 479 | antimeridian_right_side_polygon = _get_ogr_polygon( 480 | ancillary_xf + ANTIMERIDIAN_CROSSING_RIGHT_SIDE_TEST_BUFFER, 481 | 90, ancillary_xf + 360, -90, ancillary_srs) 482 | 483 | intersection_2 = geogrid_polygon.Intersection( 484 | antimeridian_right_side_polygon) 485 | 486 | # Create a polygon representing the ancillary dataset 487 | # at the right side of the antimeridian 488 | ancillary_polygon_2 = _get_ogr_polygon( 489 | ancillary_x0 + 360, ancillary_y0, 490 | ancillary_xf + 360, ancillary_yf, 491 | ancillary_srs) 492 | 493 | # Check if the geogrid-intersected area (if valid) is within 494 | # the ancillary polygon 495 | flag_2_ok = (intersection_2.IsEmpty() or 496 | intersection_2.Within(ancillary_polygon_2)) 497 | check_2_str = 'ok' if flag_2_ok else 'fail' 498 | logger.info(f' right side (+180 -> +360): {check_2_str}') 499 | 500 | if flag_1_ok and flag_2_ok: 501 | # print messages to the user 502 | logger.info(f' {coverage_logger_str}:' 503 | ' Full (with antimeridian crossing)') 504 | 505 | # update RTC-S1 product metadata 506 | metadata_dict[coverage_metadata_str] = \ 507 | 'FULL_WITH_ANTIMERIDIAN_CROSSING' 508 | continue 509 | 510 | # prepare message to the user 511 | msg = f'ERROR the {ancillary_file_description} with extents' 512 | msg += f' S/N: [{ancillary_yf},{ancillary_y0}]' 513 | msg += f' W/E: [{ancillary_x0},{ancillary_xf}],' 514 | msg += ' does not fully cover product geogrid with' 515 | msg += f' extents S/N: [{geogrid_yf},{geogrid_y0}]' 516 | msg += f' W/E: [{geogrid_x0},{geogrid_xf}]' 517 | 518 | logger.error(msg) 519 | raise ValueError(msg) 520 | 521 | 522 | def create_logger(log_file, full_log_formatting=None): 523 | """Create logger object for a log file 524 | 525 | Parameters 526 | ---------- 527 | log_file: str 528 | Log file 529 | full_log_formatting : bool 530 | Flag to enable full formatting of logged messages 531 | 532 | Returns 533 | ------- 534 | logger : logging.Logger 535 | Logger object 536 | """ 537 | if log_file: 538 | logfile_directory = os.path.dirname(log_file) 539 | else: 540 | logfile_directory = None 541 | 542 | if logfile_directory: 543 | os.makedirs(logfile_directory, exist_ok=True) 544 | # create logger 545 | 546 | logger = logging.getLogger('rtc_s1') 547 | logger.setLevel(logging.DEBUG) 548 | 549 | # create console handler and set level to debug 550 | ch = logging.StreamHandler() 551 | ch.setLevel(logging.DEBUG) 552 | 553 | # create formatter 554 | # configure full log format, if enabled 555 | if full_log_formatting: 556 | msgfmt = ('%(asctime)s.%(msecs)03d, %(levelname)s, RTC-S1, ' 557 | '%(module)s, 999999, %(pathname)s:%(lineno)d, "%(message)s"') 558 | 559 | formatter = logging.Formatter(msgfmt, "%Y-%m-%d %H:%M:%S") 560 | else: 561 | formatter = logging.Formatter('%(message)s') 562 | 563 | # add formatter to ch 564 | ch.setFormatter(formatter) 565 | 566 | # add ch to logger 567 | logger.addHandler(ch) 568 | 569 | if log_file: 570 | file_handler = logging.FileHandler(log_file) 571 | 572 | file_handler.setFormatter(formatter) 573 | 574 | # add file handler to logger 575 | logger.addHandler(file_handler) 576 | 577 | sys.stdout = Logger(logger, logging.INFO) 578 | sys.stderr = Logger(logger, logging.ERROR, prefix='[StdErr] ') 579 | 580 | return logger 581 | 582 | 583 | def build_empty_vrt(filename, length, width, fill_value, dtype='Float32', 584 | geotransform=None): 585 | """Build an empty VRT file, i.e, not pointing to any rasters, 586 | with given input dimensions (length and width), data type, and 587 | fill value. 588 | 589 | Parameters 590 | ---------- 591 | filename: str 592 | VRT file name 593 | length: int 594 | VRT data length 595 | width: int 596 | VRT data width 597 | fill_value: scalar 598 | VRT data fill value 599 | dtype: str 600 | VRT data type 601 | geotransform: list(scalar), optional 602 | VRT data geotransform 603 | 604 | Returns 605 | ------- 606 | logger : logging.Logger 607 | Logger object 608 | """ 609 | vrt_contents = f' \n' 611 | if geotransform is not None: 612 | assert len(geotransform) == 6 613 | geotransform_str = ', '.join([str(x) for x in geotransform]) 614 | vrt_contents += f' {geotransform_str}' 615 | vrt_contents += ' \n' 616 | vrt_contents += ( 617 | f' \n' 618 | f' {fill_value} \n' 619 | f' {fill_value} \n' 620 | f' \n' 621 | f' \n') 622 | 623 | with open(filename, 'w') as out: 624 | out.write(vrt_contents) 625 | 626 | if os.path.isfile(filename): 627 | print('file saved:', filename) 628 | -------------------------------------------------------------------------------- /src/rtc/defaults/rtc_s1.yaml: -------------------------------------------------------------------------------- 1 | runconfig: 2 | name: rtc_s1_workflow_default 3 | 4 | groups: 5 | 6 | # Required. Output product type: "RTC_S1" or "RTC_S1_STATIC" 7 | primary_executable: 8 | product_type: RTC_S1 9 | 10 | pge_name_group: 11 | pge_name: RTC_S1_PGE 12 | 13 | input_file_group: 14 | # Required. List of SAFE files (min=1) 15 | safe_file_path: 16 | 17 | # Location from where the source data can be retrieved (URL or DOI) 18 | source_data_access: 19 | 20 | # Required. List of orbit (EOF) files (min=1) 21 | orbit_file_path: 22 | 23 | # Optional. Burst ID to process (empty for all bursts) 24 | burst_id: 25 | 26 | dynamic_ancillary_file_group: 27 | # Digital elevation model 28 | dem_file: 29 | 30 | # Digital elevation model source description 31 | dem_file_description: 32 | 33 | static_ancillary_file_group: 34 | 35 | # burst database sqlite file 36 | burst_database_file: 37 | 38 | product_group: 39 | 40 | processing_type: NOMINAL 41 | 42 | product_version: 43 | 44 | # Directory where PGE will place results 45 | product_path: 46 | # Directory where SAS writes temporary data 47 | scratch_path: 48 | 49 | # If option `save_bursts` is set, output bursts are saved to: 50 | # {output_dir}/{burst_id}/{product_id}{suffix}.{ext} 51 | # If option `save_mosaics` is set, output mosaics are saved to: 52 | # {output_dir}/{product_id}{suffix}.{ext} 53 | # 54 | # If the `product_id` contains the substring "_{burst_id}", the 55 | # substring will be substituted by either: 56 | # - "_" followed by the burst ID, if the product is a burst; or 57 | # - An empty string, if the product is a mosaic. 58 | # 59 | # For example, the `product_id` = `RTC-S1_{burst_id}_S1B` will become 60 | # `RTC-S1_T069-147170-IW1_S1B` for the burst t069-147170-IW1; and it 61 | # will become `RTC-S1_S1B` for the mosaic product. 62 | # 63 | # If the field `product_id` is left empty, the burst product ID will 64 | # follow the RTC-S1 file naming conventions: 65 | # `OPERA_L2_RTC-S1_{burst_id}_{sensing_start_datetime}_ 66 | # {processing_datetime}_{sensor}_{pixel_spacing} 67 | # _{product_version}`. 68 | # 69 | # `suffix` is only used when there are multiple output files. 70 | # `ext` is determined by geocoding_options.output_imagery_format. 71 | output_dir: 72 | product_id: 73 | 74 | # Validity start date for RTC-S1-STATIC products in the format YYYYMMDD 75 | rtc_s1_static_validity_start_date: 76 | 77 | # Location from where the output product can be retrieved (URL or DOI) 78 | product_data_access: 79 | 80 | # Location of the static layers product associated with this product (URL or DOI 81 | static_layers_data_access: 82 | 83 | # Save RTC-S1 bursts 84 | save_bursts: True 85 | 86 | # Save mosaic of RTC-S1 bursts 87 | save_mosaics: False 88 | 89 | # Save browse image(s) 90 | save_browse: True 91 | 92 | output_imagery_format: COG 93 | output_imagery_compression: DEFLATE 94 | output_imagery_nbits: 32 95 | 96 | # Optional. Save secondary layers (e.g., inc. angle) within 97 | # the HDF5 file 98 | save_secondary_layers_as_hdf5: False 99 | 100 | # Save RTC-S1 metadata in the HDF5 format 101 | # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or 102 | # 'COG', and enabled by default for `output_imagery_format` equal 103 | # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True 104 | save_metadata: True 105 | 106 | processing: 107 | 108 | # Check if ancillary inputs cover entirely the output product 109 | check_ancillary_inputs_coverage: True 110 | 111 | # Polarization channels to process. 112 | polarization: 113 | 114 | # Options to run geo2rdr 115 | geo2rdr: 116 | threshold: 1.0e-7 117 | numiter: 50 118 | 119 | # Options to run rdr2geo 120 | rdr2geo: 121 | threshold: 1.0e-7 122 | numiter: 25 123 | 124 | # DEM interpolation method 125 | dem_interpolation_method: biquintic 126 | 127 | # Apply absolute radiometric correction 128 | apply_absolute_radiometric_correction: True 129 | 130 | # Apply thermal noise correction 131 | apply_thermal_noise_correction: True 132 | 133 | # slant range spacing of the correction LUT in meters 134 | correction_lut_range_spacing_in_meters: 120 135 | # Azimuth time spacing of the correction LUT in meters 136 | correction_lut_azimuth_spacing_in_meters: 120 137 | 138 | # OPTIONAL - Apply RTC 139 | apply_rtc: True 140 | 141 | # Apply bistatic delay correction 142 | apply_bistatic_delay_correction: True 143 | 144 | # Apply static tropospheric delay correction 145 | apply_static_tropospheric_delay_correction: True 146 | 147 | # OPTIONAL - to control behavior of RTC module 148 | # (only applicable if geocode.apply_rtc is True) 149 | rtc: 150 | # OPTIONAL - Choices: 151 | # "gamma0" (default) 152 | # "sigma0" 153 | output_type: gamma0 154 | 155 | # OPTIONAL - Choices: 156 | # "bilinear_distribution" (default) 157 | # "area_projection" 158 | algorithm_type: area_projection 159 | 160 | # OPTIONAL - Choices: 161 | # "beta0" (default) 162 | # "sigma0" 163 | input_terrain_radiometry: beta0 164 | 165 | # OPTIONAL - Minimum RTC area factor in dB 166 | rtc_min_value_db: -30 167 | 168 | # RTC DEM upsampling 169 | dem_upsampling: 2 170 | 171 | # RTC area beta mode 172 | area_beta_mode: auto 173 | 174 | # OPTIONAL - to provide the number of processes when processing the bursts in parallel 175 | # "0" means that the number will be automatically decided based on 176 | # the number of cores, `OMP_NUM_THREADS` in environment setting, 177 | # and the number of burst to process in runconfig 178 | num_workers: 0 179 | 180 | # Geocoding options 181 | geocoding: 182 | 183 | # Apply valid-samples sub-swath masking 184 | apply_valid_samples_sub_swath_masking: True 185 | 186 | # Apply shadow masking 187 | apply_shadow_masking: True 188 | 189 | # Skip geocoding already processed, which is tested by the existence of the output files 190 | skip_if_output_files_exist: False 191 | 192 | # Geocoding algorithm type. Choices "area_projection" 193 | # for adaptive multilooking or an interpolation algorithm: 194 | # "sinc", "bilinear", "bicubic", "nearest", and "biquintic" 195 | algorithm_type: area_projection 196 | 197 | # OPTIONAL - Choices: "single_block", "geogrid", "geogrid_and_radargrid", and "auto" (default) 198 | memory_mode: 199 | 200 | # Save the incidence angle 201 | save_incidence_angle: False 202 | 203 | # Save the local-incidence angle 204 | save_local_inc_angle: False 205 | 206 | # Save the projection angle 207 | save_projection_angle: False 208 | 209 | # Save the RTC area normalization factor (ANF) computed with 210 | # the projection angle method 211 | save_rtc_anf_projection_angle: False 212 | 213 | # Save the range slope angle 214 | save_range_slope: False 215 | 216 | # Save the number of looks used to generate the RTC-S1 product 217 | save_nlooks: False 218 | 219 | # Save the area normalization factor (ANF) to normalize RTC-S1 220 | # imagery to the original SLC backscatter convention: 221 | # beta0 or sigma0 (ellipsoid) 222 | save_rtc_anf: False 223 | 224 | # Save the RTC area normalization factor (ANF) gamma0 to sigma0 225 | save_rtc_anf_gamma0_to_sigma0: False 226 | 227 | # Save the interpolated DEM used to generate the RTC-S1 product 228 | save_dem: False 229 | 230 | # Save layover shadow mask 231 | save_mask: True 232 | 233 | # Layover/shadow mask dilation size of shadow pixels 234 | # (values 1 and 3) 235 | shadow_dilation_size: 0 236 | 237 | # OPTIONAL - Absolute radiometric correction 238 | abs_rad_cal: 1 239 | 240 | # OPTIONAL - Clip values above threshold 241 | clip_max: 242 | 243 | # OPTIONAL - Clip values below threshold 244 | clip_min: 245 | 246 | # Double SLC sampling in the range direction 247 | upsample_radargrid: False 248 | 249 | # Fields to populate the products' metadata required by 250 | # CEOS Analysis Ready Data specifications 251 | estimated_geometric_accuracy_bias_x: 252 | estimated_geometric_accuracy_bias_y: 253 | estimated_geometric_accuracy_stddev_x: 254 | estimated_geometric_accuracy_stddev_y: 255 | 256 | bursts_geogrid: 257 | 258 | # Bursts' EPSG code. If not provided, `output_epsg` will 259 | # be determined based on the scene center: 260 | # - If center_lat >= 75.0: 3413 261 | # - If center_lat <= -75.0: 3031 262 | # - Otherwise: EPSG code associated with the closest UTM zone 263 | output_epsg: 264 | x_posting: 30 265 | y_posting: 30 266 | x_snap: 30 267 | y_snap: 30 268 | top_left: 269 | x: 270 | y: 271 | bottom_right: 272 | x: 273 | y: 274 | 275 | # Mosaicking options 276 | mosaicking: 277 | 278 | # OPTIONAL - Choices: "average", "first", "bursts_center" (default) 279 | mosaic_mode: first 280 | 281 | mosaic_geogrid: 282 | 283 | # Mosaic EPSG code. If not provided, `output_epsg` will 284 | # be determined based on the scene center: 285 | # - If center_lat >= 75.0: 3413 286 | # - If center_lat <= -75.0: 3031 287 | # - Otherwise: EPSG code associated with the closest UTM zone 288 | output_epsg: 289 | x_posting: 30 290 | y_posting: 30 291 | x_snap: 30 292 | y_snap: 30 293 | top_left: 294 | x: 295 | y: 296 | bottom_right: 297 | x: 298 | y: 299 | 300 | 301 | browse_image_group: 302 | 303 | # If neither height or width parameters are provided, the browse 304 | # image is generated with the same pixel spacing of the RTC-S1 305 | # imagery (burst or mosaic). 306 | 307 | # If the height parameter is provided but the width is not provided, 308 | # a new width is assigned in order to keep the aspect ratio 309 | # of the RTC-S1 geographic grid. 310 | 311 | # Conversely, if the width parameter is provided but the height is not, 312 | # a new height is assigned in order to keep the aspect ratio 313 | # of the RTC-S1 geographic grid. 314 | 315 | # Height in pixels for the PNG browse image of RTC-S1 bursts. 316 | browse_image_burst_height: 2048 317 | 318 | # Width in pixels for the PNG browse image of RTC-S1 bursts 319 | browse_image_burst_width: 320 | 321 | # Height in pixels for the PNG browse image of RTC-S1 mosaics. 322 | browse_image_mosaic_height: 2048 323 | 324 | # Width in pixels for the PNG browse image of RTC-S1 mosaics 325 | browse_image_mosaic_width: 326 | -------------------------------------------------------------------------------- /src/rtc/defaults/rtc_s1_static.yaml: -------------------------------------------------------------------------------- 1 | runconfig: 2 | name: rtc_s1_workflow_default 3 | 4 | groups: 5 | 6 | primary_executable: 7 | 8 | # Required. Output product type: "RTC_S1" or "RTC_S1_STATIC" 9 | product_type: RTC_S1_STATIC 10 | 11 | pge_name_group: 12 | pge_name: RTC_S1_PGE 13 | 14 | input_file_group: 15 | # Required. List of SAFE files (min=1) 16 | safe_file_path: 17 | 18 | # Location from where the source data can be retrieved (URL or DOI) 19 | source_data_access: 20 | 21 | # Required. List of orbit (EOF) files (min=1) 22 | orbit_file_path: 23 | 24 | # Optional. Burst ID to process (empty for all bursts) 25 | burst_id: 26 | 27 | dynamic_ancillary_file_group: 28 | # Digital elevation model 29 | dem_file: 30 | 31 | # Digital elevation model source description 32 | dem_file_description: 33 | 34 | static_ancillary_file_group: 35 | 36 | # burst database sqlite file 37 | burst_database_file: 38 | 39 | product_group: 40 | 41 | processing_type: NOMINAL 42 | 43 | product_version: 44 | 45 | # Directory where PGE will place results 46 | product_path: 47 | # Directory where SAS writes temporary data 48 | scratch_path: 49 | 50 | # If option `save_bursts` is set, output bursts are saved to: 51 | # {output_dir}/{burst_id}/{product_id}{suffix}.{ext} 52 | # If option `save_mosaics` is set, output mosaics are saved to: 53 | # {output_dir}/{product_id}{suffix}.{ext} 54 | # 55 | # If the `product_id` contains the substring "_{burst_id}", the 56 | # substring will be substituted by either: 57 | # - "_" followed by the burst ID, if the product is a burst; or 58 | # - An empty string, if the product is a mosaic. 59 | # 60 | # For example, the `product_id` = `RTC-S1-STATIC_{burst_id}_S1B` will become 61 | # `RTC-S1-STATIC_069-147170-IW1_S1B` for the burst t069-147170-IW1; and it 62 | # will become `RTC-S1-STATIC_S1B` for the mosaic product. 63 | # 64 | # If the field `product_id` is left empty, the burst product ID will 65 | # follow the RTC-S1-STATIC file naming conventions: 66 | # `OPERA_L2_RTC-S1-STATIC_{burst_id}_{rtc_s1_static_validity_start_date}_ 67 | # {processing_datetime}_{sensor}_{pixel_spacing} 68 | # _{product_version}`. 69 | # 70 | # `suffix` is only used when there are multiple output files. 71 | # `ext` is determined by geocoding_options.output_imagery_format. 72 | output_dir: 73 | product_id: 74 | 75 | # Validity start date for RTC-S1-STATIC products in the format YYYYMMDD 76 | rtc_s1_static_validity_start_date: 77 | 78 | # Location from where the output product can be retrieved (URL or DOI) 79 | product_data_access: 80 | 81 | # Location of the static layers product associated with this product (URL or DOI 82 | static_layers_data_access: 83 | 84 | # Save RTC-S1 bursts 85 | save_bursts: True 86 | 87 | # Save mosaic of RTC-S1 bursts 88 | save_mosaics: False 89 | 90 | # Save browse image(s) 91 | save_browse: True 92 | 93 | output_imagery_format: COG 94 | output_imagery_compression: DEFLATE 95 | output_imagery_nbits: 32 96 | 97 | # Optional. Save secondary layers (e.g., inc. angle) within 98 | # the HDF5 file 99 | save_secondary_layers_as_hdf5: False 100 | 101 | # Save RTC-S1 metadata in the HDF5 format 102 | # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or 103 | # 'COG', and enabled by default for `output_imagery_format` equal 104 | # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True 105 | save_metadata: False 106 | 107 | processing: 108 | 109 | # Check if ancillary inputs cover entirely the output product 110 | check_ancillary_inputs_coverage: True 111 | 112 | # Polarization channels to process. 113 | polarization: 114 | 115 | # Options to run geo2rdr 116 | geo2rdr: 117 | threshold: 1.0e-7 118 | numiter: 50 119 | 120 | # Options to run rdr2geo 121 | rdr2geo: 122 | threshold: 1.0e-7 123 | numiter: 25 124 | 125 | # DEM interpolation method 126 | dem_interpolation_method: biquintic 127 | 128 | # Apply absolute radiometric correction 129 | apply_absolute_radiometric_correction: True 130 | 131 | # Apply thermal noise correction 132 | apply_thermal_noise_correction: True 133 | 134 | # slant range spacing of the correction LUT in meters 135 | correction_lut_range_spacing_in_meters: 120 136 | # Azimuth time spacing of the correction LUT in meters 137 | correction_lut_azimuth_spacing_in_meters: 120 138 | 139 | # OPTIONAL - Apply RTC 140 | apply_rtc: True 141 | 142 | # Apply bistatic delay correction 143 | apply_bistatic_delay_correction: False 144 | 145 | # Apply static tropospheric delay correction 146 | apply_static_tropospheric_delay_correction: False 147 | 148 | # OPTIONAL - to control behavior of RTC module 149 | # (only applicable if geocode.apply_rtc is True) 150 | rtc: 151 | # OPTIONAL - Choices: 152 | # "gamma0" (default) 153 | # "sigma0" 154 | output_type: gamma0 155 | 156 | # OPTIONAL - Choices: 157 | # "bilinear_distribution" (default) 158 | # "area_projection" 159 | algorithm_type: area_projection 160 | 161 | # OPTIONAL - Choices: 162 | # "beta0" (default) 163 | # "sigma0" 164 | input_terrain_radiometry: beta0 165 | 166 | # OPTIONAL - Minimum RTC area factor in dB 167 | rtc_min_value_db: -30 168 | 169 | # RTC DEM upsampling 170 | dem_upsampling: 2 171 | 172 | # RTC area beta mode 173 | area_beta_mode: auto 174 | 175 | # OPTIONAL - to provide the number of processes when processing the bursts in parallel 176 | # "0" means that the number will be automatically decided based on 177 | # the number of cores, `OMP_NUM_THREADS` in environment setting, 178 | # and the number of burst to process in runconfig 179 | num_workers: 0 180 | 181 | # Geocoding options 182 | geocoding: 183 | 184 | # Apply valid-samples sub-swath masking 185 | apply_valid_samples_sub_swath_masking: True 186 | 187 | # Apply shadow masking 188 | apply_shadow_masking: True 189 | 190 | # Skip geocoding already processed, which is tested by the existence of the output files 191 | skip_if_output_files_exist: False 192 | 193 | # Geocoding algorithm type. Choices "area_projection" 194 | # for adaptive multilooking or an interpolation algorithm: 195 | # "sinc", "bilinear", "bicubic", "nearest", and "biquintic" 196 | algorithm_type: area_projection 197 | 198 | # OPTIONAL - Choices: "single_block", "geogrid", "geogrid_and_radargrid", and "auto" (default) 199 | memory_mode: 200 | 201 | # Save the incidence angle 202 | save_incidence_angle: True 203 | 204 | # Save the local-incidence angle 205 | save_local_inc_angle: True 206 | 207 | # Save the projection angle 208 | save_projection_angle: False 209 | 210 | # Save the RTC area normalization factor (ANF) computed with 211 | # the projection angle method 212 | save_rtc_anf_projection_angle: False 213 | 214 | # Save the range slope angle 215 | save_range_slope: False 216 | 217 | # Save the number of looks used to generate the RTC-S1 product 218 | save_nlooks: True 219 | 220 | # Save the area normalization factor (ANF) to normalize RTC-S1 221 | # imagery to the original SLC backscatter convention: 222 | # beta0 or sigma0 (ellipsoid) 223 | save_rtc_anf: True 224 | 225 | # Save the RTC area normalization factor (ANF) gamma0 to sigma0 226 | save_rtc_anf_gamma0_to_sigma0: True 227 | 228 | # Save the interpolated DEM used to generate the RTC-S1 product 229 | save_dem: False 230 | 231 | # Save layover shadow mask 232 | save_mask: True 233 | 234 | # Layover/shadow mask dilation size of shadow pixels 235 | # (values 1 and 3) 236 | shadow_dilation_size: 0 237 | 238 | # OPTIONAL - Absolute radiometric correction 239 | abs_rad_cal: 1 240 | 241 | # OPTIONAL - Clip values above threshold 242 | clip_max: 243 | 244 | # OPTIONAL - Clip values below threshold 245 | clip_min: 246 | 247 | # Double SLC sampling in the range direction 248 | upsample_radargrid: False 249 | 250 | # Fields to populate the products' metadata required by 251 | # CEOS Analysis Ready Data specifications 252 | estimated_geometric_accuracy_bias_x: 253 | estimated_geometric_accuracy_bias_y: 254 | estimated_geometric_accuracy_stddev_x: 255 | estimated_geometric_accuracy_stddev_y: 256 | 257 | bursts_geogrid: 258 | 259 | # Bursts' EPSG code. If not provided, `output_epsg` will 260 | # be determined based on the scene center: 261 | # - If center_lat >= 75.0: 3413 262 | # - If center_lat <= -75.0: 3031 263 | # - Otherwise: EPSG code associated with the closest UTM zone 264 | output_epsg: 265 | x_posting: 30 266 | y_posting: 30 267 | x_snap: 30 268 | y_snap: 30 269 | top_left: 270 | x: 271 | y: 272 | bottom_right: 273 | x: 274 | y: 275 | 276 | # Mosaicking options 277 | mosaicking: 278 | 279 | # OPTIONAL - Choices: "average", "first", "bursts_center" (default) 280 | mosaic_mode: first 281 | 282 | mosaic_geogrid: 283 | 284 | # Mosaic EPSG code. If not provided, `output_epsg` will 285 | # be determined based on the scene center: 286 | # - If center_lat >= 75.0: 3413 287 | # - If center_lat <= -75.0: 3031 288 | # - Otherwise: EPSG code associated with the closest UTM zone 289 | output_epsg: 290 | x_posting: 30 291 | y_posting: 30 292 | x_snap: 30 293 | y_snap: 30 294 | top_left: 295 | x: 296 | y: 297 | bottom_right: 298 | x: 299 | y: 300 | 301 | 302 | browse_image_group: 303 | 304 | # If neither height or width parameters are provided, the browse 305 | # image is generated with the same pixel spacing of the RTC-S1 306 | # imagery (burst or mosaic). 307 | 308 | # If the height parameter is provided but the width is not provided, 309 | # a new width is assigned in order to keep the aspect ratio 310 | # of the RTC-S1 geographic grid. 311 | 312 | # Conversely, if the width parameter is provided but the height is not, 313 | # a new height is assigned in order to keep the aspect ratio 314 | # of the RTC-S1 geographic grid. 315 | 316 | # Height in pixels for the PNG browse image of RTC-S1 bursts. 317 | browse_image_burst_height: 2048 318 | 319 | # Width in pixels for the PNG browse image of RTC-S1 bursts 320 | browse_image_burst_width: 321 | 322 | # Height in pixels for the PNG browse image of RTC-S1 mosaics. 323 | browse_image_mosaic_height: 2048 324 | 325 | # Width in pixels for the PNG browse image of RTC-S1 mosaics 326 | browse_image_mosaic_width: 327 | -------------------------------------------------------------------------------- /src/rtc/extern/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/opera-adt/RTC/c5cc7403af16bb67fd14f43bf2aec15c6a422297/src/rtc/extern/__init__.py -------------------------------------------------------------------------------- /src/rtc/extern/validate_cloud_optimized_geotiff.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # ***************************************************************************** 4 | # $Id$ 5 | # 6 | # Project: GDAL 7 | # Purpose: Validate Cloud Optimized GeoTIFF file structure 8 | # Author: Even Rouault, 9 | # 10 | # ***************************************************************************** 11 | # Copyright (c) 2017, Even Rouault 12 | # 13 | # Permission is hereby granted, free of charge, to any person obtaining a 14 | # copy of this software and associated documentation files (the "Software"), 15 | # to deal in the Software without restriction, including without limitation 16 | # the rights to use, copy, modify, merge, publish, distribute, sublicense, 17 | # and/or sell copies of the Software, and to permit persons to whom the 18 | # Software is furnished to do so, subject to the following conditions: 19 | # 20 | # The above copyright notice and this permission notice shall be included 21 | # in all copies or substantial portions of the Software. 22 | # 23 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS 24 | # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 25 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL 26 | # THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 27 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING 28 | # FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER 29 | # DEALINGS IN THE SOFTWARE. 30 | # ***************************************************************************** 31 | 32 | import os.path 33 | import struct 34 | import sys 35 | from osgeo import gdal 36 | 37 | 38 | def Usage(): 39 | print('Usage: validate_cloud_optimized_geotiff.py [-q] [--full-check=yes/no/auto] test.tif') 40 | print('') 41 | print('Options:') 42 | print('-q: quiet mode') 43 | print('--full-check=yes/no/auto: check tile/strip leader/trailer bytes. auto=yes for local files, and no for remote files') 44 | return 1 45 | 46 | 47 | class ValidateCloudOptimizedGeoTIFFException(Exception): 48 | pass 49 | 50 | 51 | def full_check_band(f, band_name, band, errors, 52 | block_order_row_major, 53 | block_leader_size_as_uint4, 54 | block_trailer_last_4_bytes_repeated, 55 | mask_interleaved_with_imagery): 56 | 57 | block_size = band.GetBlockSize() 58 | mask_band = None 59 | if mask_interleaved_with_imagery: 60 | mask_band = band.GetMaskBand() 61 | mask_block_size = mask_band.GetBlockSize() 62 | if block_size != mask_block_size: 63 | errors += [band_name + ': mask block size is different from its imagery band'] 64 | mask_band = None 65 | 66 | yblocks = (band.YSize + block_size[1] - 1) // block_size[1] 67 | xblocks = (band.XSize + block_size[0] - 1) // block_size[0] 68 | last_offset = 0 69 | for y in range(yblocks): 70 | for x in range(xblocks): 71 | 72 | offset = band.GetMetadataItem('BLOCK_OFFSET_%d_%d' % (x, y), 'TIFF') 73 | offset = int(offset) if offset is not None else 0 74 | bytecount = band.GetMetadataItem('BLOCK_SIZE_%d_%d' % (x, y), 'TIFF') 75 | bytecount = int(bytecount) if bytecount is not None else 0 76 | 77 | if offset > 0: 78 | if block_order_row_major and offset < last_offset: 79 | errors += [band_name + 80 | ': offset of block (%d, %d) is smaller than previous block' % (x, y)] 81 | 82 | if block_leader_size_as_uint4: 83 | gdal.VSIFSeekL(f, offset - 4, 0) 84 | leader_size = struct.unpack('= 4: 91 | gdal.VSIFSeekL(f, offset + bytecount - 4, 0) 92 | last_bytes = gdal.VSIFReadL(8, 1, f) 93 | if last_bytes[0:4] != last_bytes[4:8]: 94 | errors += [band_name + 95 | ': for block (%d, %d), trailer bytes are invalid' % (x, y)] 96 | 97 | if mask_band: 98 | offset_mask = mask_band.GetMetadataItem('BLOCK_OFFSET_%d_%d' % (x, y), 'TIFF') 99 | offset_mask = int(offset_mask) if offset_mask is not None else 0 100 | if offset > 0 and offset_mask > 0: 101 | #bytecount_mask = int(mask_band.GetMetadataItem('BLOCK_SIZE_%d_%d' % (x,y), 'TIFF')) 102 | expected_offset_mask = offset + bytecount + \ 103 | (4 if block_leader_size_as_uint4 else 0) + \ 104 | (4 if block_trailer_last_4_bytes_repeated else 0) 105 | if offset_mask != expected_offset_mask: 106 | errors += ['Mask of ' + band_name + ': for block (%d, %d), offset is %d, whereas %d was expected' % ( 107 | x, y, offset_mask, expected_offset_mask)] 108 | elif offset == 0 and offset_mask > 0: 109 | if block_order_row_major and offset_mask < last_offset: 110 | errors += ['Mask of ' + band_name + 111 | ': offset of block (%d, %d) is smaller than previous block' % (x, y)] 112 | 113 | offset = offset_mask 114 | 115 | last_offset = offset 116 | 117 | 118 | def validate(ds, check_tiled=True, full_check=False): 119 | """Check if a file is a (Geo)TIFF with cloud optimized compatible structure. 120 | 121 | Args: 122 | ds: GDAL Dataset for the file to inspect. 123 | check_tiled: Set to False to ignore missing tiling. 124 | full_check: Set to TRUe to check tile/strip leader/trailer bytes. Might be slow on remote files 125 | 126 | Returns 127 | A tuple, whose first element is an array of error messages 128 | (empty if there is no error), and the second element, a dictionary 129 | with the structure of the GeoTIFF file. 130 | 131 | Raises: 132 | ValidateCloudOptimizedGeoTIFFException: Unable to open the file or the 133 | file is not a Tiff. 134 | """ 135 | 136 | if int(gdal.VersionInfo('VERSION_NUM')) < 2020000: 137 | raise ValidateCloudOptimizedGeoTIFFException( 138 | 'GDAL 2.2 or above required') 139 | 140 | unicode_type = type(''.encode('utf-8').decode('utf-8')) 141 | if isinstance(ds, (str, unicode_type)): 142 | gdal.PushErrorHandler() 143 | ds = gdal.Open(ds) 144 | gdal.PopErrorHandler() 145 | if ds is None: 146 | raise ValidateCloudOptimizedGeoTIFFException( 147 | 'Invalid file : %s' % gdal.GetLastErrorMsg()) 148 | if ds.GetDriver().ShortName != 'GTiff': 149 | raise ValidateCloudOptimizedGeoTIFFException( 150 | 'The file is not a GeoTIFF') 151 | 152 | details = {} 153 | errors = [] 154 | warnings = [] 155 | filename = ds.GetDescription() 156 | main_band = ds.GetRasterBand(1) 157 | ovr_count = main_band.GetOverviewCount() 158 | filelist = ds.GetFileList() 159 | if filelist is not None and filename + '.ovr' in filelist: 160 | errors += [ 161 | 'Overviews found in external .ovr file. They should be internal'] 162 | 163 | if main_band.XSize > 512 or main_band.YSize > 512: 164 | if check_tiled: 165 | block_size = main_band.GetBlockSize() 166 | if block_size[0] == main_band.XSize and block_size[0] > 1024: 167 | errors += [ 168 | 'The file is greater than 512xH or Wx512, but is not tiled'] 169 | 170 | if ovr_count == 0: 171 | warnings += [ 172 | 'The file is greater than 512xH or Wx512, it is recommended ' 173 | 'to include internal overviews'] 174 | 175 | ifd_offset = int(main_band.GetMetadataItem('IFD_OFFSET', 'TIFF')) 176 | ifd_offsets = [ifd_offset] 177 | 178 | block_order_row_major = False 179 | block_leader_size_as_uint4 = False 180 | block_trailer_last_4_bytes_repeated = False 181 | mask_interleaved_with_imagery = False 182 | 183 | if ifd_offset not in (8, 16): 184 | 185 | # Check if there is GDAL hidden structural metadata 186 | f = gdal.VSIFOpenL(filename, 'rb') 187 | if not f: 188 | raise ValidateCloudOptimizedGeoTIFFException("Cannot open file") 189 | signature = struct.unpack('B' * 4, gdal.VSIFReadL(4, 1, f)) 190 | bigtiff = signature in ((0x49, 0x49, 0x2B, 0x00), (0x4D, 0x4D, 0x00, 0x2B)) 191 | if bigtiff: 192 | expected_ifd_pos = 16 193 | else: 194 | expected_ifd_pos = 8 195 | gdal.VSIFSeekL(f, expected_ifd_pos, 0) 196 | pattern = "GDAL_STRUCTURAL_METADATA_SIZE=%06d bytes\n" % 0 197 | got = gdal.VSIFReadL(len(pattern), 1, f).decode('LATIN1') 198 | if len(got) == len(pattern) and got.startswith('GDAL_STRUCTURAL_METADATA_SIZE='): 199 | size = int(got[len('GDAL_STRUCTURAL_METADATA_SIZE='):][0:6]) 200 | extra_md = gdal.VSIFReadL(size, 1, f).decode('LATIN1') 201 | block_order_row_major = 'BLOCK_ORDER=ROW_MAJOR' in extra_md 202 | block_leader_size_as_uint4 = 'BLOCK_LEADER=SIZE_AS_UINT4' in extra_md 203 | block_trailer_last_4_bytes_repeated = 'BLOCK_TRAILER=LAST_4_BYTES_REPEATED' in extra_md 204 | mask_interleaved_with_imagery = 'MASK_INTERLEAVED_WITH_IMAGERY=YES' in extra_md 205 | if 'KNOWN_INCOMPATIBLE_EDITION=YES' in extra_md: 206 | errors += ["KNOWN_INCOMPATIBLE_EDITION=YES is declared in the file"] 207 | expected_ifd_pos += len(pattern) + size 208 | expected_ifd_pos += expected_ifd_pos % 2 # IFD offset starts on a 2-byte boundary 209 | gdal.VSIFCloseL(f) 210 | 211 | if expected_ifd_pos != ifd_offsets[0]: 212 | errors += [ 213 | 'The offset of the main IFD should be %d. It is %d instead' % (expected_ifd_pos, ifd_offsets[0])] 214 | 215 | details['ifd_offsets'] = {} 216 | details['ifd_offsets']['main'] = ifd_offset 217 | 218 | for i in range(ovr_count): 219 | # Check that overviews are by descending sizes 220 | ovr_band = ds.GetRasterBand(1).GetOverview(i) 221 | if i == 0: 222 | if (ovr_band.XSize > main_band.XSize or 223 | ovr_band.YSize > main_band.YSize): 224 | errors += [ 225 | 'First overview has larger dimension than main band'] 226 | else: 227 | prev_ovr_band = ds.GetRasterBand(1).GetOverview(i - 1) 228 | if (ovr_band.XSize > prev_ovr_band.XSize or 229 | ovr_band.YSize > prev_ovr_band.YSize): 230 | errors += [ 231 | 'Overview of index %d has larger dimension than ' 232 | 'overview of index %d' % (i, i - 1)] 233 | 234 | if check_tiled: 235 | block_size = ovr_band.GetBlockSize() 236 | if block_size[0] == ovr_band.XSize and block_size[0] > 1024: 237 | errors += [ 238 | 'Overview of index %d is not tiled' % i] 239 | 240 | # Check that the IFD of descending overviews are sorted by increasing 241 | # offsets 242 | ifd_offset = int(ovr_band.GetMetadataItem('IFD_OFFSET', 'TIFF')) 243 | ifd_offsets.append(ifd_offset) 244 | details['ifd_offsets']['overview_%d' % i] = ifd_offset 245 | if ifd_offsets[-1] < ifd_offsets[-2]: 246 | if i == 0: 247 | errors += [ 248 | 'The offset of the IFD for overview of index %d is %d, ' 249 | 'whereas it should be greater than the one of the main ' 250 | 'image, which is at byte %d' % 251 | (i, ifd_offsets[-1], ifd_offsets[-2])] 252 | else: 253 | errors += [ 254 | 'The offset of the IFD for overview of index %d is %d, ' 255 | 'whereas it should be greater than the one of index %d, ' 256 | 'which is at byte %d' % 257 | (i, ifd_offsets[-1], i - 1, ifd_offsets[-2])] 258 | 259 | # Check that the imagery starts by the smallest overview and ends with 260 | # the main resolution dataset 261 | 262 | def get_block_offset(band): 263 | blockxsize, blockysize = band.GetBlockSize() 264 | for y in range(int((band.YSize + blockysize - 1) / blockysize)): 265 | for x in range(int((band.XSize + blockxsize - 1) / blockxsize)): 266 | block_offset = band.GetMetadataItem('BLOCK_OFFSET_%d_%d' % (x, y), 'TIFF') 267 | if block_offset: 268 | return int(block_offset) 269 | return 0 270 | 271 | block_offset = get_block_offset(main_band) 272 | data_offsets = [block_offset] 273 | details['data_offsets'] = {} 274 | details['data_offsets']['main'] = block_offset 275 | for i in range(ovr_count): 276 | ovr_band = ds.GetRasterBand(1).GetOverview(i) 277 | block_offset = get_block_offset(ovr_band) 278 | data_offsets.append(block_offset) 279 | details['data_offsets']['overview_%d' % i] = block_offset 280 | 281 | if data_offsets[-1] != 0 and data_offsets[-1] < ifd_offsets[-1]: 282 | if ovr_count > 0: 283 | errors += [ 284 | 'The offset of the first block of the smallest overview ' 285 | 'should be after its IFD'] 286 | else: 287 | errors += [ 288 | 'The offset of the first block of the image should ' 289 | 'be after its IFD'] 290 | for i in range(len(data_offsets) - 2, 0, -1): 291 | if data_offsets[i] != 0 and data_offsets[i] < data_offsets[i + 1]: 292 | errors += [ 293 | 'The offset of the first block of overview of index %d should ' 294 | 'be after the one of the overview of index %d' % 295 | (i - 1, i)] 296 | if len(data_offsets) >= 2 and data_offsets[0] != 0 and data_offsets[0] < data_offsets[1]: 297 | errors += [ 298 | 'The offset of the first block of the main resolution image ' 299 | 'should be after the one of the overview of index %d' % 300 | (ovr_count - 1)] 301 | 302 | if full_check and (block_order_row_major or block_leader_size_as_uint4 or 303 | block_trailer_last_4_bytes_repeated or 304 | mask_interleaved_with_imagery): 305 | f = gdal.VSIFOpenL(filename, 'rb') 306 | if not f: 307 | raise ValidateCloudOptimizedGeoTIFFException("Cannot open file") 308 | 309 | full_check_band(f, 'Main resolution image', main_band, errors, 310 | block_order_row_major, 311 | block_leader_size_as_uint4, 312 | block_trailer_last_4_bytes_repeated, 313 | mask_interleaved_with_imagery) 314 | if main_band.GetMaskFlags() == gdal.GMF_PER_DATASET and \ 315 | (filename + '.msk') not in ds.GetFileList(): 316 | full_check_band(f, 'Mask band of main resolution image', 317 | main_band.GetMaskBand(), errors, 318 | block_order_row_major, 319 | block_leader_size_as_uint4, 320 | block_trailer_last_4_bytes_repeated, False) 321 | for i in range(ovr_count): 322 | ovr_band = ds.GetRasterBand(1).GetOverview(i) 323 | full_check_band(f, 'Overview %d' % i, ovr_band, errors, 324 | block_order_row_major, 325 | block_leader_size_as_uint4, 326 | block_trailer_last_4_bytes_repeated, 327 | mask_interleaved_with_imagery) 328 | if ovr_band.GetMaskFlags() == gdal.GMF_PER_DATASET and \ 329 | (filename + '.msk') not in ds.GetFileList(): 330 | full_check_band(f, 'Mask band of overview %d' % i, 331 | ovr_band.GetMaskBand(), errors, 332 | block_order_row_major, 333 | block_leader_size_as_uint4, 334 | block_trailer_last_4_bytes_repeated, False) 335 | gdal.VSIFCloseL(f) 336 | 337 | return warnings, errors, details 338 | 339 | 340 | def main(argv): 341 | """Return 0 in case of success, 1 for failure.""" 342 | 343 | i = 1 344 | filename = None 345 | quiet = False 346 | full_check = None 347 | while i < len(argv): 348 | if argv[i] == '-q': 349 | quiet = True 350 | elif argv[i] == '--full-check=yes': 351 | full_check = True 352 | elif argv[i] == '--full-check=no': 353 | full_check = False 354 | elif argv[i] == '--full-check=auto': 355 | full_check = None 356 | elif argv[i][0] == '-': 357 | return Usage() 358 | elif filename is None: 359 | filename = argv[i] 360 | else: 361 | return Usage() 362 | 363 | i += 1 364 | 365 | if filename is None: 366 | return Usage() 367 | 368 | if full_check is None: 369 | full_check = filename.startswith('/vsimem/') or os.path.exists(filename) 370 | 371 | try: 372 | ret = 0 373 | warnings, errors, details = validate(filename, full_check=full_check) 374 | if warnings: 375 | if not quiet: 376 | print('The following warnings were found:') 377 | for warning in warnings: 378 | print(' - ' + warning) 379 | print('') 380 | if errors: 381 | if not quiet: 382 | print('%s is NOT a valid cloud optimized GeoTIFF.' % filename) 383 | print('The following errors were found:') 384 | for error in errors: 385 | print(' - ' + error) 386 | print('') 387 | ret = 1 388 | else: 389 | if not quiet: 390 | print('%s is a valid cloud optimized GeoTIFF' % filename) 391 | 392 | if not quiet and not warnings and not errors: 393 | headers_size = min(details['data_offsets'][k] for k in details['data_offsets']) 394 | if headers_size == 0: 395 | headers_size = gdal.VSIStatL(filename).size 396 | print('\nThe size of all IFD headers is %d bytes' % headers_size) 397 | except ValidateCloudOptimizedGeoTIFFException as e: 398 | if not quiet: 399 | print('%s is NOT a valid cloud optimized GeoTIFF : %s' % 400 | (filename, str(e))) 401 | ret = 1 402 | 403 | return ret 404 | 405 | 406 | if __name__ == '__main__': 407 | sys.exit(main(sys.argv)) 408 | -------------------------------------------------------------------------------- /src/rtc/helpers.py: -------------------------------------------------------------------------------- 1 | '''collection of useful functions used across workflows''' 2 | 3 | import os 4 | import sqlite3 5 | import logging 6 | import journal 7 | import numpy as np 8 | from pyproj.transformer import Transformer 9 | from osgeo import gdal 10 | from shapely import geometry 11 | 12 | import isce3 13 | 14 | import rtc 15 | 16 | logger = logging.getLogger('rtc_s1') 17 | 18 | WORKFLOW_SCRIPTS_DIR = os.path.dirname(rtc.__file__) 19 | 20 | # get the basename given an input file path 21 | # example: get_module_name(__file__) 22 | get_module_name = lambda x : os.path.basename(x).split('.')[0] 23 | 24 | 25 | def check_file_path(file_path: str) -> None: 26 | """Check if file_path exist else raise an error. 27 | 28 | Parameters 29 | ---------- 30 | file_path : str 31 | Path to file to be checked 32 | """ 33 | if not os.path.exists(file_path): 34 | err_str = f'{file_path} not found' 35 | logger.error(err_str) 36 | raise FileNotFoundError(err_str) 37 | 38 | 39 | def check_directory(file_path: str) -> None: 40 | """Check if directory in file_path exists else raise an error. 41 | 42 | Parameters 43 | ---------- 44 | file_path: str 45 | Path to directory to be checked 46 | """ 47 | if not os.path.isdir(file_path): 48 | err_str = f'{file_path} not found' 49 | logger.error(err_str) 50 | raise FileNotFoundError(err_str) 51 | 52 | def get_file_polarization_mode(file_path: str) -> str: 53 | '''Check polarization mode from file name 54 | 55 | Taking PP from SAFE file name with following format: 56 | MMM_BB_TTTR_LFPP_YYYYMMDDTHHMMSS_YYYYMMDDTHHMMSS_OOOOOO_DDDDDD_CCCC.SAFE 57 | 58 | Parameters 59 | ---------- 60 | file_path : str 61 | SAFE file name to parse 62 | 63 | Returns 64 | ------- 65 | original: dict 66 | Default dictionary updated with user-defined options 67 | 68 | References 69 | ---------- 70 | https://sentinel.esa.int/web/sentinel/user-guides/sentinel-1-sar/naming-conventions 71 | ''' 72 | # index split tokens from rear to account for R in TTTR being possibly 73 | # replaced with '_' 74 | safe_pol_mode = os.path.basename(file_path).split('_')[-6][2:] 75 | 76 | return safe_pol_mode 77 | 78 | 79 | def deep_update(original, update): 80 | """Update default runconfig dict with user-supplied dict. 81 | 82 | Parameters 83 | ---------- 84 | original : dict 85 | Dict with default options to be updated 86 | update: dict 87 | Dict with user-defined options used to update original/default 88 | 89 | Returns 90 | ------- 91 | original: dict 92 | Default dictionary updated with user-defined options 93 | 94 | """ 95 | for key, val in update.items(): 96 | 97 | if isinstance(val, dict): 98 | original[key] = deep_update(original.get(key, {}), val) 99 | elif val is not None: 100 | original[key] = val 101 | 102 | # return updated original 103 | return original 104 | 105 | 106 | def check_write_dir(dst_path: str): 107 | """Check if given directory is writeable; else raise error. 108 | 109 | Parameters 110 | ---------- 111 | dst_path : str 112 | File path to directory for which to check writing permission 113 | """ 114 | if not dst_path: 115 | dst_path = '.' 116 | 117 | # check if scratch path exists 118 | dst_path_ok = os.path.isdir(dst_path) 119 | 120 | if not dst_path_ok: 121 | try: 122 | os.makedirs(dst_path, exist_ok=True) 123 | except OSError: 124 | err_str = f"Unable to create {dst_path}" 125 | logger.error(err_str) 126 | raise OSError(err_str) 127 | 128 | # check if path writeable 129 | write_ok = os.access(dst_path, os.W_OK) 130 | if not write_ok: 131 | err_str = f"{dst_path} scratch directory lacks write permission." 132 | logger.error(err_str) 133 | raise PermissionError(err_str) 134 | 135 | 136 | def check_dem(dem_path: str): 137 | """Check if given path is a GDAL-compatible file; else raise error 138 | 139 | Parameters 140 | ---------- 141 | dem_path : str 142 | File path to DEM for which to check GDAL-compatibility 143 | """ 144 | try: 145 | gdal.Open(dem_path, gdal.GA_ReadOnly) 146 | except: 147 | err_str = f'{dem_path} cannot be opened by GDAL' 148 | logger.error(err_str) 149 | raise ValueError(err_str) 150 | 151 | epsg = isce3.io.Raster(dem_path).get_epsg() 152 | if not 1024 <= epsg <= 32767: 153 | err_str = f'DEM epsg of {epsg} out of bounds' 154 | logger.error(err_str) 155 | raise ValueError(err_str) 156 | 157 | 158 | 159 | def bbox_to_utm(bbox, *, epsg_src, epsg_dst): 160 | """Convert bounding box coordinates to UTM. 161 | 162 | Parameters 163 | ---------- 164 | bbox : tuple 165 | Tuple containing the lon/lat bounding box coordinates 166 | (left, bottom, right, top) in degrees 167 | epsg_src : int 168 | EPSG code identifying input bbox coordinate system 169 | epsg_dst : int 170 | EPSG code identifying output coordinate system 171 | 172 | Returns 173 | ------- 174 | tuple 175 | Tuple containing the bounding box coordinates in UTM (meters) 176 | (left, bottom, right, top) 177 | """ 178 | xmin, ymin, xmax, ymax = bbox 179 | xys = _convert_to_utm([(xmin, ymin), (xmax, ymax)], epsg_src, epsg_dst) 180 | return (*xys[0], *xys[1]) 181 | 182 | 183 | def polygon_to_utm(poly, *, epsg_src, epsg_dst): 184 | """Convert a shapely.Polygon's coordinates to UTM. 185 | 186 | Parameters 187 | ---------- 188 | poly: shapely.geometry.Polygon 189 | Polygon object 190 | epsg : int 191 | EPSG code identifying output projection system 192 | 193 | Returns 194 | ------- 195 | tuple 196 | Tuple containing the bounding box coordinates in UTM (meters) 197 | (left, bottom, right, top) 198 | """ 199 | coords = np.array(poly.exterior.coords) 200 | xys = _convert_to_utm(coords, epsg_src, epsg_dst) 201 | return geometry.Polygon(xys) 202 | 203 | 204 | def _convert_to_utm(points_xy, epsg_src, epsg_dst): 205 | """Convert a list of points to a specified UTM coordinate system. 206 | 207 | If epsg_src is 4326 (lat/lon), assumes points_xy are in degrees. 208 | """ 209 | if epsg_dst == epsg_src: 210 | return points_xy 211 | 212 | t = Transformer.from_crs(epsg_src, epsg_dst, always_xy=True) 213 | xs, ys = np.array(points_xy).T 214 | xt, yt = t.transform(xs, ys) 215 | return list(zip(xt, yt)) 216 | 217 | 218 | def burst_bbox_from_db(burst_id, burst_db_file=None, burst_db_conn=None): 219 | """Find the bounding box of a burst in the database. 220 | 221 | Parameters 222 | ---------- 223 | burst_id : str 224 | JPL burst ID 225 | burst_db_file : str 226 | Location of burst database sqlite file, by default None 227 | burst_db_conn : sqlite3.Connection 228 | Connection object to burst database (If already connected) 229 | Alternative to providing burst_db_file, will be faster 230 | for multiply queries. 231 | 232 | Returns 233 | ------- 234 | epsg : int 235 | EPSG code(s) of burst bounding box(es) 236 | bbox : tuple[float] 237 | Bounding box of burst in EPSG coordinates. Bounding box given as 238 | tuple(xmin, ymin, xmax, ymax) 239 | 240 | Raises 241 | ------ 242 | ValueError 243 | If burst_id is not found in burst database 244 | """ 245 | # example burst db: 246 | # /home/staniewi/dev/burst_map_IW_000001_375887.OPERA-JPL.sqlite3 247 | if burst_db_conn is None: 248 | burst_db_conn = sqlite3.connect(burst_db_file) 249 | burst_db_conn.row_factory = sqlite3.Row # return rows as dicts 250 | 251 | query = "SELECT epsg, xmin, ymin, xmax, ymax FROM burst_id_map WHERE burst_id_jpl = ?" 252 | cur = burst_db_conn.execute(query, (burst_id,)) 253 | result = cur.fetchone() 254 | 255 | if not result: 256 | raise ValueError(f"Failed to find {burst_id} in {burst_db_file}") 257 | 258 | epsg = result["epsg"] 259 | bbox = (result["xmin"], result["ymin"], result["xmax"], result["ymax"]) 260 | 261 | return epsg, bbox 262 | 263 | 264 | def burst_bboxes_from_db(burst_ids, burst_db_file=None, burst_db_conn=None): 265 | """Find the bounding box of bursts in the database. 266 | 267 | Parameters 268 | ---------- 269 | burst_id : list[str] 270 | list of JPL burst IDs. 271 | burst_db_file : str 272 | Location of burst database sqlite file, by default None 273 | burst_db_conn : sqlite3.Connection 274 | Connection object to burst database (If already connected) 275 | Alternative to providing burst_db_file, will be faster 276 | for multiply queries. 277 | 278 | Returns 279 | ------- 280 | bboxes : dict 281 | Burst bounding boxes as a dict with burst IDs as key and tuples of 282 | EPSG and bounding boxes (tuple[float]) as values. Bounding box given as 283 | tuple(xmin, ymin, xmax, ymax) 284 | 285 | Raises 286 | ------ 287 | ValueError 288 | If no burst_ids are found in burst database 289 | """ 290 | # example burst db: 291 | # /home/staniewi/dev/burst_map_IW_000001_375887.OPERA-JPL.sqlite3 292 | if burst_db_conn is None: 293 | burst_db_conn = sqlite3.connect(burst_db_file) 294 | burst_db_conn.row_factory = sqlite3.Row # return rows as dicts 295 | 296 | # concatenate '?, ' with for each burst ID for IN query 297 | qs_in_query = ', '.join('?' for _ in burst_ids) 298 | query = f"SELECT * FROM burst_id_map WHERE burst_id_jpl IN ({qs_in_query})" 299 | cur = burst_db_conn.execute(query, burst_ids) 300 | results = cur.fetchall() 301 | 302 | if not results: 303 | raise ValueError(f"Failed to find {burst_ids} in {burst_db_file}") 304 | 305 | n_results = len(results) 306 | epsgs = [[]] * n_results 307 | bboxes = [[]] * n_results 308 | burst_ids = [[]] * n_results 309 | for i_result, result in enumerate(results): 310 | epsgs[i_result] = result["epsg"] 311 | bboxes[i_result] = (result["xmin"], result["ymin"], 312 | result["xmax"], result["ymax"]) 313 | burst_ids[i_result] = result["burst_id_jpl"] 314 | 315 | # TODO add warning if not all burst bounding boxes found 316 | return dict(zip(burst_ids, zip(epsgs, bboxes))) 317 | -------------------------------------------------------------------------------- /src/rtc/radar_grid.py: -------------------------------------------------------------------------------- 1 | import isce3 2 | 3 | 4 | def file_to_rdr_grid(ref_grid_path: str) -> isce3.product.RadarGridParameters: 5 | '''read parameters from text file needed to create radar grid object''' 6 | with open(ref_grid_path, 'r') as f_rdr_grid: 7 | sensing_start = float(f_rdr_grid.readline()) 8 | wavelength = float(f_rdr_grid.readline()) 9 | prf = float(f_rdr_grid.readline()) 10 | starting_range = float(f_rdr_grid.readline()) 11 | range_pixel_spacing = float(f_rdr_grid.readline()) 12 | length = int(f_rdr_grid.readline()) 13 | width = int(f_rdr_grid.readline()) 14 | # read date string and remove newline 15 | date_str = f_rdr_grid.readline() 16 | ref_epoch = isce3.core.DateTime(date_str[:-1]) 17 | 18 | rdr_grid = isce3.product.RadarGridParameters( 19 | sensing_start, wavelength, prf, starting_range, 20 | range_pixel_spacing, "right", length, width, 21 | ref_epoch) 22 | 23 | return rdr_grid 24 | 25 | 26 | def rdr_grid_to_file(ref_grid_path: str, 27 | rdr_grid: isce3.product.RadarGridParameters) -> None: 28 | '''save parameters needed to create a new radar grid object''' 29 | with open(ref_grid_path, "w") as f_rdr_grid: 30 | f_rdr_grid.write(str(rdr_grid.sensing_start) + '\n') 31 | f_rdr_grid.write(str(rdr_grid.wavelength) + '\n') 32 | f_rdr_grid.write(str(rdr_grid.prf) + '\n') 33 | f_rdr_grid.write(str(rdr_grid.starting_range) + '\n') 34 | f_rdr_grid.write(str(rdr_grid.range_pixel_spacing) + '\n') 35 | f_rdr_grid.write(str(rdr_grid.length) + '\n') 36 | f_rdr_grid.write(str(rdr_grid.width) + '\n') 37 | f_rdr_grid.write(str(rdr_grid.ref_epoch) + '\n') 38 | -------------------------------------------------------------------------------- /src/rtc/runconfig.py: -------------------------------------------------------------------------------- 1 | from __future__ import annotations 2 | from dataclasses import dataclass 3 | import os 4 | from types import SimpleNamespace 5 | import sys 6 | import numpy as np 7 | 8 | import isce3 9 | from isce3.product import GeoGridParameters 10 | 11 | import logging 12 | import yamale 13 | from ruamel.yaml import YAML 14 | 15 | from rtc import helpers 16 | from rtc.radar_grid import file_to_rdr_grid 17 | from rtc.geogrid import generate_geogrids, generate_geogrids_from_db 18 | from rtc.wrap_namespace import wrap_namespace, unwrap_to_dict 19 | from s1reader.s1_burst_slc import Sentinel1BurstSlc 20 | from s1reader.s1_orbit import get_orbit_file_from_list 21 | from s1reader.s1_reader import load_bursts 22 | 23 | STATIC_LAYERS_PRODUCT_TYPE = 'RTC_S1_STATIC' 24 | 25 | logger = logging.getLogger('rtc_s1') 26 | 27 | 28 | def load_parameters(cfg): 29 | ''' 30 | Load RTC-S1 specific parameters. 31 | ''' 32 | 33 | geocode_namespace = cfg.groups.processing.geocoding 34 | rtc_namespace = cfg.groups.processing.rtc 35 | 36 | if geocode_namespace.clip_max is None: 37 | geocode_namespace.clip_max = np.nan 38 | 39 | if geocode_namespace.clip_min is None: 40 | geocode_namespace.clip_min = np.nan 41 | 42 | if geocode_namespace.memory_mode == 'single_block': 43 | geocode_namespace.memory_mode = \ 44 | isce3.core.GeocodeMemoryMode.SingleBlock 45 | elif geocode_namespace.memory_mode == 'geogrid': 46 | geocode_namespace.memory_mode = \ 47 | isce3.core.GeocodeMemoryMode.BlocksGeogrid 48 | elif geocode_namespace.memory_mode == 'geogrid_and_radargrid': 49 | geocode_namespace.memory_mode = \ 50 | isce3.core.GeocodeMemoryMode.BlocksGeogridAndRadarGrid 51 | elif (geocode_namespace.memory_mode == 'auto' or 52 | geocode_namespace.memory_mode is None): 53 | geocode_namespace.memory_mode = \ 54 | isce3.core.GeocodeMemoryMode.Auto 55 | else: 56 | err_msg = f"ERROR memory_mode: {geocode_namespace.memory_mode}" 57 | raise ValueError(err_msg) 58 | 59 | rtc_output_type = rtc_namespace.output_type 60 | if rtc_output_type == 'sigma0': 61 | rtc_namespace.output_type_enum = \ 62 | isce3.geometry.RtcOutputTerrainRadiometry.SIGMA_NAUGHT 63 | else: 64 | rtc_namespace.output_type_enum = \ 65 | isce3.geometry.RtcOutputTerrainRadiometry.GAMMA_NAUGHT 66 | 67 | if rtc_namespace.input_terrain_radiometry == "sigma0": 68 | rtc_namespace.input_terrain_radiometry_enum = \ 69 | isce3.geometry.RtcInputTerrainRadiometry.SIGMA_NAUGHT_ELLIPSOID 70 | else: 71 | rtc_namespace.input_terrain_radiometry_enum = \ 72 | isce3.geometry.RtcInputTerrainRadiometry.BETA_NAUGHT 73 | 74 | if rtc_namespace.rtc_min_value_db is None: 75 | rtc_namespace.rtc_min_value_db = np.nan 76 | 77 | # Update the DEM interpolation method 78 | dem_interp_method = \ 79 | cfg.groups.processing.dem_interpolation_method 80 | 81 | if dem_interp_method == 'biquintic': 82 | dem_interp_method_enum = isce3.core.DataInterpMethod.BIQUINTIC 83 | elif dem_interp_method == 'sinc': 84 | dem_interp_method_enum = isce3.core.DataInterpMethod.SINC 85 | elif dem_interp_method == 'bilinear': 86 | dem_interp_method_enum = isce3.core.DataInterpMethod.BILINEAR 87 | elif dem_interp_method == 'bicubic': 88 | dem_interp_method_enum = isce3.core.DataInterpMethod.BICUBIC 89 | elif dem_interp_method == 'nearest': 90 | dem_interp_method_enum = isce3.core.DataInterpMethod.NEAREST 91 | else: 92 | err_msg = ('ERROR invalid DEM interpolation method:' 93 | f' {dem_interp_method}') 94 | raise ValueError(err_msg) 95 | 96 | cfg.groups.processing.dem_interpolation_method_enum = \ 97 | dem_interp_method_enum 98 | 99 | 100 | def load_validate_yaml(yaml_path: str) -> dict: 101 | """Initialize RunConfig class with options from given yaml file. 102 | 103 | Parameters 104 | ---------- 105 | yaml_path : str 106 | Path to yaml file containing the options to load 107 | """ 108 | schema_filepath = f'{helpers.WORKFLOW_SCRIPTS_DIR}/schemas/rtc_s1.yaml' 109 | try: 110 | schema = yamale.make_schema(schema_filepath, parser='ruamel') 111 | except: 112 | err_str = f'unable to load schema from file {schema_filepath}' 113 | logger.error(err_str) 114 | raise ValueError(err_str) 115 | 116 | # load yaml file or string from command line 117 | if os.path.isfile(yaml_path): 118 | try: 119 | data = yamale.make_data(yaml_path, parser='ruamel') 120 | except yamale.YamaleError as yamale_err: 121 | err_str = (f'Yamale unable to load runconfig yaml' 122 | f'{yaml_path} for validation.') 123 | logger.error(err_str) 124 | raise yamale.YamaleError(err_str) from yamale_err 125 | else: 126 | error_msg = f'ERROR file not found: {yaml_path}' 127 | raise FileNotFoundError(error_msg) 128 | 129 | # validate yaml file taken from command line 130 | try: 131 | yamale.validate(schema, data) 132 | except yamale.YamaleError as yamale_err: 133 | err_str = (f'Validation failed for runconfig yaml {yaml_path}.') 134 | logger.error(err_str) 135 | raise yamale.YamaleError(err_str) from yamale_err 136 | 137 | # load default runconfig 138 | parser = YAML(typ='safe') 139 | 140 | with open(yaml_path, 'r') as f_yaml: 141 | user_cfg = parser.load(f_yaml) 142 | 143 | product_type = user_cfg['runconfig']['groups']['primary_executable'][ 144 | 'product_type'] 145 | if (product_type == STATIC_LAYERS_PRODUCT_TYPE): 146 | default_cfg_path = (f'{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/' 147 | f'rtc_s1_static.yaml') 148 | print('Loading RTC-S1 runconfig default for static layers') 149 | else: 150 | default_cfg_path = (f'{helpers.WORKFLOW_SCRIPTS_DIR}/defaults/' 151 | f'rtc_s1.yaml') 152 | print('Loading RTC-S1 runconfig default') 153 | 154 | with open(default_cfg_path, 'r') as f_default: 155 | default_cfg = parser.load(f_default) 156 | 157 | # Copy user-supplied configuration options into default runconfig 158 | helpers.deep_update(default_cfg, user_cfg) 159 | 160 | # Validate YAML values under groups dict 161 | validate_group_dict(default_cfg['runconfig']['groups']) 162 | 163 | return default_cfg 164 | 165 | 166 | def validate_group_dict(group_cfg: dict) -> None: 167 | """Check and validate runconfig entries. 168 | 169 | Parameters 170 | ---------- 171 | group_cfg : dict 172 | Dictionary storing runconfig options to validate 173 | """ 174 | 175 | # Check 'input_file_group' section of runconfig 176 | input_group = group_cfg['input_file_group'] 177 | 178 | # Check SAFE files 179 | run_pol_mode = group_cfg['processing']['polarization'] 180 | safe_pol_modes = [] 181 | for safe_file in input_group['safe_file_path']: 182 | # Check if files exists 183 | helpers.check_file_path(safe_file) 184 | 185 | # Get and save safe pol mode ('SV', 'SH', 'DH', 'DV') 186 | safe_pol_mode = helpers.get_file_polarization_mode(safe_file) 187 | safe_pol_modes.append(safe_pol_mode) 188 | 189 | # Raise error if given co-pol file and expecting cross-pol or dual-pol 190 | if run_pol_mode != 'co-pol' and safe_pol_mode in ['SV', 'SH']: 191 | err_str = f'{run_pol_mode} polarization lacks cross-pol in {safe_file}' 192 | logger.error(err_str) 193 | raise ValueError(err_str) 194 | 195 | # Check SAFE file pols consistency. i.e. no *H/*V with *V/*H respectively 196 | if len(safe_pol_modes) > 1: 197 | first_safe_pol_mode = safe_pol_modes[0][1] 198 | for safe_pol_mode in safe_pol_modes[1:]: 199 | if safe_pol_mode[1] != first_safe_pol_mode: 200 | err_str = 'SH/SV SAFE file mixed with DH/DV' 201 | logger.error(err_str) 202 | raise ValueError(err_str) 203 | 204 | for orbit_file in input_group['orbit_file_path']: 205 | helpers.check_file_path(orbit_file) 206 | 207 | # Check 'dynamic_ancillary_file_groups' section of runconfig 208 | # Check that DEM file exists and is GDAL-compatible 209 | dem_path = group_cfg['dynamic_ancillary_file_group']['dem_file'] 210 | helpers.check_file_path(dem_path) 211 | helpers.check_dem(dem_path) 212 | 213 | # Check 'product_group' section of runconfig. 214 | # Check that directories herein have writing permissions 215 | product_group = group_cfg['product_group'] 216 | helpers.check_write_dir(product_group['product_path']) 217 | helpers.check_write_dir(product_group['scratch_path']) 218 | 219 | 220 | def runconfig_to_bursts(cfg: SimpleNamespace): 221 | '''Return bursts based on parameters in given runconfig 222 | 223 | Parameters 224 | ---------- 225 | cfg : SimpleNamespace 226 | Configuration of bursts to be loaded. 227 | 228 | Returns 229 | ------- 230 | bursts : list[Sentinel1BurstSlc] 231 | List of bursts loaded according to given configuration. 232 | orbit_file_path : str 233 | Orbit file path 234 | ''' 235 | 236 | # dict to store list of bursts keyed by burst_ids 237 | bursts = {} 238 | 239 | # extract given SAFE zips to find bursts identified in cfg.burst_id 240 | for safe_file in cfg.input_file_group.safe_file_path: 241 | # get orbit file 242 | orbit_file_path = get_orbit_file_from_list( 243 | safe_file, 244 | cfg.input_file_group.orbit_file_path) 245 | 246 | if not orbit_file_path: 247 | err_str = ("No orbit file correlates to safe file:" 248 | f" {os.path.basename(safe_file)}") 249 | logger.error(err_str) 250 | raise ValueError(err_str) 251 | 252 | # from SAFE file mode, create dict of runconfig pol mode to 253 | # polarization(s) 254 | safe_pol_mode = helpers.get_file_polarization_mode(safe_file) 255 | if safe_pol_mode == 'SV': 256 | mode_to_pols = {'co-pol': ['VV']} 257 | elif safe_pol_mode == 'DV': 258 | mode_to_pols = {'co-pol': ['VV'], 'cross-pol': ['VH'], 259 | 'dual-pol': ['VV', 'VH']} 260 | elif safe_pol_mode == 'SH': 261 | mode_to_pols = {'co-pol': ['HH']} 262 | else: 263 | mode_to_pols = {'co-pol': ['HH'], 'cross-pol': ['HV'], 264 | 'dual-pol': ['HH', 'HV']} 265 | pols = mode_to_pols[cfg.processing.polarization] 266 | 267 | # zip pol and IW subswath indices together 268 | i_subswaths = [1, 2, 3] 269 | pol_subswath_index_pairs = [(pol, i) 270 | for pol in pols for i in i_subswaths] 271 | 272 | # list of burst ID + polarization tuples 273 | # used to prevent reference repeats 274 | id_pols_found = [] 275 | 276 | # loop over pol and subswath index combinations 277 | for pol, i_subswath in pol_subswath_index_pairs: 278 | 279 | # loop over burst objs extracted from SAFE zip 280 | for burst in load_bursts(safe_file, orbit_file_path, i_subswath, 281 | pol, flag_apply_eap=False): 282 | # get burst ID 283 | burst_id = str(burst.burst_id) 284 | 285 | # is burst_id wanted? skip if not given in config 286 | if (cfg.input_file_group.burst_id is not None and 287 | burst_id not in cfg.input_file_group.burst_id): 288 | continue 289 | 290 | # get polarization and save as tuple with burst ID 291 | pol = burst.polarization 292 | id_pol = (burst_id, pol) 293 | 294 | # has burst_id + pol combo been found? 295 | burst_id_pol_exist = id_pol in id_pols_found 296 | if burst_id_pol_exist: 297 | continue 298 | 299 | id_pols_found.append(id_pol) 300 | 301 | # append burst to bursts dict 302 | if burst_id not in bursts.keys(): 303 | bursts[burst_id] = {} 304 | bursts[burst_id][pol] = burst 305 | 306 | # check if no bursts were found 307 | if not bursts: 308 | err_str = ("Could not find any of the burst IDs in the provided safe" 309 | " files") 310 | logger.error(err_str) 311 | raise ValueError(err_str) 312 | 313 | return bursts, orbit_file_path 314 | 315 | 316 | def get_ref_radar_grid_info(ref_path, burst_id): 317 | ''' Find all reference radar grids info 318 | 319 | Parameters 320 | ---------- 321 | ref_path: str 322 | Path where reference radar grids processing is stored 323 | burst_id: str 324 | Burst IDs for reference radar grids 325 | 326 | Returns 327 | ------- 328 | ref_radar_grids: 329 | reference radar path and grid values found associated with 330 | burst ID keys 331 | ''' 332 | rdr_grid_files = f'{ref_path}/radar_grid.txt' 333 | 334 | if not os.path.isfile(rdr_grid_files): 335 | raise FileNotFoundError('No reference radar grids not found in' 336 | f' {ref_path}') 337 | 338 | ref_rdr_path = os.path.dirname(rdr_grid_files) 339 | ref_rdr_grid = file_to_rdr_grid(rdr_grid_files) 340 | 341 | return ReferenceRadarInfo(ref_rdr_path, ref_rdr_grid) 342 | 343 | 344 | def check_geogrid_dict(geocode_cfg: dict) -> None: 345 | 346 | # check output EPSG 347 | output_epsg = geocode_cfg['output_epsg'] 348 | if output_epsg is not None: 349 | # check 1024 <= output_epsg <= 32767: 350 | if output_epsg < 1024 or 32767 < output_epsg: 351 | err_str = f'output epsg {output_epsg} in YAML out of bounds' 352 | logger.error(err_str) 353 | raise ValueError(err_str) 354 | 355 | for xy in 'xy': 356 | # check posting value in current axis 357 | posting_key = f'{xy}_posting' 358 | if geocode_cfg[posting_key] is not None: 359 | posting = geocode_cfg[posting_key] 360 | if posting <= 0: 361 | err_str = '{xy} posting from config of {posting} <= 0' 362 | logger.error(err_str) 363 | raise ValueError(err_str) 364 | 365 | # check snap value in current axis 366 | snap_key = f'{xy}_snap' 367 | if geocode_cfg[snap_key] is not None: 368 | snap = geocode_cfg[snap_key] 369 | if snap <= 0: 370 | err_str = '{xy} snap from config of {snap} <= 0' 371 | logger.error(err_str) 372 | raise ValueError(err_str) 373 | 374 | 375 | @dataclass(frozen=True) 376 | class ReferenceRadarInfo: 377 | path: str 378 | grid: isce3.product.RadarGridParameters 379 | 380 | 381 | @dataclass(frozen=True) 382 | class RunConfig: 383 | '''dataclass containing RTC runconfig''' 384 | # workflow name 385 | name: str 386 | # runconfig options converted from dict 387 | groups: SimpleNamespace 388 | # list of lists where bursts in interior list have a common burst_id 389 | bursts: list[Sentinel1BurstSlc] 390 | # dict of reference radar paths and grids values keyed on burst ID 391 | # (empty/unused if rdr2geo) 392 | reference_radar_info: ReferenceRadarInfo 393 | # run config path 394 | run_config_path: str 395 | # output product geogrid 396 | geogrid: GeoGridParameters 397 | # dict of geogrids associated to burst IDs 398 | geogrids: dict[str, GeoGridParameters] 399 | # orbit file path 400 | orbit_file_path: str 401 | 402 | @classmethod 403 | def load_from_yaml(cls, yaml_path: str) -> RunConfig: 404 | """Initialize RunConfig class with options from given yaml file. 405 | 406 | Parameters 407 | ---------- 408 | yaml_path : str 409 | Path to yaml file containing the options to load 410 | workflow_name: str 411 | Name of the workflow for which uploading default options 412 | """ 413 | cfg = load_validate_yaml(yaml_path) 414 | groups_cfg = cfg['runconfig']['groups'] 415 | 416 | # Read mosaic dict 417 | mosaic_dict = groups_cfg['processing']['mosaicking'] 418 | check_geogrid_dict(mosaic_dict['mosaic_geogrid']) 419 | 420 | # Read geocoding dict 421 | geocoding_dict = groups_cfg['processing']['geocoding'] 422 | check_geogrid_dict(geocoding_dict['bursts_geogrid']) 423 | 424 | # Convert runconfig dict to SimpleNamespace 425 | sns = wrap_namespace(groups_cfg) 426 | 427 | # Load bursts 428 | bursts, orbit_file_path = runconfig_to_bursts(sns) 429 | 430 | # Load geogrids 431 | burst_database_file = groups_cfg['static_ancillary_file_group'][ 432 | 'burst_database_file'] 433 | if burst_database_file is None: 434 | geogrid_all, geogrids = generate_geogrids(bursts, geocoding_dict, 435 | mosaic_dict) 436 | else: 437 | geogrid_all, geogrids = generate_geogrids_from_db( 438 | bursts, geocoding_dict, mosaic_dict, burst_database_file) 439 | 440 | # Empty reference dict for base runconfig class constructor 441 | empty_ref_dict = {} 442 | 443 | return cls(cfg['runconfig']['name'], sns, bursts, empty_ref_dict, 444 | yaml_path, geogrid_all, geogrids, orbit_file_path) 445 | 446 | @property 447 | def geocoding_params(self) -> dict: 448 | return self.groups.processing.geocoding 449 | 450 | @property 451 | def burst_id(self) -> list[str]: 452 | return self.groups.input_file_group.burst_id 453 | 454 | @property 455 | def dem(self) -> str: 456 | return self.groups.dynamic_ancillary_file_group.dem_file 457 | 458 | @property 459 | def dem_file_description(self) -> str: 460 | return self.groups.dynamic_ancillary_file_group.dem_file_description 461 | 462 | @property 463 | def is_reference(self) -> bool: 464 | return self.groups.input_file_group.reference_burst.is_reference 465 | 466 | @property 467 | def orbit_path(self) -> bool: 468 | return self.groups.input_file_group.orbit_file_path 469 | 470 | @property 471 | def polarization(self) -> list[str]: 472 | return self.groups.processing.polarization 473 | 474 | @property 475 | def product_path(self): 476 | return self.groups.product_group.product_path 477 | 478 | @property 479 | def reference_path(self) -> str: 480 | return self.groups.input_file_group.reference_burst.file_path 481 | 482 | @property 483 | def rdr2geo_params(self) -> dict: 484 | return self.groups.processing.rdr2geo 485 | 486 | @property 487 | def geo2rdr_params(self) -> dict: 488 | return self.groups.processing.geo2rdr 489 | 490 | @property 491 | def split_spectrum_params(self) -> dict: 492 | return self.groups.processing.range_split_spectrum 493 | 494 | @property 495 | def resample_params(self) -> dict: 496 | return self.groups.processing.resample 497 | 498 | @property 499 | def safe_files(self) -> list[str]: 500 | return self.groups.input_file_group.safe_file_path 501 | 502 | @property 503 | def product_id(self): 504 | return self.groups.product_group.product_id 505 | 506 | @property 507 | def scratch_path(self): 508 | return self.groups.product_group.scratch_path 509 | 510 | @property 511 | def gpu_enabled(self): 512 | return self.groups.worker.gpu_enabled 513 | 514 | @property 515 | def gpu_id(self): 516 | return self.groups.worker.gpu_id 517 | 518 | def as_dict(self): 519 | ''' Convert self to dict for write to YAML/JSON 520 | 521 | Unable to dataclasses.asdict() because isce3 objects can not be pickled 522 | ''' 523 | self_as_dict = {} 524 | for key, val in self.__dict__.items(): 525 | if key == 'groups': 526 | val = unwrap_to_dict(val) 527 | elif key == 'bursts': 528 | # just date in datetime obj as string 529 | date_str = lambda b : b.sensing_start.date().strftime('%Y%m%d') 530 | 531 | # create an unique burst key 532 | burst_as_key = lambda b : '_'.join([str(b.burst_id), 533 | date_str(b), 534 | b.polarization]) 535 | 536 | val = {burst_as_key(burst): burst.as_dict() for burst in val} 537 | self_as_dict[key] = val 538 | return self_as_dict 539 | 540 | def to_yaml(self): 541 | self_as_dict = self.as_dict() 542 | yaml = YAML(typ='safe') 543 | yaml.dump(self_as_dict, sys.stdout, indent=4) 544 | -------------------------------------------------------------------------------- /src/rtc/schemas/rtc_s1.yaml: -------------------------------------------------------------------------------- 1 | runconfig: 2 | name: str() 3 | 4 | groups: 5 | 6 | primary_executable: 7 | 8 | # Required. Output product type: "RTC_S1" or "RTC_S1_STATIC" 9 | product_type: enum('RTC_S1', 'RTC_S1_STATIC') 10 | 11 | pge_name_group: 12 | pge_name: enum('RTC_S1_PGE') 13 | 14 | input_file_group: 15 | # Required. List of SAFE files (min=1) 16 | safe_file_path: list(str(), min=1) 17 | 18 | # Location from where the source data can be retrieved (URL or DOI) 19 | source_data_access: str(required=False) 20 | 21 | # Required. List of orbit (EOF) files 22 | orbit_file_path: list(str(), min=1) 23 | 24 | # Optional. Burst ID to process (empty for all bursts) 25 | burst_id: list(str(), min=1, required=False) 26 | 27 | dynamic_ancillary_file_group: 28 | # Digital Elevation Model. 29 | dem_file: str(required=False) 30 | 31 | # Digital elevation model description 32 | dem_file_description: str(required=False) 33 | 34 | static_ancillary_file_group: 35 | 36 | # burst database sqlite file 37 | burst_database_file: str(required=False) 38 | 39 | product_group: 40 | 41 | processing_type: enum('NOMINAL', 'URGENT', 'CUSTOM', 'UNDEFINED', required=False) 42 | 43 | product_version: any(str(), num(), required=False) 44 | 45 | # Directory where PGE will place results 46 | product_path: str() 47 | # Directory where SAS can write temporary data 48 | scratch_path: str() 49 | 50 | # If option `save_bursts` is set, output bursts are saved to: 51 | # {output_dir}/{burst_id}/{product_id}{suffix}.{ext} 52 | # If option `save_mosaics` is set, output mosaics are saved to: 53 | # {output_dir}/{product_id}{suffix}.{ext} 54 | # 55 | # If the `product_id` contains the substring "_{burst_id}", the 56 | # substring will be substituted by either: 57 | # - "_" followed by the burst ID, if the product is a burst; or 58 | # - An empty string, if the product is a mosaic. 59 | # 60 | # For example, the `product_id` = `RTC-S1_{burst_id}_S1B` will become 61 | # `RTC-S1_069-147170-IW1_S1B` for the burst t069-147170-IW1; and it 62 | # will become `RTC-S1_S1B` for the mosaic product. 63 | # 64 | # For RTC-S1 products, if the field `product_id`` is left empty, 65 | # the burst product ID will follow the RTC-S1 file naming conventions: 66 | # `OPERA_L2_RTC-S1_{burst_id}_{sensing_start_datetime}_ 67 | # {processing_datetime}_{sensor}_{pixel_spacing} 68 | # _{product_version}`. 69 | # 70 | # For RTC-S1-STATIC products, if the field `product_id` is left empty, 71 | # the burst product ID will follow the RTC-S1-STATIC file naming 72 | # conventions: 73 | # `OPERA_L2_RTC-S1-STATIC_{burst_id}_{rtc_s1_static_validity_start_date}_ 74 | # {processing_datetime}_{sensor}_{pixel_spacing} 75 | # _{product_version}`. 76 | # 77 | # `suffix` is only used when there are multiple output files. 78 | # `ext` is determined by geocoding_options.output_imagery_format. 79 | output_dir: str() 80 | product_id: str(required=False) 81 | 82 | # Validity start date for RTC-S1-STATIC products in the format YYYYMMDD 83 | rtc_s1_static_validity_start_date: int(min=20000101, max=21991231,required=False) 84 | 85 | # Location from where the output product can be retrieved (URL or DOI) 86 | product_data_access: str(required=False) 87 | 88 | # Location of the static layers product associated with this product (URL or DOI 89 | static_layers_data_access: str(required=False) 90 | 91 | # Save RTC-S1 products 92 | save_bursts: bool(required=False) 93 | 94 | # Save mosaic of RTC-S1 bursts 95 | save_mosaics: bool(required=False) 96 | 97 | # Save browse image(s) 98 | save_browse: bool(required=False) 99 | 100 | output_imagery_format: enum('HDF5', 'NETCDF', 'GTiff', 'COG', 'ENVI', required=False) 101 | output_imagery_compression: str(required=False) 102 | output_imagery_nbits: int(min=1, required=False) 103 | 104 | # Optional. Save secondary layers (e.g., inc. angle) within 105 | # the HDF5 file 106 | save_secondary_layers_as_hdf5: bool(required=False) 107 | 108 | # Save RTC-S1 metadata in the HDF5 format 109 | # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or 110 | # 'COG', and enabled by default for `output_imagery_format` equal 111 | # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True 112 | save_metadata: bool(required=False) 113 | 114 | # This section includes parameters to tweak the workflow 115 | processing: include('processing_options', required=False) 116 | 117 | 118 | --- 119 | geo2rdr_options: 120 | # Convergence threshold for rdr2geo algorithm 121 | threshold: num(min=0, required=False) 122 | # Maximum number of iterations 123 | numiter: int(min=1, required=False) 124 | 125 | rdr2geo_options: 126 | # Convergence threshold for rdr2geo algorithm 127 | threshold: num(min=0, required=False) 128 | # Maximum number of iterations 129 | numiter: int(min=1, required=False) 130 | 131 | # Group of processing options 132 | processing_options: 133 | 134 | # Check if ancillary inputs cover entirely the output product 135 | check_ancillary_inputs_coverage: bool(required=False) 136 | 137 | # Polarization channels to process. 3 modes below correspond to VV, VH, VV+VH 138 | polarization: enum('co-pol', 'cross-pol', 'dual-pol', required=False) 139 | 140 | # Options to run geo2rdr 141 | geo2rdr: include('geo2rdr_options', required=False) 142 | 143 | # Options to run rdr2geo (for running topo when calculating layover shadow mask) 144 | rdr2geo: include('rdr2geo_options', required=False) 145 | 146 | # Range split-spectrum options 147 | range_split_spectrum: include('range_split_spectrum_options', required=False) 148 | 149 | # DEM interpolation method 150 | dem_interpolation_method: enum('sinc', 'bilinear', 'bicubic', 'nearest', 'biquintic', required=False) 151 | 152 | # Apply absolute radiometric correction 153 | apply_absolute_radiometric_correction: bool(required=False) 154 | 155 | # Apply thermal noise correction 156 | apply_thermal_noise_correction: bool(required=False) 157 | 158 | # slant range spacing of the correction LUT in meters 159 | correction_lut_range_spacing_in_meters: num(required=False) 160 | 161 | # Azimuth time spacing of the correction LUT in meters 162 | correction_lut_azimuth_spacing_in_meters: num(required=False) 163 | 164 | # Apply RTC 165 | apply_rtc: bool(required=False) 166 | 167 | # Apply bistatic delay correction 168 | apply_bistatic_delay_correction: bool(required=False) 169 | 170 | # Apply static tropospheric delay correction 171 | apply_static_tropospheric_delay_correction: bool(required=False) 172 | 173 | # Radiometric Terrain Correction (RTC) 174 | rtc: include('rtc_options', required=False) 175 | 176 | # Geocoding options 177 | geocoding: include('geocoding_options', required=False) 178 | 179 | # Mosaicking options 180 | mosaicking: include('mosaicking_options', required=False) 181 | 182 | # Browse image 183 | browse_image_group: include('browse_image_options', required=False) 184 | 185 | # Numper of parallel processes for burst processing 186 | num_workers: int(required=False) 187 | 188 | 189 | rtc_options: 190 | # RTC output type: empty value to turn off the RTC 191 | # The output_type defaults to "gamma0" if the key is absent 192 | output_type: enum('gamma0', 'sigma0', required=False) 193 | 194 | algorithm_type: enum('area_projection', 'bilinear_distribution', required=False) 195 | 196 | input_terrain_radiometry: enum('beta0', 'sigma0', required=False) 197 | 198 | # Minimum RTC area factor in dB 199 | rtc_min_value_db: num(required=False) 200 | 201 | # RTC DEM upsampling 202 | dem_upsampling: int(min=1, required=False) 203 | 204 | # RTC area beta mode 205 | area_beta_mode: enum('auto', 'pixel_area', 'projection_angle', required=False) 206 | 207 | 208 | geocoding_options: 209 | 210 | # OPTIONAL - Apply valid-samples sub-swath masking 211 | apply_valid_samples_sub_swath_masking: bool(required=False) 212 | 213 | # OPTIONAL - Apply shadow masking 214 | apply_shadow_masking: bool(required=False) 215 | 216 | # Skip geocoding already processed, which is tested by the existence of the output files 217 | skip_if_output_files_exist: bool(required=False) 218 | 219 | # Geocoding algorithm type, area projection or interpolation: 220 | # sinc, bilinear, bicubic, nearest, and biquintic 221 | algorithm_type: enum('area_projection', 'sinc', 'bilinear', 'bicubic', 'nearest', 'biquintic', required=False) 222 | 223 | # Memory mode 224 | memory_mode: enum('auto', 'single_block', 'geogrid', 'geogrid_and_radargrid', required=False) 225 | 226 | # Save the incidence angle 227 | save_incidence_angle: bool(required=False) 228 | 229 | # Save the local-incidence angle 230 | save_local_inc_angle: bool(required=False) 231 | 232 | # Save the projection angle 233 | save_projection_angle: bool(required=False) 234 | 235 | # Save the RTC area normalization factor (ANF) computed with 236 | # the projection angle method 237 | save_rtc_anf_projection_angle: bool(required=False) 238 | 239 | # Save the range slope angle 240 | save_range_slope: bool(required=False) 241 | 242 | # Save the number of looks used to generate the RTC-S1 product 243 | save_nlooks: bool(required=False) 244 | 245 | # Save the RTC area normalization factor (ANF) used to generate 246 | # the RTC product 247 | save_rtc_anf: bool(required=False) 248 | 249 | # Save the RTC area normalization factor (ANF) gamma0 to sigma0 250 | save_rtc_anf_gamma0_to_sigma0: bool(required=False) 251 | 252 | # Save the interpolated DEM used to generate the RTC-S1 product 253 | save_dem: bool(required=False) 254 | 255 | # Save layover/shadow mask 256 | save_mask: bool(required=False) 257 | 258 | # Layover/shadow mask dilation size of shadow pixels 259 | shadow_dilation_size: int(min=0, required=False) 260 | 261 | # OPTIONAL - Absolute radiometric correction 262 | abs_rad_cal: num(required=False) 263 | 264 | # Clip values above threshold 265 | clip_max: num(required=False) 266 | 267 | # Clip values below threshold 268 | clip_min: num(required=False) 269 | 270 | # Double SLC sampling in the range direction 271 | upsample_radargrid: bool(required=False) 272 | 273 | # Fields to populate the products' metadata required by 274 | # CEOS Analysis Ready Data specifications 275 | estimated_geometric_accuracy_bias_x: num(required=False) 276 | estimated_geometric_accuracy_bias_y: num(required=False) 277 | estimated_geometric_accuracy_stddev_x: num(required=False) 278 | estimated_geometric_accuracy_stddev_y: num(required=False) 279 | 280 | # Bursts geographic grid 281 | bursts_geogrid: include('output_grid_options', required=False) 282 | 283 | 284 | mosaicking_options: 285 | 286 | # Mosaic mode 287 | mosaic_mode: enum('average', 'first', 'bursts_center', required=False) 288 | 289 | # Mosaic geographic grid 290 | mosaic_geogrid: include('output_grid_options', required=False) 291 | 292 | 293 | output_grid_options: 294 | # Product EPSG code. If not provided, `output_epsg` will 295 | # be determined based on the scene center: 296 | # - If center_lat >= 75.0: 3413 297 | # - If center_lat <= -75.0: 3031 298 | # - Otherwise: EPSG code associated with the closest UTM zone 299 | output_epsg: int(min=1024, max=32767, required=False) 300 | 301 | # Product posting along X (same units as output_epsg) 302 | x_posting: num(min=0, required=False) 303 | 304 | # Product posting along Y (same units as output_epsg) 305 | y_posting: num(min=0, required=False) 306 | 307 | # Controls the product grid along X (same units as output_epsg) 308 | x_snap: num(min=0, required=False) 309 | 310 | # Controls the product grid along Y (same units as output_epsg) 311 | y_snap: num(min=0, required=False) 312 | 313 | # Top-left coordinates (same units as output_epsg) 314 | top_left: 315 | x: num(required=False) 316 | y: num(required=False) 317 | 318 | # Bottom-right coordinates (same units as output_epsg) 319 | bottom_right: 320 | x: num(required=False) 321 | y: num(required=False) 322 | 323 | 324 | browse_image_options: 325 | 326 | # If neither height or width parameters are provided, the browse 327 | # image is generated with the same pixel spacing of the RTC-S1 328 | # imagery (burst or mosaic). 329 | 330 | # If the height parameter is provided but the width is not provided, 331 | # a new width is assigned in order to keep the aspect ratio 332 | # of the RTC-S1 geographic grid. 333 | 334 | # Conversely, if the width parameter is provided but the height is not, 335 | # a new height is assigned in order to keep the aspect ratio 336 | # of the RTC-S1 geographic grid. 337 | 338 | # Height in pixels for the PNG browse image of RTC-S1 bursts. 339 | browse_image_burst_height: int(min=1, required=False) 340 | 341 | # Width in pixels for the PNG browse image of RTC-S1 bursts 342 | browse_image_burst_width: int(min=1, required=False) 343 | 344 | # Height in pixels for the PNG browse image of RTC-S1 mosaics. 345 | browse_image_mosaic_height: int(min=1, required=False) 346 | 347 | # Width in pixels for the PNG browse image of RTC-S1 mosaics 348 | browse_image_mosaic_width: int(min=1, required=False) 349 | -------------------------------------------------------------------------------- /src/rtc/version.py: -------------------------------------------------------------------------------- 1 | VERSION='1.0.4' 2 | -------------------------------------------------------------------------------- /src/rtc/wrap_namespace.py: -------------------------------------------------------------------------------- 1 | '''Utility functions to convert to/from nested dicts or lists, and nested namespaces 2 | 3 | References 4 | ---------- 5 | https://stackoverflow.com/a/50491016 6 | ''' 7 | 8 | from functools import singledispatch 9 | from types import SimpleNamespace 10 | 11 | 12 | @singledispatch 13 | def wrap_namespace(ob): 14 | return ob 15 | 16 | 17 | @wrap_namespace.register(dict) 18 | def _wrap_dict(ob): 19 | return SimpleNamespace(**{key: wrap_namespace(val) 20 | for key, val in ob.items()}) 21 | 22 | 23 | @wrap_namespace.register(list) 24 | def _wrap_list(ob): 25 | return [wrap_namespace(val) for val in ob] 26 | 27 | 28 | def unwrap_to_dict(sns: SimpleNamespace) -> dict: 29 | sns_as_dict = {} 30 | for key, val in sns.__dict__.items(): 31 | if isinstance(val, SimpleNamespace): 32 | sns_as_dict[key] = unwrap_to_dict(val) 33 | else: 34 | sns_as_dict[key] = val 35 | 36 | return sns_as_dict 37 | -------------------------------------------------------------------------------- /tests/runconfigs/s1b_los_angeles_mask_off.yaml: -------------------------------------------------------------------------------- 1 | runconfig: 2 | name: rtc_s1_workflow_default 3 | 4 | groups: 5 | primary_executable: 6 | product_type: RTC_S1 7 | 8 | pge_name_group: 9 | pge_name: RTC_S1_PGE 10 | 11 | input_file_group: 12 | # Required. List of SAFE files (min=1) 13 | safe_file_path: [data/s1b_los_angeles/input_dir/S1B_IW_SLC__1SDV_20200926T135152_20200926T135219_023547_02CBCC_F988-CROPPED.zip] 14 | 15 | # Required. List of orbit (EOF) files (min=1) 16 | orbit_file_path: [data/s1b_los_angeles/input_dir/S1B_OPER_AUX_PREORB_OPOD_20200926T111139_V20200926T095426_20200926T162926.EOF] 17 | 18 | # Optional. Burst ID to process (empty for all bursts) 19 | # burst_id: [t069_147173_iw1, t069_147174_iw1] 20 | burst_id: [t071_151225_iw1, t071_151226_iw1] 21 | 22 | dynamic_ancillary_file_group: 23 | # Digital elevation model 24 | dem_file: data/s1b_los_angeles/input_dir/dem.tif 25 | 26 | # Digital elevation model description 27 | dem_file_description: 28 | 29 | static_ancillary_file_group: 30 | 31 | # burst database sqlite file 32 | burst_database_file: 33 | 34 | product_group: 35 | processing_type: 'CUSTOM' 36 | 37 | # Directory where PGE will place results 38 | product_path: . 39 | 40 | # Directory where SAS writes temporary data 41 | scratch_path: data/s1b_los_angeles/scratch_dir 42 | 43 | # If option `save_bursts` is set, output bursts are saved to: 44 | # {output_dir}/{burst_id}/{product_id}{suffix}.{ext} 45 | # If option `save_mosaics` is set, output mosaics are saved to: 46 | # {output_dir}/{product_id}{suffix}.{ext} 47 | # If the field `product_id`` is left empty, the prefix "rtc_product" 48 | # will be used instead. 49 | # `suffix` is only used when there are multiple output files. 50 | # `ext` is determined by geocoding_options.output_imagery_format. 51 | output_dir: data/s1b_los_angeles/output_dir 52 | product_id: OPERA_L2_RTC-S1_071-151230-IW2_20200926T135152Z_20230125T134122Z_S1B_30 53 | 54 | save_bursts: True 55 | save_mosaics: True 56 | output_imagery_format: COG 57 | output_imagery_compression: ZSTD 58 | output_imagery_nbits: 16 59 | 60 | # Optional. Save secondary layers (e.g., inc. angle) within 61 | # the HDF5 file 62 | save_secondary_layers_as_hdf5: False 63 | 64 | # Save RTC-S1 metadata in the HDF5 format 65 | # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or 66 | # 'COG', and enabled by default for `output_imagery_format` equal 67 | # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True 68 | save_metadata: True 69 | 70 | processing: 71 | 72 | # Check if ancillary input cover entirely output products 73 | check_ancillary_inputs_coverage: True 74 | 75 | polarization: co-pol 76 | 77 | # Options to run geo2rdr 78 | geo2rdr: 79 | threshold: 1.0e-8 80 | numiter: 25 81 | 82 | # Options to run rdr2geo 83 | rdr2geo: 84 | threshold: 1.0e-7 85 | numiter: 25 86 | 87 | # Apply absolute radiometric correction 88 | apply_absolute_radiometric_correction: True 89 | 90 | # Apply thermal noise correction 91 | apply_thermal_noise_correction: True 92 | 93 | # OPTIONAL - Apply RTC 94 | apply_rtc: True 95 | 96 | # Apply bistatic delay correction 97 | apply_bistatic_delay_correction: True 98 | 99 | # Apply static tropospheric delay correction 100 | apply_static_tropospheric_delay_correction: True 101 | 102 | # OPTIONAL - to control behavior of RTC module 103 | # (only applicable if geocode.apply_rtc is True) 104 | rtc: 105 | # OPTIONAL - Choices: 106 | # "gamma0" (default) 107 | # "sigma0" 108 | output_type: gamma0 109 | 110 | # OPTIONAL - Choices: 111 | # "bilinear_distribution" (default) 112 | # "area_projection" 113 | algorithm_type: area_projection 114 | 115 | # OPTIONAL - Choices: 116 | # "beta0" (default) 117 | # "sigma0" 118 | input_terrain_radiometry: beta0 119 | 120 | # OPTIONAL - Minimum RTC area factor in dB 121 | rtc_min_value_db: -30 122 | 123 | # RTC DEM upsampling 124 | dem_upsampling: 1 125 | 126 | # OPTIONAL - Mechanism to specify output posting and DEM 127 | geocoding: 128 | 129 | # OPTIONAL - Apply valid-samples sub-swath masking 130 | apply_valid_samples_sub_swath_masking: True 131 | 132 | # OPTIONAL - Apply shadow masking 133 | apply_shadow_masking: False 134 | 135 | # OPTIONAL - 136 | algorithm_type: area_projection 137 | 138 | # OPTIONAL - Choices: "single_block", "geogrid", "geogrid_radargrid", and "auto" (default) 139 | memory_mode: auto 140 | 141 | # Save the incidence angle 142 | save_incidence_angle: False 143 | 144 | # Save the local-incidence angle 145 | save_local_inc_angle: True 146 | 147 | # Save the projection angle 148 | save_projection_angle: False 149 | 150 | # Save the RTC ANF compuated with the projection angle method 151 | save_rtc_anf_projection_angle: False 152 | 153 | # Save the range slope angle 154 | save_range_slope: False 155 | 156 | # Save the number of looks used to compute RTC-S1 157 | save_nlooks: True 158 | 159 | # Save the RTC area normalization factor (ANF) used to generate 160 | # the RTC product 161 | save_rtc_anf: True 162 | 163 | # Save the RTC area normalization factor (ANF) gamma0 to sigma0 164 | save_rtc_anf_gamma0_to_sigma0: False 165 | 166 | # Save interpolated DEM used to compute RTC-S1 167 | save_dem: False 168 | 169 | # Save layover shadow mask 170 | save_mask: False 171 | 172 | # OPTIONAL - Absolute radiometric correction 173 | abs_rad_cal: 1 174 | 175 | # OPTIONAL - Clip values above threshold 176 | clip_max: 177 | 178 | # OPTIONAL - Clip values below threshold 179 | clip_min: 180 | 181 | # OPTIONAL - Double sampling of the radar-grid 182 | # input sampling in the range direction 183 | upsample_radargrid: False 184 | 185 | bursts_geogrid: 186 | output_epsg: 187 | x_posting: 300 188 | y_posting: 300 189 | x_snap: 300 190 | y_snap: 300 191 | top_left: 192 | x: 193 | y: 194 | bottom_right: 195 | x: 196 | y: 197 | 198 | 199 | mosaicking: 200 | mosaic_geogrid: 201 | output_epsg: 202 | x_posting: 300 203 | y_posting: 300 204 | x_snap: 300 205 | y_snap: 300 206 | top_left: 207 | x: 208 | y: 209 | bottom_right: 210 | x: 211 | y: 212 | -------------------------------------------------------------------------------- /tests/runconfigs/s1b_los_angeles_mask_off_h5.yaml: -------------------------------------------------------------------------------- 1 | runconfig: 2 | name: rtc_s1_workflow_default 3 | 4 | groups: 5 | primary_executable: 6 | product_type: RTC_S1 7 | 8 | pge_name_group: 9 | pge_name: RTC_S1_PGE 10 | 11 | input_file_group: 12 | # Required. List of SAFE files (min=1) 13 | safe_file_path: [data/s1b_los_angeles/input_dir/S1B_IW_SLC__1SDV_20200926T135152_20200926T135219_023547_02CBCC_F988-CROPPED.zip] 14 | 15 | # Required. List of orbit (EOF) files (min=1) 16 | orbit_file_path: [data/s1b_los_angeles/input_dir/S1B_OPER_AUX_PREORB_OPOD_20200926T111139_V20200926T095426_20200926T162926.EOF] 17 | 18 | # Optional. Burst ID to process (empty for all bursts) 19 | # burst_id: [t069_147173_iw1, t069_147174_iw1] 20 | burst_id: [t071_151225_iw1, t071_151226_iw1] 21 | 22 | dynamic_ancillary_file_group: 23 | # Digital elevation model 24 | dem_file: data/s1b_los_angeles/input_dir/dem.tif 25 | 26 | # Digital elevation model description 27 | dem_file_description: 28 | 29 | static_ancillary_file_group: 30 | 31 | # burst database sqlite file 32 | burst_database_file: 33 | 34 | product_group: 35 | processing_type: 'CUSTOM' 36 | 37 | # Directory where PGE will place results 38 | product_path: . 39 | 40 | # Directory where SAS writes temporary data 41 | scratch_path: data/s1b_los_angeles/scratch_dir 42 | 43 | # If option `save_bursts` is set, output bursts are saved to: 44 | # {output_dir}/{burst_id}/{product_id}{suffix}.{ext} 45 | # If option `save_mosaics` is set, output mosaics are saved to: 46 | # {output_dir}/{product_id}{suffix}.{ext} 47 | # If the field `product_id`` is left empty, the prefix "rtc_product" 48 | # will be used instead. 49 | # `suffix` is only used when there are multiple output files. 50 | # `ext` is determined by geocoding_options.output_imagery_format. 51 | output_dir: data/s1b_los_angeles/output_dir 52 | product_id: OPERA_L2_RTC-S1_071-151230-IW2_20200926T135152Z_20230125T134122Z_S1B_30 53 | 54 | save_bursts: True 55 | save_mosaics: True 56 | output_imagery_format: HDF5 57 | output_imagery_compression: ZSTD 58 | output_imagery_nbits: 16 59 | 60 | # Optional. Save secondary layers (e.g., inc. angle) within 61 | # the HDF5 file 62 | save_secondary_layers_as_hdf5: True 63 | 64 | # Save RTC-S1 metadata in the HDF5 format 65 | # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or 66 | # 'COG', and enabled by default for `output_imagery_format` equal 67 | # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True 68 | save_metadata: True 69 | 70 | processing: 71 | 72 | # Check if ancillary input cover entirely output products 73 | check_ancillary_inputs_coverage: True 74 | 75 | polarization: co-pol 76 | 77 | # Options to run geo2rdr 78 | geo2rdr: 79 | threshold: 1.0e-8 80 | numiter: 25 81 | 82 | # Options to run rdr2geo 83 | rdr2geo: 84 | threshold: 1.0e-7 85 | numiter: 25 86 | 87 | # Apply absolute radiometric correction 88 | apply_absolute_radiometric_correction: True 89 | 90 | # Apply thermal noise correction 91 | apply_thermal_noise_correction: True 92 | 93 | # OPTIONAL - Apply RTC 94 | apply_rtc: True 95 | 96 | # Apply bistatic delay correction 97 | apply_bistatic_delay_correction: True 98 | 99 | # Apply static tropospheric delay correction 100 | apply_static_tropospheric_delay_correction: True 101 | 102 | # OPTIONAL - to control behavior of RTC module 103 | # (only applicable if geocode.apply_rtc is True) 104 | rtc: 105 | # OPTIONAL - Choices: 106 | # "gamma0" (default) 107 | # "sigma0" 108 | output_type: gamma0 109 | 110 | # OPTIONAL - Choices: 111 | # "bilinear_distribution" (default) 112 | # "area_projection" 113 | algorithm_type: area_projection 114 | 115 | # OPTIONAL - Choices: 116 | # "beta0" (default) 117 | # "sigma0" 118 | input_terrain_radiometry: beta0 119 | 120 | # OPTIONAL - Minimum RTC area factor in dB 121 | rtc_min_value_db: -30 122 | 123 | # RTC DEM upsampling 124 | dem_upsampling: 1 125 | 126 | # OPTIONAL - Mechanism to specify output posting and DEM 127 | geocoding: 128 | 129 | # OPTIONAL - Apply valid-samples sub-swath masking 130 | apply_valid_samples_sub_swath_masking: True 131 | 132 | # OPTIONAL - Apply shadow masking 133 | apply_shadow_masking: False 134 | 135 | # OPTIONAL - 136 | algorithm_type: area_projection 137 | 138 | # OPTIONAL - Choices: "single_block", "geogrid", "geogrid_radargrid", and "auto" (default) 139 | memory_mode: auto 140 | 141 | # Save the incidence angle 142 | save_incidence_angle: False 143 | 144 | # Save the local-incidence angle 145 | save_local_inc_angle: True 146 | 147 | # Save the projection angle 148 | save_projection_angle: False 149 | 150 | # Save the RTC ANF compuated with the projection angle method 151 | save_rtc_anf_projection_angle: False 152 | 153 | # Save the range slope angle 154 | save_range_slope: False 155 | 156 | # Save the number of looks used to compute RTC-S1 157 | save_nlooks: True 158 | 159 | # Save the RTC area normalization factor (ANF) used to generate 160 | # the RTC product 161 | save_rtc_anf: True 162 | 163 | # Save the RTC area normalization factor (ANF) gamma0 to sigma0 164 | save_rtc_anf_gamma0_to_sigma0: False 165 | 166 | # Save interpolated DEM used to compute RTC-S1 167 | save_dem: False 168 | 169 | # Save layover shadow mask 170 | save_mask: False 171 | 172 | # OPTIONAL - Absolute radiometric correction 173 | abs_rad_cal: 1 174 | 175 | # OPTIONAL - Clip values above threshold 176 | clip_max: 177 | 178 | # OPTIONAL - Clip values below threshold 179 | clip_min: 180 | 181 | # OPTIONAL - Double sampling of the radar-grid 182 | # input sampling in the range direction 183 | upsample_radargrid: False 184 | 185 | bursts_geogrid: 186 | output_epsg: 187 | x_posting: 300 188 | y_posting: 300 189 | x_snap: 300 190 | y_snap: 300 191 | top_left: 192 | x: 193 | y: 194 | bottom_right: 195 | x: 196 | y: 197 | 198 | 199 | mosaicking: 200 | mosaic_geogrid: 201 | output_epsg: 202 | x_posting: 300 203 | y_posting: 300 204 | x_snap: 300 205 | y_snap: 300 206 | top_left: 207 | x: 208 | y: 209 | bottom_right: 210 | x: 211 | y: 212 | -------------------------------------------------------------------------------- /tests/runconfigs/s1b_los_angeles_mask_on.yaml: -------------------------------------------------------------------------------- 1 | runconfig: 2 | name: rtc_s1_workflow_default 3 | 4 | groups: 5 | primary_executable: 6 | product_type: RTC_S1 7 | 8 | pge_name_group: 9 | pge_name: RTC_S1_PGE 10 | 11 | input_file_group: 12 | # Required. List of SAFE files (min=1) 13 | safe_file_path: [data/s1b_los_angeles/input_dir/S1B_IW_SLC__1SDV_20200926T135152_20200926T135219_023547_02CBCC_F988-CROPPED.zip] 14 | 15 | # Required. List of orbit (EOF) files (min=1) 16 | orbit_file_path: [data/s1b_los_angeles/input_dir/S1B_OPER_AUX_PREORB_OPOD_20200926T111139_V20200926T095426_20200926T162926.EOF] 17 | 18 | # Optional. Burst ID to process (empty for all bursts) 19 | # burst_id: [t069_147173_iw1, t069_147174_iw1] 20 | burst_id: [t071_151225_iw1, t071_151226_iw1] 21 | 22 | dynamic_ancillary_file_group: 23 | # Digital elevation model 24 | dem_file: data/s1b_los_angeles/input_dir/dem.tif 25 | 26 | # Digital elevation model description 27 | dem_file_description: 28 | 29 | static_ancillary_file_group: 30 | 31 | # burst database sqlite file 32 | burst_database_file: 33 | 34 | product_group: 35 | processing_type: 'CUSTOM' 36 | 37 | # Directory where PGE will place results 38 | product_path: . 39 | 40 | # Directory where SAS writes temporary data 41 | scratch_path: data/s1b_los_angeles/scratch_dir 42 | 43 | # If option `save_bursts` is set, output bursts are saved to: 44 | # {output_dir}/{burst_id}/{product_id}{suffix}.{ext} 45 | # If option `save_mosaics` is set, output mosaics are saved to: 46 | # {output_dir}/{product_id}{suffix}.{ext} 47 | # If the field `product_id`` is left empty, the prefix "rtc_product" 48 | # will be used instead. 49 | # `suffix` is only used when there are multiple output files. 50 | # `ext` is determined by geocoding_options.output_imagery_format. 51 | output_dir: data/s1b_los_angeles/output_dir 52 | product_id: OPERA_L2_RTC-S1_071-151230-IW2_20200926T135152Z_20230125T134122Z_S1B_30 53 | 54 | save_bursts: True 55 | save_mosaics: True 56 | output_imagery_format: COG 57 | output_imagery_compression: ZSTD 58 | output_imagery_nbits: 16 59 | 60 | # Optional. Save secondary layers (e.g., inc. angle) within 61 | # the HDF5 file 62 | save_secondary_layers_as_hdf5: False 63 | 64 | # Save RTC-S1 metadata in the HDF5 format 65 | # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or 66 | # 'COG', and enabled by default for `output_imagery_format` equal 67 | # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True 68 | save_metadata: True 69 | 70 | processing: 71 | 72 | # Check if ancillary input cover entirely output products 73 | check_ancillary_inputs_coverage: True 74 | 75 | polarization: co-pol 76 | 77 | # Options to run geo2rdr 78 | geo2rdr: 79 | threshold: 1.0e-8 80 | numiter: 25 81 | 82 | # Options to run rdr2geo 83 | rdr2geo: 84 | threshold: 1.0e-7 85 | numiter: 25 86 | 87 | # Apply absolute radiometric correction 88 | apply_absolute_radiometric_correction: True 89 | 90 | # Apply thermal noise correction 91 | apply_thermal_noise_correction: True 92 | 93 | # OPTIONAL - Apply RTC 94 | apply_rtc: True 95 | 96 | # Apply bistatic delay correction 97 | apply_bistatic_delay_correction: True 98 | 99 | # Apply static tropospheric delay correction 100 | apply_static_tropospheric_delay_correction: True 101 | 102 | # OPTIONAL - to control behavior of RTC module 103 | # (only applicable if geocode.apply_rtc is True) 104 | rtc: 105 | # OPTIONAL - Choices: 106 | # "gamma0" (default) 107 | # "sigma0" 108 | output_type: gamma0 109 | 110 | # OPTIONAL - Choices: 111 | # "bilinear_distribution" (default) 112 | # "area_projection" 113 | algorithm_type: area_projection 114 | 115 | # OPTIONAL - Choices: 116 | # "beta0" (default) 117 | # "sigma0" 118 | input_terrain_radiometry: beta0 119 | 120 | # OPTIONAL - Minimum RTC area factor in dB 121 | rtc_min_value_db: -30 122 | 123 | # RTC DEM upsampling 124 | dem_upsampling: 1 125 | 126 | # OPTIONAL - Mechanism to specify output posting and DEM 127 | geocoding: 128 | 129 | # OPTIONAL - Apply valid-samples sub-swath masking 130 | apply_valid_samples_sub_swath_masking: True 131 | 132 | # OPTIONAL - Apply shadow masking 133 | apply_shadow_masking: False 134 | 135 | # OPTIONAL - 136 | algorithm_type: area_projection 137 | 138 | # OPTIONAL - Choices: "single_block", "geogrid", "geogrid_radargrid", and "auto" (default) 139 | memory_mode: auto 140 | 141 | # Save the incidence angle 142 | save_incidence_angle: False 143 | 144 | # Save the local-incidence angle 145 | save_local_inc_angle: True 146 | 147 | # Save the projection angle 148 | save_projection_angle: False 149 | 150 | # Save the RTC ANF compuated with the projection angle method 151 | save_rtc_anf_projection_angle: False 152 | 153 | # Save the range slope angle 154 | save_range_slope: False 155 | 156 | # Save the number of looks used to compute RTC-S1 157 | save_nlooks: True 158 | 159 | # Save the RTC area normalization factor (ANF) used to generate 160 | # the RTC product 161 | save_rtc_anf: True 162 | 163 | # Save the RTC area normalization factor (ANF) gamma0 to sigma0 164 | save_rtc_anf_gamma0_to_sigma0: False 165 | 166 | # Save interpolated DEM used to compute RTC-S1 167 | save_dem: False 168 | 169 | # Save layover shadow mask 170 | save_mask: True 171 | 172 | # OPTIONAL - Absolute radiometric correction 173 | abs_rad_cal: 1 174 | 175 | # OPTIONAL - Clip values above threshold 176 | clip_max: 177 | 178 | # OPTIONAL - Clip values below threshold 179 | clip_min: 180 | 181 | # OPTIONAL - Double sampling of the radar-grid 182 | # input sampling in the range direction 183 | upsample_radargrid: False 184 | 185 | bursts_geogrid: 186 | output_epsg: 187 | x_posting: 300 188 | y_posting: 300 189 | x_snap: 300 190 | y_snap: 300 191 | top_left: 192 | x: 193 | y: 194 | bottom_right: 195 | x: 196 | y: 197 | 198 | 199 | mosaicking: 200 | mosaic_geogrid: 201 | output_epsg: 202 | x_posting: 300 203 | y_posting: 300 204 | x_snap: 300 205 | y_snap: 300 206 | top_left: 207 | x: 208 | y: 209 | bottom_right: 210 | x: 211 | y: 212 | -------------------------------------------------------------------------------- /tests/runconfigs/s1b_los_angeles_mask_on_h5.yaml: -------------------------------------------------------------------------------- 1 | runconfig: 2 | name: rtc_s1_workflow_default 3 | 4 | groups: 5 | primary_executable: 6 | product_type: RTC_S1 7 | 8 | pge_name_group: 9 | pge_name: RTC_S1_PGE 10 | 11 | input_file_group: 12 | # Required. List of SAFE files (min=1) 13 | safe_file_path: [data/s1b_los_angeles/input_dir/S1B_IW_SLC__1SDV_20200926T135152_20200926T135219_023547_02CBCC_F988-CROPPED.zip] 14 | 15 | # Required. List of orbit (EOF) files (min=1) 16 | orbit_file_path: [data/s1b_los_angeles/input_dir/S1B_OPER_AUX_PREORB_OPOD_20200926T111139_V20200926T095426_20200926T162926.EOF] 17 | 18 | # Optional. Burst ID to process (empty for all bursts) 19 | # burst_id: [t069_147173_iw1, t069_147174_iw1] 20 | burst_id: [t071_151225_iw1, t071_151226_iw1] 21 | 22 | dynamic_ancillary_file_group: 23 | # Digital elevation model 24 | dem_file: data/s1b_los_angeles/input_dir/dem.tif 25 | 26 | # Digital elevation model description 27 | dem_file_description: 28 | 29 | static_ancillary_file_group: 30 | 31 | # burst database sqlite file 32 | burst_database_file: 33 | 34 | product_group: 35 | processing_type: 'CUSTOM' 36 | 37 | # Directory where PGE will place results 38 | product_path: . 39 | 40 | # Directory where SAS writes temporary data 41 | scratch_path: data/s1b_los_angeles/scratch_dir 42 | 43 | # If option `save_bursts` is set, output bursts are saved to: 44 | # {output_dir}/{burst_id}/{product_id}{suffix}.{ext} 45 | # If option `save_mosaics` is set, output mosaics are saved to: 46 | # {output_dir}/{product_id}{suffix}.{ext} 47 | # If the field `product_id`` is left empty, the prefix "rtc_product" 48 | # will be used instead. 49 | # `suffix` is only used when there are multiple output files. 50 | # `ext` is determined by geocoding_options.output_imagery_format. 51 | output_dir: data/s1b_los_angeles/output_dir 52 | product_id: OPERA_L2_RTC-S1_071-151230-IW2_20200926T135152Z_20230125T134122Z_S1B_30 53 | 54 | save_bursts: True 55 | save_mosaics: True 56 | output_imagery_format: HDF5 57 | output_imagery_compression: ZSTD 58 | output_imagery_nbits: 16 59 | 60 | # Optional. Save secondary layers (e.g., inc. angle) within 61 | # the HDF5 file 62 | save_secondary_layers_as_hdf5: True 63 | 64 | # Save RTC-S1 metadata in the HDF5 format 65 | # Optional for `output_imagery_format` equal to 'ENVI', 'GTiff', or 66 | # 'COG', and enabled by default for `output_imagery_format` equal 67 | # to 'HDF5' or 'NETCDF' or `save_secondary_layers_as_hdf5` is True 68 | save_metadata: True 69 | 70 | processing: 71 | 72 | # Check if ancillary input cover entirely output products 73 | check_ancillary_inputs_coverage: True 74 | 75 | polarization: co-pol 76 | 77 | # Options to run geo2rdr 78 | geo2rdr: 79 | threshold: 1.0e-8 80 | numiter: 25 81 | 82 | # Options to run rdr2geo 83 | rdr2geo: 84 | threshold: 1.0e-7 85 | numiter: 25 86 | 87 | # Apply absolute radiometric correction 88 | apply_absolute_radiometric_correction: True 89 | 90 | # Apply thermal noise correction 91 | apply_thermal_noise_correction: True 92 | 93 | # OPTIONAL - Apply RTC 94 | apply_rtc: True 95 | 96 | # Apply bistatic delay correction 97 | apply_bistatic_delay_correction: True 98 | 99 | # Apply static tropospheric delay correction 100 | apply_static_tropospheric_delay_correction: True 101 | 102 | # OPTIONAL - to control behavior of RTC module 103 | # (only applicable if geocode.apply_rtc is True) 104 | rtc: 105 | # OPTIONAL - Choices: 106 | # "gamma0" (default) 107 | # "sigma0" 108 | output_type: gamma0 109 | 110 | # OPTIONAL - Choices: 111 | # "bilinear_distribution" (default) 112 | # "area_projection" 113 | algorithm_type: area_projection 114 | 115 | # OPTIONAL - Choices: 116 | # "beta0" (default) 117 | # "sigma0" 118 | input_terrain_radiometry: beta0 119 | 120 | # OPTIONAL - Minimum RTC area factor in dB 121 | rtc_min_value_db: -30 122 | 123 | # RTC DEM upsampling 124 | dem_upsampling: 1 125 | 126 | # OPTIONAL - Mechanism to specify output posting and DEM 127 | geocoding: 128 | 129 | # OPTIONAL - Apply valid-samples sub-swath masking 130 | apply_valid_samples_sub_swath_masking: True 131 | 132 | # OPTIONAL - Apply shadow masking 133 | apply_shadow_masking: False 134 | 135 | # OPTIONAL - 136 | algorithm_type: area_projection 137 | 138 | # OPTIONAL - Choices: "single_block", "geogrid", "geogrid_radargrid", and "auto" (default) 139 | memory_mode: auto 140 | 141 | # Save the incidence angle 142 | save_incidence_angle: False 143 | 144 | # Save the local-incidence angle 145 | save_local_inc_angle: True 146 | 147 | # Save the projection angle 148 | save_projection_angle: False 149 | 150 | # Save the RTC ANF compuated with the projection angle method 151 | save_rtc_anf_projection_angle: False 152 | 153 | # Save the range slope angle 154 | save_range_slope: False 155 | 156 | # Save the number of looks used to compute RTC-S1 157 | save_nlooks: True 158 | 159 | # Save the RTC area normalization factor (ANF) used to generate 160 | # the RTC product 161 | save_rtc_anf: True 162 | 163 | # Save the RTC area normalization factor (ANF) gamma0 to sigma0 164 | save_rtc_anf_gamma0_to_sigma0: False 165 | 166 | # Save interpolated DEM used to compute RTC-S1 167 | save_dem: False 168 | 169 | # Save layover shadow mask 170 | save_mask: True 171 | 172 | # OPTIONAL - Absolute radiometric correction 173 | abs_rad_cal: 1 174 | 175 | # OPTIONAL - Clip values above threshold 176 | clip_max: 177 | 178 | # OPTIONAL - Clip values below threshold 179 | clip_min: 180 | 181 | # OPTIONAL - Double sampling of the radar-grid 182 | # input sampling in the range direction 183 | upsample_radargrid: False 184 | 185 | bursts_geogrid: 186 | output_epsg: 187 | x_posting: 300 188 | y_posting: 300 189 | x_snap: 300 190 | y_snap: 300 191 | top_left: 192 | x: 193 | y: 194 | bottom_right: 195 | x: 196 | y: 197 | 198 | 199 | mosaicking: 200 | mosaic_geogrid: 201 | output_epsg: 202 | x_posting: 300 203 | y_posting: 300 204 | x_snap: 300 205 | y_snap: 300 206 | top_left: 207 | x: 208 | y: 209 | bottom_right: 210 | x: 211 | y: 212 | -------------------------------------------------------------------------------- /tests/test_rtc_s1_workflow.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | 3 | import os 4 | import requests 5 | import tarfile 6 | from osgeo import gdal 7 | from requests.adapters import HTTPAdapter 8 | from urllib3.util.retry import Retry 9 | 10 | from rtc.runconfig import RunConfig, load_parameters 11 | from rtc.core import create_logger 12 | from rtc.rtc_s1_single_job import run_single_job 13 | from rtc.rtc_s1 import run_parallel 14 | from rtc.h5_prep import DATA_BASE_GROUP 15 | 16 | FLAG_ALWAYS_DOWNLOAD = False 17 | 18 | 19 | def _load_cfg_parameters(cfg): 20 | 21 | load_parameters(cfg) 22 | 23 | # Load parameters 24 | output_dir = cfg.groups.product_group.output_dir 25 | product_id = cfg.groups.product_group.product_id 26 | if product_id is None: 27 | product_id = 'OPERA_L2_RTC-S1_{burst_id}' 28 | product_prefix = product_id 29 | 30 | output_imagery_format = \ 31 | cfg.groups.product_group.output_imagery_format 32 | save_imagery_as_hdf5 = (output_imagery_format == 'HDF5' or 33 | output_imagery_format == 'NETCDF') 34 | save_secondary_layers_as_hdf5 = \ 35 | cfg.groups.product_group.save_secondary_layers_as_hdf5 36 | 37 | save_metadata = (cfg.groups.product_group.save_metadata or 38 | save_imagery_as_hdf5 or 39 | save_secondary_layers_as_hdf5) 40 | 41 | if output_imagery_format == 'NETCDF': 42 | hdf5_file_extension = 'nc' 43 | else: 44 | hdf5_file_extension = 'h5' 45 | 46 | if save_imagery_as_hdf5 or output_imagery_format == 'COG': 47 | output_raster_format = 'GTiff' 48 | else: 49 | output_raster_format = output_imagery_format 50 | 51 | if output_raster_format == 'GTiff': 52 | imagery_extension = 'tif' 53 | else: 54 | imagery_extension = 'bin' 55 | 56 | return output_dir, product_prefix, save_imagery_as_hdf5, \ 57 | save_secondary_layers_as_hdf5, save_metadata, \ 58 | hdf5_file_extension, imagery_extension 59 | 60 | 61 | def _is_valid_gdal_reference(gdal_reference): 62 | try: 63 | gdal_ds = gdal.Open(gdal_reference, gdal.GA_ReadOnly) 64 | return gdal_ds is not None 65 | except RuntimeError: 66 | return False 67 | return False 68 | 69 | 70 | def _check_results(output_dir, product_prefix, save_imagery_as_hdf5, 71 | save_secondary_layers_as_hdf5, save_metadata, 72 | hdf5_file_extension, imagery_extension): 73 | 74 | # Check RTC-S1 HDF5 metadata 75 | assert save_metadata 76 | geo_h5_filename = os.path.join( 77 | output_dir, f'{product_prefix}.{hdf5_file_extension}') 78 | 79 | # Check RTC-S1 imagery 80 | if save_imagery_as_hdf5: 81 | 82 | # assert that VV image is present 83 | geo_vv_file = (f'NETCDF:"{geo_h5_filename}":' 84 | f'{DATA_BASE_GROUP}/VV') 85 | assert _is_valid_gdal_reference(geo_vv_file) 86 | 87 | # assert that HH image is not present 88 | geo_hh_file = (f'NETCDF:"{geo_h5_filename}":' 89 | f'{DATA_BASE_GROUP}/HH') 90 | assert not _is_valid_gdal_reference(geo_hh_file) 91 | 92 | else: 93 | 94 | # assert that VV image is present 95 | geo_vv_filename = os.path.join( 96 | output_dir, f'{product_prefix}_VV.{imagery_extension}') 97 | assert os.path.isfile(geo_vv_filename) 98 | 99 | # assert that HH image is not present 100 | geo_hh_filename = os.path.join( 101 | output_dir, f'{product_prefix}_HH.{imagery_extension}') 102 | assert not os.path.isfile(geo_hh_filename) 103 | 104 | # Check RTC-S1 secondary layers 105 | if save_secondary_layers_as_hdf5: 106 | 107 | # assert that the following secondary layers are present: 108 | ds_list = ['numberOfLooks', 109 | 'rtcAreaNormalizationFactorGamma0ToBeta0', 110 | # 'rtcAreaNormalizationFactorGamma0ToSigma0', 111 | 'localIncidenceAngle'] 112 | for ds_name in ds_list: 113 | current_file = (f'NETCDF:"{geo_h5_filename}":' 114 | f'{DATA_BASE_GROUP}/' 115 | f'{ds_name}') 116 | assert _is_valid_gdal_reference(current_file) 117 | 118 | # assert that the following secondary layers are not present: 119 | ds_list = ['incidenceAngle', 'projectionAngle'] 120 | for ds_name in ds_list: 121 | current_file = (f'NETCDF:"{geo_h5_filename}":' 122 | f'{DATA_BASE_GROUP}/' 123 | f'{ds_name}') 124 | assert not _is_valid_gdal_reference(current_file) 125 | 126 | else: 127 | # assert that the following secondary layers are present: 128 | ds_list = ['number_of_looks', 'rtc_anf_gamma0_to_beta0', 129 | # 'rtc_area_normalization_factor_gamma0_to_sigma0', 130 | 'local_incidence_angle'] 131 | for ds_name in ds_list: 132 | current_file = os.path.join( 133 | output_dir, f'{product_prefix}_' 134 | f'{ds_name}.{imagery_extension}') 135 | assert os.path.isfile(current_file) 136 | 137 | # assert that the following secondary layers are not present: 138 | ds_list = ['incidence_angle', 'projectionAngle'] 139 | for ds_name in ds_list: 140 | current_file = os.path.join( 141 | output_dir, f'{product_prefix}_' 142 | f'{ds_name}.{imagery_extension}') 143 | assert not os.path.isfile(current_file) 144 | 145 | 146 | def test_workflow(): 147 | 148 | test_data_directory = 'data' 149 | 150 | if not os.path.isdir(test_data_directory): 151 | os.makedirs(test_data_directory, exist_ok=True) 152 | 153 | dataset_name = 's1b_los_angeles' 154 | dataset_url = ('https://zenodo.org/records/7753472/files/' 155 | 's1b_los_angeles.tar.gz?download=1') 156 | 157 | tests_dir = os.path.dirname(__file__) 158 | dataset_dir = os.path.join(test_data_directory, dataset_name) 159 | if FLAG_ALWAYS_DOWNLOAD or not os.path.isdir(dataset_dir): 160 | 161 | print(f'Test dataset {dataset_name} not found. Downloading' 162 | f' file {dataset_url}.') 163 | # To avoid the issue in downloading, try again. 164 | session = requests.Session() 165 | retries = Retry( 166 | total=5, # up to 5 attempts 167 | backoff_factor=2, # 2 s, 4 s, 8 s, … 168 | status_forcelist=[502, 503, 504], 169 | ) 170 | session.mount("https://", HTTPAdapter(max_retries=retries)) 171 | 172 | compressed_path = os.path.join(test_data_directory, 173 | f"{dataset_name}.tar.gz") 174 | with session.get(dataset_url, stream=True, timeout=120) as r: 175 | r.raise_for_status() 176 | with open(compressed_path, "wb") as f: 177 | for chunk in r.iter_content(chunk_size=1024 * 1024): # 1 MB 178 | f.write(chunk) 179 | 180 | print(f"Extracting {compressed_path}") 181 | with tarfile.open(compressed_path, "r:gz") as tf: 182 | tf.extractall(test_data_directory) 183 | 184 | # create logger 185 | log_file = os.path.join('data', 'log.txt') 186 | full_log_formatting = False 187 | create_logger(log_file, full_log_formatting) 188 | 189 | for runconfig_mode in ['mask_off', 'mask_on', 190 | 'mask_off_h5', 'mask_on_h5']: 191 | 192 | # Get a runconfig dict from command line argumens 193 | runconfig_path = os.path.join( 194 | tests_dir, 'runconfigs', 195 | f's1b_los_angeles_{runconfig_mode}.yaml') 196 | 197 | cfg = RunConfig.load_from_yaml(runconfig_path) 198 | 199 | output_dir_single_job, product_prefix, save_imagery_as_hdf5, \ 200 | save_secondary_layers_as_hdf5, save_metadata, \ 201 | hdf5_file_extension, imagery_extension = _load_cfg_parameters(cfg) 202 | 203 | # Run geocode burst workflow (single job) 204 | run_single_job(cfg) 205 | 206 | _check_results(output_dir_single_job, product_prefix, 207 | save_imagery_as_hdf5, save_secondary_layers_as_hdf5, 208 | save_metadata, hdf5_file_extension, 209 | imagery_extension) 210 | 211 | # Run geocode burst workflow (parallel) 212 | output_dir_parallel = os.path.join('data', 's1b_los_angeles', 213 | 'output_dir_parallel') 214 | 215 | cfg.groups.product_group.output_dir = output_dir_parallel 216 | 217 | log_file_path = 'log.txt' 218 | flag_logger_full_format = False 219 | run_parallel(cfg, log_file_path, flag_logger_full_format) 220 | 221 | _check_results(output_dir_parallel, product_prefix, 222 | save_imagery_as_hdf5, save_secondary_layers_as_hdf5, 223 | save_metadata, hdf5_file_extension, imagery_extension) 224 | --------------------------------------------------------------------------------