├── docs
│   ├── _static
│   │   └── .gitkeep
│   ├── _templates
│   │   └── .gitkeep
│   ├── index.md
│   ├── Makefile
│   ├── make.bat
│   ├── conf.py
│   ├── DATATREE.md
│   └── USER-REQUIREMENTS.md
├── .python-version
├── notebooks
│   ├── data
│   ├── download_data.sh
│   ├── fsspec.ipynb
│   ├── Sentinel-1_SLC_IW.ipynb
│   ├── Sentinel-1_SLC_SM.ipynb
│   ├── Sentinel-1_GRD_IW.ipynb
│   └── Sentinel-1_GRD_SM.ipynb
├── xarray_sentinel
│   ├── py.typed
│   ├── resources
│   │   └── sentinel1
│   │       ├── my-gml.xsd
│   │       ├── my-safe-sentinel-1.0-sentinel-1.xsd
│   │       ├── my-safe-sentinel-1.0-sentinel-1-sar-level-1.xsd
│   │       ├── my-xfdu.xsd
│   │       ├── my-no-namespace.xsd
│   │       ├── s1-level-1-calibration.xsd
│   │       ├── my-schema_orb.xsd
│   │       ├── s1-level-1-noise.xsd
│   │       ├── my-safe-sentinel-1.0.xsd
│   │       └── s1-level-1-rfi.xsd
│   ├── xarray_backends.py
│   ├── __init__.py
│   ├── reformat.py
│   ├── eopf_product.py
│   ├── eopf_metadata.py
│   ├── conventions.py
│   └── esa_safe.py
├── tests
│   ├── test_00_version.py
│   ├── data
│   │   ├── S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.zip
│   │   ├── S1A_EW_SLC__1SDH_20210403T122536_20210403T122630_037286_046484_8152.SAFE
│   │   │   └── measurement
│   │   │       └── s1a-ew1-slc-hh-20210403t122536-20210403t122628-037286-046484-001.tiff
│   │   ├── S1A_IW_SLC__1SDH_20220414T102209_20220414T102236_042768_051AA4_E677.SAFE
│   │   │   └── measurement
│   │   │       └── s1a-iw1-slc-hh-20220414t102211-20220414t102236-042768-051aa4-001.tiff
│   │   ├── S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE
│   │   │   └── measurement
│   │   │       └── s1a-s3-slc-vh-20210401t152855-20210401t152914-037258-04638e-001.tiff
│   │   ├── S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE
│   │   │   └── measurement
│   │   │       └── s1b-iw-grd-vv-20210401t052623-20210401t052648-026269-032297-001.tiff
│   │   ├── S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE
│   │   │   └── measurement
│   │   │       ├── s1b-iw1-slc-vh-20210401t052624-20210401t052649-026269-032297-001.tiff
│   │   │       ├── s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.tiff
│   │   │       └── s1b-iw2-slc-vh-20210401t052622-20210401t052650-026269-032297-002.tiff
│   │   └── download.sh
│   ├── test_30_eopf_product.py
│   ├── test_35_xarray_backends_dask.py
│   ├── test_40_reformat_zarr.py
│   ├── test_40_reformat_netcdf4.py
│   ├── slow_test_40_reformat.py
│   ├── slow_test_50_cfchecker.py
│   ├── test_30_sentinel1_fsspec.py
│   ├── test_15_eopf_metadata.py
│   ├── test_30_xarray_backends.py
│   └── test_10_esa_safe.py
├── .github
│   ├── dependabot.yml
│   └── workflows
│       └── on-push.yml
├── .pre-commit-config-cruft.yaml
├── environment-dev.yml
├── environment.yml
├── Dockerfile
├── ci
│   ├── environment-ci.yml
│   └── environment-integration.yml
├── CONTRIBUTING.md
├── .cruft.json
├── .pre-commit-config.yaml
├── Makefile
├── pyproject.toml
├── .gitignore
└── LICENSE
/docs/_static/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/.python-version:
--------------------------------------------------------------------------------
1 | 3.13
2 |
--------------------------------------------------------------------------------
/docs/_templates/.gitkeep:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/notebooks/data:
--------------------------------------------------------------------------------
1 | ../../data
--------------------------------------------------------------------------------
/xarray_sentinel/py.typed:
--------------------------------------------------------------------------------
1 |
--------------------------------------------------------------------------------
/tests/test_00_version.py:
--------------------------------------------------------------------------------
1 | import xarray_sentinel
2 |
3 |
4 | def test_version() -> None:
5 | assert xarray_sentinel.__version__ != "999"
6 |
--------------------------------------------------------------------------------
/.github/dependabot.yml:
--------------------------------------------------------------------------------
1 | version: 2
2 | updates:
3 | - package-ecosystem: github-actions
4 | directory: /
5 | schedule:
6 | interval: weekly
7 |
--------------------------------------------------------------------------------
/.pre-commit-config-cruft.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/cruft/cruft
3 | rev: 2.15.0
4 | hooks:
5 | - id: cruft
6 | entry: cruft update -y
7 | additional_dependencies: [toml]
8 |
--------------------------------------------------------------------------------
/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.zip:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.zip
--------------------------------------------------------------------------------
/environment-dev.yml:
--------------------------------------------------------------------------------
1 | channels:
2 | - conda-forge
3 | dependencies:
4 | - adlfs
5 | - build
6 | - cfchecker
7 | - gcsfs
8 | - ipytree
9 | - matplotlib
10 | - mamba
11 | - notebook
12 | - sentinelsat
13 | - pytest
14 | - shapely
15 | - stac-validator
16 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | channels:
2 | - conda-forge
3 | dependencies:
4 | - dask >= 2022.6.1
5 | - fsspec >= 2022.3.0
6 | - netcdf4
7 | - numpy >= 1.23.0
8 | - rasterio >= 1.3.0
9 | - rioxarray >= 0.10.0
10 | - scipy
11 | - xarray >= 2022.06.0
12 | - xmlschema >= 2.0.1
13 | - zarr
14 |
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM continuumio/miniconda3
2 |
3 | WORKDIR /src/xarray-sentinel
4 |
5 | COPY environment.yml /src/xarray-sentinel/
6 |
7 | RUN conda install -c conda-forge gcc python=3.11 \
8 | && conda env update -n base -f environment.yml
9 |
10 | COPY . /src/xarray-sentinel
11 |
12 | RUN pip install --no-deps -e .
13 |
--------------------------------------------------------------------------------
/xarray_sentinel/resources/sentinel1/my-gml.xsd:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/tests/data/S1A_EW_SLC__1SDH_20210403T122536_20210403T122630_037286_046484_8152.SAFE/measurement/s1a-ew1-slc-hh-20210403t122536-20210403t122628-037286-046484-001.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1A_EW_SLC__1SDH_20210403T122536_20210403T122630_037286_046484_8152.SAFE/measurement/s1a-ew1-slc-hh-20210403t122536-20210403t122628-037286-046484-001.tiff
--------------------------------------------------------------------------------
/tests/data/S1A_IW_SLC__1SDH_20220414T102209_20220414T102236_042768_051AA4_E677.SAFE/measurement/s1a-iw1-slc-hh-20220414t102211-20220414t102236-042768-051aa4-001.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1A_IW_SLC__1SDH_20220414T102209_20220414T102236_042768_051AA4_E677.SAFE/measurement/s1a-iw1-slc-hh-20220414t102211-20220414t102236-042768-051aa4-001.tiff
--------------------------------------------------------------------------------
/tests/data/S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE/measurement/s1a-s3-slc-vh-20210401t152855-20210401t152914-037258-04638e-001.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE/measurement/s1a-s3-slc-vh-20210401t152855-20210401t152914-037258-04638e-001.tiff
--------------------------------------------------------------------------------
/tests/data/S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE/measurement/s1b-iw-grd-vv-20210401t052623-20210401t052648-026269-032297-001.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE/measurement/s1b-iw-grd-vv-20210401t052623-20210401t052648-026269-032297-001.tiff
--------------------------------------------------------------------------------
/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/measurement/s1b-iw1-slc-vh-20210401t052624-20210401t052649-026269-032297-001.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/measurement/s1b-iw1-slc-vh-20210401t052624-20210401t052649-026269-032297-001.tiff
--------------------------------------------------------------------------------
/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/measurement/s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/measurement/s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.tiff
--------------------------------------------------------------------------------
/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/measurement/s1b-iw2-slc-vh-20210401t052622-20210401t052650-026269-032297-002.tiff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/bopen/xarray-sentinel/HEAD/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/measurement/s1b-iw2-slc-vh-20210401t052622-20210401t052650-026269-032297-002.tiff
--------------------------------------------------------------------------------
/docs/index.md:
--------------------------------------------------------------------------------
1 | # Welcome to xarray_sentinel's documentation!
2 |
3 | Easily access and explore the SAR data products of the Copernicus Sentinel-1 satellite mission
4 |
5 | ```{toctree}
6 | :caption: 'Contents:'
7 | :maxdepth: 2
8 |
9 | README.md
10 | API Reference <_api/xarray_sentinel/index>
11 | ```
12 |
13 | # Indices and tables
14 |
15 | - {ref}`genindex`
16 | - {ref}`modindex`
17 | - {ref}`search`
18 |
--------------------------------------------------------------------------------
/ci/environment-ci.yml:
--------------------------------------------------------------------------------
1 | # environment-ci.yml: Additional dependencies to install in the CI environment.
2 | channels:
3 | - conda-forge
4 | - nodefaults
5 | dependencies:
6 | - make
7 | - mypy
8 | - myst-parser
9 | - pip
10 | - pre-commit
11 | - pydata-sphinx-theme
12 | - pytest
13 | - pytest-cov
14 | - sphinx
15 | - sphinx-autoapi
16 | # DO NOT EDIT ABOVE THIS LINE, ADD DEPENDENCIES BELOW
17 | - pandas-stubs
18 | - shapely
19 | - stac-validator
20 | - types-setuptools
21 |
--------------------------------------------------------------------------------
/tests/test_30_eopf_product.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | import xarray as xr
4 |
5 | from xarray_sentinel import eopf_product
6 |
7 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
8 |
9 | SLC_S3 = (
10 | DATA_FOLDER
11 | / "S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE"
12 | )
13 |
14 |
15 | def test_open_datatree() -> None:
16 | res = eopf_product.open_datatree(SLC_S3, check_files_exist=True)
17 |
18 | assert isinstance(res, xr.DataTree)
19 |
--------------------------------------------------------------------------------
/ci/environment-integration.yml:
--------------------------------------------------------------------------------
1 | # environment-integration.yml: Additional dependencies to install in the integration environment (e.g., pinned dependencies).
2 | channels:
3 | - conda-forge
4 | - nodefaults
5 | dependencies:
6 | - make
7 | - pytest
8 | - pytest-cov
9 | # DO NOT EDIT ABOVE THIS LINE, ADD DEPENDENCIES BELOW
10 | - dask == 2023.2.0
11 | - fsspec == 2023.1.0
12 | - numpy == 1.22.0
13 | - packaging == 21.3
14 | - pandas == 1.4.0
15 | - rasterio == 1.3.0
16 | - rioxarray == 0.13.0
17 | - xarray == 2023.02.0
18 | - xmlschema == 2.2.0
19 |
--------------------------------------------------------------------------------
/CONTRIBUTING.md:
--------------------------------------------------------------------------------
1 | Set up the base `XARRAY-SENTINEL` conda environment and update it to include the development dependencies with:
2 |
3 | ```
4 | cd xarray-sentinel
5 | conda create -n XARRAY-SENTINEL -c conda-forge python=3.9 mamba
6 | conda activate XARRAY-SENTINEL
7 | make conda-env-update-all CONDA=mamba CONDAFLAGS=
8 | pip install -e .
9 | pip install -U --pre --no-deps --no-binary rasterio "rasterio>=1.3a3" # for fsspec support
10 | ```
11 |
12 | Download sample data and run the notebooks:
13 |
14 | ```
15 | cd notebooks
16 | DHUS_USER= DHUS_PASSWORD= ./download_data.sh
17 | jupyter notebook
18 | ```
19 |
--------------------------------------------------------------------------------
/tests/test_35_xarray_backends_dask.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | import pytest
4 | import xarray as xr
5 |
6 | dask = pytest.importorskip("dask")
7 |
8 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
9 |
10 |
11 | def test_open_pol_dataset_preferred_chunks() -> None:
12 | product_path = (
13 | DATA_FOLDER
14 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
15 | )
16 | res = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/VV", chunks={})
17 |
18 | assert isinstance(res, xr.Dataset)
19 | assert len(res.dims) == 2
20 | assert res.measurement.chunks[0][0] == res.attrs["lines_per_burst"]
21 |
--------------------------------------------------------------------------------
/tests/test_40_reformat_zarr.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any
3 |
4 | import pytest
5 |
6 | from xarray_sentinel import reformat
7 |
8 | pytest.importorskip("zarr")
9 |
10 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
11 |
12 |
13 | def test_to_group_zarr(tmpdir: Any) -> None:
14 | product_path = (
15 | DATA_FOLDER
16 | / "S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE"
17 | )
18 | tmp_path = str(tmpdir.join("tmp.zarr"))
19 | groups = {"IW/VV/gcp": "IW/VV/gcp", "IW/VH/attitude": "IW/VH/attitude"}
20 |
21 | reformat.to_group_zarr(product_path, tmp_path, groups)
22 |
23 | reformat.to_group_zarr(product_path, tmp_path)
24 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line, and also
5 | # from the environment for the first two.
6 | SPHINXOPTS ?=
7 | SPHINXBUILD ?= sphinx-build
8 | SOURCEDIR = .
9 | BUILDDIR = _build
10 |
11 | # Put it first so that "make" without argument is like "make help".
12 | help:
13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
14 |
15 | .PHONY: help Makefile
16 |
17 | # Catch-all target: route all unknown targets to Sphinx using the new
18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile
20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
21 |
--------------------------------------------------------------------------------
/tests/test_40_reformat_netcdf4.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any
3 |
4 | import pytest
5 |
6 | from xarray_sentinel import reformat
7 |
8 | pytest.importorskip("netCDF4")
9 |
10 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
11 |
12 |
13 | def test_to_group_netcdf(tmpdir: Any) -> None:
14 | product_path = (
15 | DATA_FOLDER
16 | / "S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE"
17 | )
18 | tmp_path = str(tmpdir.join("tmp.nc"))
19 | groups = {"IW/VV/gcp": "IW/VV/gcp", "IW/VH/attitude": "IW/VH/attitude"}
20 |
21 | reformat.to_group_netcdf(product_path, tmp_path, groups, engine="netcdf4")
22 |
23 | reformat.to_group_netcdf(product_path, tmp_path, engine="netcdf4")
24 |
--------------------------------------------------------------------------------
/.cruft.json:
--------------------------------------------------------------------------------
1 | {
2 | "template": "https://github.com/ecmwf-projects/cookiecutter-conda-package",
3 | "commit": "8397fdf4e595402b3266e357ae5f53ebce99a0c9",
4 | "checkout": null,
5 | "context": {
6 | "cookiecutter": {
7 | "project_name": "xarray-sentinel",
8 | "project_slug": "xarray_sentinel",
9 | "project_short_description": "Easily access and explore the SAR data products of the Copernicus Sentinel-1 satellite mission",
10 | "copyright_holder": "B-Open Solutions srl and the xarray-sentinel authors",
11 | "copyright_year": "2021",
12 | "mypy_strict": true,
13 | "integration_tests": true,
14 | "pypi": true,
15 | "_template": "https://github.com/ecmwf-projects/cookiecutter-conda-package"
16 | }
17 | },
18 | "directory": null
19 | }
20 |
--------------------------------------------------------------------------------
/tests/slow_test_40_reformat.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any
3 |
4 | import pytest
5 |
6 | from xarray_sentinel import reformat
7 |
8 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
9 |
10 |
11 | def test_to_group_zarr(tmpdir: Any) -> None:
12 | product_path = (
13 | DATA_FOLDER
14 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
15 | )
16 | tmp_path = str(tmpdir.join("tmp.zarr"))
17 |
18 | reformat.to_group_zarr(product_path, tmp_path)
19 |
20 |
21 | @pytest.mark.xfail
22 | def test_to_group_netcdf(tmpdir: Any) -> None:
23 | product_path = (
24 | DATA_FOLDER
25 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
26 | )
27 | tmp_path = str(tmpdir.join("tmp.nc"))
28 |
29 | reformat.to_group_netcdf(product_path, tmp_path)
30 |
--------------------------------------------------------------------------------
/notebooks/download_data.sh:
--------------------------------------------------------------------------------
1 | #!/bin/sh
2 |
3 | mkdir -p data
4 |
5 | # SM
6 |
7 | ## SLC
8 | sentinelsat --path data -d --include-pattern "*vv*" \
9 | --name S1B_S6_SLC__1SDV_20211216T115438_20211216T115501_030050_03968A_4DCB
10 |
11 | ## GRD
12 | sentinelsat --path data -d --include-pattern "*vv*" \
13 | --name S1B_S6_GRDH_1SDV_20211216T115438_20211216T115501_030050_03968A_0F8A
14 |
15 | ## GRD zipped to test fsspec
16 | sentinelsat --path data -d \
17 | --name S1B_S6_GRDH_1SDV_20211216T115438_20211216T115501_030050_03968A_0F8A
18 |
19 |
20 | # IW
21 |
22 | ## SLC
23 | sentinelsat --path data -d --include-pattern "*iw3*vv*" \
24 | --name S1B_IW_SLC__1SDV_20211223T051121_20211223T051148_030148_039993_BA4B
25 |
26 | ## GRD
27 | sentinelsat --path data -d --include-pattern "*vv*" \
28 | --name S1B_IW_GRDH_1SDV_20211223T051122_20211223T051147_030148_039993_5371
29 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | %SPHINXBUILD% >NUL 2>NUL
14 | if errorlevel 9009 (
15 | echo.
16 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
17 | echo.installed, then set the SPHINXBUILD environment variable to point
18 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
19 | echo.may add the Sphinx directory to PATH.
20 | echo.
21 | echo.If you don't have Sphinx installed, grab it from
22 | echo.https://www.sphinx-doc.org/
23 | exit /b 1
24 | )
25 |
26 | if "%1" == "" goto help
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/xarray_sentinel/resources/sentinel1/my-safe-sentinel-1.0-sentinel-1.xsd:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.pre-commit-config.yaml:
--------------------------------------------------------------------------------
1 | repos:
2 | - repo: https://github.com/pre-commit/pre-commit-hooks
3 | rev: v5.0.0
4 | hooks:
5 | - id: trailing-whitespace
6 | - id: end-of-file-fixer
7 | - id: check-json
8 | - id: check-yaml
9 | - id: check-toml
10 | - id: check-added-large-files
11 | - id: check-merge-conflict
12 | - id: debug-statements
13 | - id: mixed-line-ending
14 | - repo: https://github.com/keewis/blackdoc
15 | rev: v0.3.9
16 | hooks:
17 | - id: blackdoc
18 | additional_dependencies: [black==23.11.0]
19 | - repo: https://github.com/astral-sh/ruff-pre-commit
20 | rev: v0.8.3
21 | hooks:
22 | - id: ruff
23 | args: [--fix, --show-fixes]
24 | - id: ruff-format
25 | - repo: https://github.com/executablebooks/mdformat
26 | rev: 0.7.19
27 | hooks:
28 | - id: mdformat
29 | - repo: https://github.com/macisamuele/language-formatters-pre-commit-hooks
30 | rev: v2.14.0
31 | hooks:
32 | - id: pretty-format-yaml
33 | args: [--autofix, --preserve-quotes]
34 | - id: pretty-format-toml
35 | args: [--autofix]
36 | - repo: https://github.com/gitleaks/gitleaks
37 | rev: v8.21.2
38 | hooks:
39 | - id: gitleaks
40 |
--------------------------------------------------------------------------------
/Makefile:
--------------------------------------------------------------------------------
1 | PROJECT := xarray-sentinel
2 | CONDA := conda
3 | CONDAFLAGS :=
4 | COV_REPORT := html
5 |
6 | default: qa unit-tests type-check
7 |
8 | qa:
9 | uv run --frozen -m pre_commit run --all-files
10 |
11 | unit-tests:
12 | uv run --frozen -m pytest -vv --cov=. --cov-report=$(COV_REPORT)
13 |
14 | type-check:
15 | uv run --frozen -m mypy --strict .
16 |
17 | conda-env-update:
18 | $(CONDA) install -y -c conda-forge conda-merge
19 | $(CONDA) run conda-merge environment.yml ci/environment-ci.yml > ci/combined-environment-ci.yml
20 | $(CONDA) env update $(CONDAFLAGS) -f ci/combined-environment-ci.yml
21 |
22 | docker-build:
23 | docker build -t $(PROJECT) .
24 |
25 | docker-run:
26 | docker run --rm -ti -v $(PWD):/srv $(PROJECT)
27 |
28 | template-update:
29 | pre-commit run --all-files cruft -c .pre-commit-config-cruft.yaml
30 |
31 | docs-build:
32 | cp README.md docs/. && cd docs && rm -fr _api && make clean && make html
33 |
34 | # DO NOT EDIT ABOVE THIS LINE, ADD COMMANDS BELOW
35 |
36 | doc-tests:
37 | uv run --frozen -m pytest -vv --doctest-glob="*.md" --doctest-glob="*.rst" README.md
38 |
39 | integration-tests:
40 | uv run --frozen -m pytest -vv --cov=. --cov-report=$(COV_REPORT) --log-cli-level=INFO tests/integration*.py
41 |
--------------------------------------------------------------------------------
/xarray_sentinel/xarray_backends.py:
--------------------------------------------------------------------------------
1 | import os
2 | from typing import Any
3 |
4 | import fsspec
5 | import xarray as xr
6 |
7 | from . import sentinel1
8 |
9 |
10 | class Sentinel1Backend(xr.backends.common.BackendEntrypoint):
11 | def open_dataset( # type: ignore
12 | self,
13 | filename_or_obj: str,
14 | drop_variables: tuple[str] | None = None,
15 | group: str | None = None,
16 | storage_options: dict[str, Any] | None = None,
17 | override_product_files: str | None = None,
18 | fs: fsspec.AbstractFileSystem | None = None,
19 | check_files_exist: bool = False,
20 | parse_geospatial_attrs: bool = True,
21 | ) -> xr.Dataset:
22 | ds = sentinel1.open_sentinel1_dataset(
23 | filename_or_obj,
24 | drop_variables=drop_variables,
25 | group=group,
26 | storage_options=storage_options,
27 | override_product_files=override_product_files,
28 | fs=fs,
29 | check_files_exist=check_files_exist,
30 | parse_geospatial_attrs=parse_geospatial_attrs,
31 | )
32 | return ds
33 |
34 | def guess_can_open(self, filename_or_obj: Any) -> bool:
35 | try:
36 | _, ext = os.path.splitext(filename_or_obj)
37 | except TypeError:
38 | return False
39 | return ext.lower() in {".safe", ".safe/"}
40 |
--------------------------------------------------------------------------------
/xarray_sentinel/__init__.py:
--------------------------------------------------------------------------------
1 | # Copyright 2021-2022, B-Open Solutions srl and the xarray-sentinel authors.
2 | #
3 | # Licensed under the Apache License, Version 2.0 (the "License");
4 | # you may not use this file except in compliance with the License.
5 | # You may obtain a copy of the License at
6 | #
7 | # http://www.apache.org/licenses/LICENSE-2.0
8 | #
9 | # Unless required by applicable law or agreed to in writing, software
10 | # distributed under the License is distributed on an "AS IS" BASIS,
11 | # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12 | # See the License for the specific language governing permissions and
13 | # limitations under the License.
14 |
15 | try:
16 | # NOTE: the `version.py` file must not be present in the git repository
17 | # as it is generated by setuptools at install time
18 | from .version import __version__
19 | except ImportError: # pragma: no cover
20 | # Local copy or not installed with setuptools
21 | __version__ = "999"
22 |
23 | from .esa_safe import make_stac_item
24 | from .sentinel1 import (
25 | calibrate_amplitude,
26 | calibrate_intensity,
27 | crop_burst_dataset,
28 | get_footprint_linestring,
29 | ground_range_to_slant_range_time,
30 | mosaic_slc_iw,
31 | open_sentinel1_dataset,
32 | slant_range_time_to_ground_range,
33 | )
34 |
35 | __all__ = [
36 | "__version__",
37 | "calibrate_amplitude",
38 | "calibrate_intensity",
39 | "crop_burst_dataset",
40 | "get_footprint_linestring",
41 | "ground_range_to_slant_range_time",
42 | "make_stac_item",
43 | "mosaic_slc_iw",
44 | "open_sentinel1_dataset",
45 | "slant_range_time_to_ground_range",
46 | ]
47 |
--------------------------------------------------------------------------------
/xarray_sentinel/resources/sentinel1/my-safe-sentinel-1.0-sentinel-1-sar-level-1.xsd:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/xarray_sentinel/resources/sentinel1/my-xfdu.xsd:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/xarray_sentinel/reformat.py:
--------------------------------------------------------------------------------
1 | from typing import Any, Dict
2 |
3 | import xarray as xr
4 |
5 | from . import esa_safe
6 |
7 |
8 | def to_group_zarr(
9 | product_path: esa_safe.PathType,
10 | output_store: Any,
11 | groups: Dict[str, str] | None = None,
12 | **kwargs: Any,
13 | ) -> None:
14 | root = xr.open_dataset(product_path, engine="sentinel-1")
15 | root.to_zarr(output_store, mode="w", **kwargs)
16 |
17 | if groups is None:
18 | groups = {g: g for g in root.attrs["subgroups"]}
19 |
20 | for group_out, group_in in groups.items():
21 | try:
22 | group_ds = xr.open_dataset(
23 | product_path, engine="sentinel-1", group=group_in
24 | )
25 | group_ds.to_zarr(output_store, mode="a", group=group_out, **kwargs)
26 | except FileNotFoundError:
27 | pass
28 |
29 |
30 | # Apparently there is no way to save SLC images because "netcdf4" doesn't support complex data
31 | # and "h5netcdf" crashes on an issue related to dimension names
32 | def to_group_netcdf(
33 | product_path: esa_safe.PathType,
34 | output_store: str,
35 | groups: Dict[str, str] | None = None,
36 | **kwargs: Any,
37 | ) -> None:
38 | root = xr.open_dataset(product_path, engine="sentinel-1")
39 | root.to_netcdf(output_store, mode="w", **kwargs)
40 |
41 | if groups is None:
42 | groups = {g: g for g in root.attrs["subgroups"]}
43 |
44 | for group_out, group_in in groups.items():
45 | try:
46 | group_ds = xr.open_dataset(
47 | product_path, engine="sentinel-1", group=group_in
48 | )
49 | group_ds.to_netcdf(output_store, mode="a", group=group_out, **kwargs)
50 | except FileNotFoundError:
51 | pass
52 |
--------------------------------------------------------------------------------
/tests/data/download.sh:
--------------------------------------------------------------------------------
1 | #!/bin/bash
2 |
3 | echo "Uncomment to download the full products, the unzip manually"
4 |
5 | if [ -z "$SCIHUB_CREDENTIALS" ]
6 | then
7 | echo "define the SCIHUB_CREDENTIALS environment virable as user:password";
8 | exit 1
9 | fi
10 |
11 | # Sentinel-1 SLC IW
12 | # curl -u $SCIHUB_CREDENTIALS -L -o S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.zip "https://scihub.copernicus.eu/dhus/odata/v1/Products('cb28c2e3-f258-4af0-96a7-9af05a82cc5c')/\$value"
13 |
14 | # Sentinel-1 SLC SM S6
15 | # curl -u $SCIHUB_CREDENTIALS -L -o S1A_S6_SLC__1SDV_20210402T115512_20210402T115535_037271_046407_39FD.zip "https://scihub.copernicus.eu/dhus/odata/v1/Products('d45c1d65-5bd9-4ccf-a882-aff206a5c157')/\$value"
16 |
17 | # Sentinel-1 SLC SM S3
18 | # curl -u $SCIHUB_CREDENTIALS -L -o S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.zip "https://scihub.copernicus.eu/dhus/odata/v1/Products('8a5251d4-490a-4669-9702-57d844c7ee77')/\$value"
19 |
20 | # Sentinel-1 SLC EW
21 | # curl -u $SCIHUB_CREDENTIALS -L -o S1A_EW_SLC__1SDH_20210403T122536_20210403T122630_037286_046484_8152.zip "https://scihub.copernicus.eu/dhus/odata/v1/Products('4e237863-7cf3-4110-8a28-744d2b80a21c')/\$value"
22 |
23 | # Sentinel-1 SLC WV
24 | # curl -u $SCIHUB_CREDENTIALS -L -o S1B_WV_SLC__1SSV_20210403T083025_20210403T084452_026300_032390_D542.zip "https://scihub.copernicus.eu/dhus/odata/v1/Products('a3660e2f-bd85-47f0-9ab2-2467c13689c5')/\$value"
25 |
26 | # Sentinel-1 GRD IW
27 | # curl -u $SCIHUB_CREDENTIALS -L -o S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.zip "https://scihub.copernicus.eu/dhus/odata/v1/Products('93265069-4d8e-4c6e-926c-73cff7bc605f')/\$value"
28 |
29 | # Sentinel-2 S2MSI1C
30 | # curl -u $SCIHUB_CREDENTIALS -L -o S2A_MSIL1C_20210403T101021_N0300_R022_T33TUM_20210403T110551.zip "https://scihub.copernicus.eu/dhus/odata/v1/Products('4c14fd90-6a4a-42f4-a484-abf1df711ed0')/\$value"
31 |
--------------------------------------------------------------------------------
/tests/slow_test_50_cfchecker.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any, Dict
3 |
4 | import pytest
5 | import xarray as xr
6 | from cfchecker import cfchecks
7 |
8 | pytest.importorskip("netCDF4")
9 |
10 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
11 |
12 |
13 | def cfcheck(path: str) -> Dict[str, int]:
14 | (
15 | badc,
16 | coards,
17 | debug,
18 | uploader,
19 | useFileName,
20 | regionnames,
21 | standardName,
22 | areaTypes,
23 | cacheDir,
24 | cacheTables,
25 | cacheTime,
26 | version,
27 | files,
28 | ) = cfchecks.getargs(["cfchecks", path])
29 |
30 | inst = cfchecks.CFChecker(
31 | uploader=uploader,
32 | useFileName=useFileName,
33 | badc=badc,
34 | coards=coards,
35 | cfRegionNamesXML=regionnames,
36 | cfStandardNamesXML=standardName,
37 | cfAreaTypesXML=areaTypes,
38 | cacheDir=cacheDir,
39 | cacheTables=cacheTables,
40 | cacheTime=cacheTime,
41 | version=version,
42 | debug=debug,
43 | )
44 | for file in files:
45 | try:
46 | inst.checker(file)
47 | except cfchecks.FatalCheckerError:
48 | print("Checking of file %s aborted due to error" % file)
49 |
50 | totals: Dict[str, int] = inst.get_total_counts()
51 |
52 | return totals
53 |
54 |
55 | def test_cfcheck_grd(tmpdir: Any) -> None:
56 | product_path = (
57 | DATA_FOLDER
58 | / "S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE"
59 | )
60 |
61 | groups = [""]
62 | while groups:
63 | group = groups.pop()
64 | try:
65 | ds = xr.open_dataset(product_path, engine="sentinel-1", group=group)
66 | groups.extend(f"{group}/{g}" for g in ds.attrs.get("subgroups", []))
67 | except FileNotFoundError:
68 | continue
69 | nc_path = tmpdir.join(group.replace("/", "-") + ".nc")
70 | ds.to_netcdf(nc_path)
71 |
72 | totals = cfcheck(str(nc_path))
73 |
74 | assert totals["FATAL"] + totals["ERROR"] + totals["WARN"] == 0
75 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html
6 |
7 | # -- Import and path setup ---------------------------------------------------
8 |
9 | import os
10 | import sys
11 |
12 | import xarray_sentinel
13 |
14 | sys.path.insert(0, os.path.abspath("../"))
15 |
16 | # -- Project information -----------------------------------------------------
17 |
18 | project = "xarray_sentinel"
19 | copyright = "2023, B-Open Solutions srl"
20 | author = "B-Open Solutions srl"
21 | version = xarray_sentinel.__version__
22 | release = xarray_sentinel.__version__
23 |
24 | # -- General configuration ---------------------------------------------------
25 |
26 | # Add any Sphinx extension module names here, as strings. They can be
27 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
28 | # ones.
29 | extensions = [
30 | "autoapi.extension",
31 | "myst_parser",
32 | "sphinx.ext.autodoc",
33 | "sphinx.ext.napoleon",
34 | ]
35 |
36 | # autodoc configuration
37 | autodoc_typehints = "none"
38 |
39 | # autoapi configuration
40 | autoapi_add_toctree_entry = False
41 | autoapi_dirs = ["../xarray_sentinel"]
42 | autoapi_ignore = ["*/version.py"]
43 | autoapi_member_order = "groupwise"
44 | autoapi_options = [
45 | "members",
46 | "inherited-members",
47 | "undoc-members",
48 | "show-inheritance",
49 | "show-module-summary",
50 | "imported-members",
51 | ]
52 | autoapi_root = "_api"
53 |
54 | # napoleon configuration
55 | napoleon_google_docstring = False
56 | napoleon_numpy_docstring = True
57 | napoleon_preprocess_types = True
58 |
59 | # Add any paths that contain templates here, relative to this directory.
60 | templates_path = ["_templates"]
61 |
62 | # List of patterns, relative to source directory, that match files and
63 | # directories to ignore when looking for source files.
64 | # This pattern also affects html_static_path and html_extra_path.
65 | exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
66 |
67 |
68 | # -- Options for HTML output -------------------------------------------------
69 |
70 | # The theme to use for HTML and HTML Help pages. See the documentation for
71 | # a list of builtin themes.
72 | #
73 | html_theme = "pydata_sphinx_theme"
74 |
75 | # Add any paths that contain custom static files (such as style sheets) here,
76 | # relative to this directory. They are copied after the builtin static files,
77 | # so a file named "default.css" will overwrite the builtin "default.css".
78 | html_static_path = ["_static"]
79 |
--------------------------------------------------------------------------------
/docs/DATATREE.md:
--------------------------------------------------------------------------------
1 | # Tentative data-tree structure
2 |
3 | Sentinel-1 SLC IW product structure:
4 |
5 | ```
6 | /
7 | ├─ IW1
8 | │ ├─ VH
9 | │ │ ├─ line (line)
10 | │ │ ├─ pixel (pixel)
11 | │ │ ├─ slant_range_time (pixel)
12 | │ │ ├─ measurement (line, pixel)
13 | │ │ ├─ gcp
14 | │ │ │ ├─ azimuth_time (azimuth_time)
15 | │ │ │ ├─ slant_range_time (slant_range_time)
16 | │ │ │ ├─ line (azimuth_time)
17 | │ │ │ ├─ pixel (slant_range_time)
18 | │ │ │ ├─ latitude (azimuth_time, slant_range_time)
19 | │ │ │ ├─ longitude (azimuth_time, slant_range_time)
20 | │ │ │ ├─ height (azimuth_time, slant_range_time)
21 | │ │ │ ├─ incidenceAngle (azimuth_time, slant_range_time)
22 | │ │ │ └─ elevationAngle (azimuth_time, slant_range_time)
23 | │ │ ├─ orbit
24 | │ │ │ ├─ azimuth_time (azimuth_time)
25 | │ │ │ ├─ axis (axis) # "x", "y", "z"
26 | │ │ │ ├─ position (azimuth_time, axis)
27 | │ │ │ └─ velocity (azimuth_time, axis)
28 | │ │ ├─ attitude
29 | │ │ │ ├─ azimuth_time (azimuth_time)
30 | │ │ │ ├─ q0 (azimuth_time)
31 | │ │ │ ├─ q1 (azimuth_time)
32 | │ │ │ ├─ q2 (azimuth_time)
33 | │ │ │ ├─ q3 (azimuth_time)
34 | │ │ │ ├─ wx (azimuth_time)
35 | │ │ │ ├─ wy (azimuth_time)
36 | │ │ │ ├─ wz (azimuth_time)
37 | │ │ │ ├─ pitch (azimuth_time)
38 | │ │ │ ├─ roll (azimuth_time)
39 | │ │ │ └─ yaw (azimuth_time)
40 | │ │ ├─ calibration
41 | │ │ │ ├─ azimuth_time (line)
42 | │ │ │ ├─ line (line)
43 | │ │ │ ├─ pixel (pixel)
44 | │ │ │ ├─ sigmaNought (line, pixel)
45 | │ │ │ ├─ betaNought (line, pixel)
46 | │ │ │ ├─ gamma (line, pixel)
47 | │ │ │ └─ dn (line, pixel)
48 |
49 | # do we need the following as virtual structures?
50 |
51 | │ │ ├─ R168-N459-E0115 # format is f"R{relative_orbit:03}-{N_or_S}{lat:03}-{E_or_W}{lon:04}"
52 | │ │ │ ├─ azimuth_time (azimuth_time)
53 | │ │ │ ├─ slant_range_time (slant_range_time)
54 | │ │ │ ├─ line (azimuth_time)
55 | │ │ │ ├─ pixel (slant_range_time)
56 | │ │ │ ├─ R168-N459-E0115 (azimuth_time, slant_range_time) # "measurements" or "VH-R168-N459-E0115"?
57 | │ │ │ ├─ gcp # GCPs relative to the burst?
58 | │ │ │ │ ├─ azimuth_time (azimuth_time)
59 | │ │ │ │ ├─ slant_range_time (slant_range_time)
60 | │ │ │ │ ├─ line (azimuth_time)
61 | │ │ │ │ ├─ pixel (slant_range_time)
62 | │ │ │ │ ├─ latitude (azimuth_time, slant_range_time)
63 | │ │ │ │ ├─ longitude (azimuth_time, slant_range_time)
64 | │ │ │ │ ├─ height (azimuth_time, slant_range_time)
65 | │ │ │ │ ├─ incidenceAngle (azimuth_time, slant_range_time)
66 | │ │ │ │ └─ elevationAngle (azimuth_time, slant_range_time)
67 | │ │ │ └─ calibration # calibration relative to the burst?
68 | │ │ │ ├─ azimuth_time (line)
69 | ...
70 | ```
71 |
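72 | For comparison, the groups above can already be opened one at a time with the
73 | `sentinel-1` backend (a minimal sketch using one of the test products; group names
74 | follow the structure above):
75 | 
76 | ```python
77 | import xarray as xr
78 | 
79 | product = "tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
80 | 
81 | # one node of the tree at a time: swath/polarisation, then the metadata groups
82 | measurement = xr.open_dataset(product, engine="sentinel-1", group="IW1/VH")
83 | gcp = xr.open_dataset(product, engine="sentinel-1", group="IW1/VH/gcp")
84 | orbit = xr.open_dataset(product, engine="sentinel-1", group="IW1/VH/orbit")
85 | ```
86 | 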
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | build-backend = "setuptools.build_meta"
3 | requires = ["setuptools>=64", "setuptools_scm>=8"]
4 |
5 | [dependency-groups]
6 | dev = [
7 | "cfchecker>=4.1.0",
8 | "dask>=2025.7.0",
9 | "mypy>=1.15.0",
10 | "pandas-stubs>=1.4.0",
11 | "pre-commit>=4.3.0",
12 | "pytest>=7.0",
13 | "pytest-cov>=5.0",
14 | "shapely>=2.1",
15 | "stac-validator>=3.6.0",
16 | "zarr>=2.18.3"
17 | ]
18 |
19 | [project]
20 | authors = [{"name" = "B-Open", "email" = "software@bopen.eu"}]
21 | classifiers = [
22 | "Development Status :: 4 - Beta",
23 | "Intended Audience :: Science/Research",
24 | "License :: OSI Approved :: Apache Software License",
25 | "Operating System :: OS Independent",
26 | "Programming Language :: Python",
27 | "Programming Language :: Python :: 3",
28 | "Programming Language :: Python :: 3.10",
29 | "Programming Language :: Python :: 3.11",
30 | "Programming Language :: Python :: 3.12",
31 | "Programming Language :: Python :: 3.13",
32 | "Topic :: Scientific/Engineering"
33 | ]
34 | dependencies = [
35 | "fsspec>=2023.1.0",
36 | "numpy>=1.22.0",
37 | "pandas>=1.4.0",
38 | "pydantic>=2.11.7",
39 | "rioxarray>=0.13.0",
40 | "scipy>=1.10.0",
41 | "xarray>=2023.2.0",
42 | "xmlschema>=2.2.0"
43 | ]
44 | description = "Easily access and explore the SAR data products of the Copernicus Sentinel-1 satellite mission"
45 | dynamic = ["version"]
46 | keywords = [
47 | "copernicus",
48 | "earth-observation",
49 | "radar",
50 | "remote-sensing",
51 | "satellite-imagery",
52 | "sentinel-1",
53 | "sar",
54 | "synthetic-aperture-radar",
55 | "xarray"
56 | ]
57 | license = {file = "LICENSE"}
58 | name = "xarray-sentinel"
59 | readme = "README.md"
60 | requires-python = ">=3.10"
61 |
62 | [project.entry-points."xarray.backends"]
63 | sentinel-1 = "xarray_sentinel.xarray_backends:Sentinel1Backend"
64 |
65 | [project.urls]
66 | repository = "https://github.com/bopen/xarray-sentinel"
67 |
68 | [tool.coverage.run]
69 | branch = true
70 |
71 | [tool.mypy]
72 | strict = true
73 |
74 | [[tool.mypy.overrides]]
75 | ignore_missing_imports = true
76 | module = [
77 | "cfchecker",
78 | "dask",
79 | "fsspec",
80 | "pydantic",
81 | "pydantic.alias_generators",
82 | "rasterio",
83 | "shapely",
84 | "shapely.geometry",
85 | "shapely.wkt",
86 | "stac_validator"
87 | ]
88 |
89 | [tool.ruff]
90 | # Same as Black.
91 | indent-width = 4
92 | line-length = 88
93 |
94 | [tool.ruff.lint]
95 | ignore = [
96 | # pydocstyle: Missing Docstrings
97 | "D1"
98 | ]
99 | select = [
100 | # pyflakes
101 | "F",
102 | # pycodestyle
103 | "E",
104 | "W",
105 | # isort
106 | "I",
107 | # pydocstyle
108 | "D"
109 | ]
110 |
111 | [tool.ruff.lint.pycodestyle]
112 | max-line-length = 110
113 |
114 | [tool.ruff.lint.pydocstyle]
115 | convention = "numpy"
116 |
117 | [tool.setuptools]
118 | packages = ["xarray_sentinel"]
119 |
120 | [tool.setuptools.package-data]
121 | xarray_sentinel = ["py.typed"]
122 |
123 | [tool.setuptools_scm]
124 | write_to = "xarray_sentinel/version.py"
125 | write_to_template = '''
126 | # Do not change! Do not track in version control!
127 | __version__ = "{version}"
128 | '''
129 |
--------------------------------------------------------------------------------
/xarray_sentinel/eopf_product.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any
3 |
4 | import fsspec
5 | import xarray as xr
6 |
7 | from . import esa_safe, sentinel1
8 |
9 |
10 | def open_datatree(
11 | product_urlpath: esa_safe.PathType,
12 | *,
13 | fs: fsspec.AbstractFileSystem | None = None,
14 | storage_options: dict[str, Any] | None = None,
15 | check_files_exist: bool = False,
16 | override_product_files: str | None = None,
17 | **kwargs: Any,
18 | ) -> xr.DataTree:
19 | product_name = pathlib.Path(product_urlpath).stem
20 | root = sentinel1.open_sentinel1_dataset(
21 | product_urlpath,
22 | fs=fs,
23 | storage_options=storage_options,
24 | check_files_exist=check_files_exist,
25 | override_product_files=override_product_files,
26 | )
27 | xarray_sentinel_groups = root.attrs["subgroups"]
28 | dt = xr.DataTree()
29 | for xarray_sentinel_group in xarray_sentinel_groups:
30 | swath, _, pol_group = xarray_sentinel_group.partition("/")
31 | pol, _, dataset = pol_group.partition("/")
32 | eopf_product_name = f"{product_name}_{swath}_{pol.upper()}"
33 | if not pol:
34 | continue
35 | if not dataset:
36 | measurement_ds = sentinel1.open_sentinel1_dataset(
37 | product_urlpath,
38 | fs=fs,
39 | storage_options=storage_options,
40 | check_files_exist=check_files_exist,
41 | override_product_files=override_product_files,
42 | group=xarray_sentinel_group,
43 | parse_eopf_metadata=True,
44 | ).rename(measurement="slc")
45 | if eopf_product_name not in dt.children:
46 | product_ds = xr.Dataset(
47 | attrs={
48 | "other_metadata": measurement_ds.attrs["other_metadata"],
49 | "stac_discovery": measurement_ds.attrs["stac_discovery"],
50 | }
51 | )
52 | dt[f"{eopf_product_name}"] = product_ds
53 | measurement_ds.attrs.clear()
54 | dt[f"{eopf_product_name}/measurements"] = measurement_ds
55 | elif dataset in {"orbit", "attitude", "dc_estimate", "gcp"}:
56 | ds = sentinel1.open_sentinel1_dataset(
57 | product_urlpath,
58 | fs=fs,
59 | storage_options=storage_options,
60 | check_files_exist=check_files_exist,
61 | group=xarray_sentinel_group,
62 | override_product_files=override_product_files,
63 | )
64 | if dataset == "dc_estimate":
65 | dataset = "doppler_centroid"
66 | ds.attrs.clear()
67 | dt[f"{eopf_product_name}/conditions/{dataset}"] = ds
68 | elif dataset in {"calibration", "noise_range", "noise_azimuth"}:
69 | ds = sentinel1.open_sentinel1_dataset(
70 | product_urlpath,
71 | fs=fs,
72 | storage_options=storage_options,
73 | check_files_exist=check_files_exist,
74 | group=xarray_sentinel_group,
75 | override_product_files=override_product_files,
76 | )
77 | ds.attrs.clear()
78 | dt[f"{eopf_product_name}/quality/{dataset}"] = ds
79 | else:
80 | print(f"Skipping {xarray_sentinel_group=}")
81 |
82 | return dt
83 |
--------------------------------------------------------------------------------
/docs/USER-REQUIREMENTS.md:
--------------------------------------------------------------------------------
1 | Data groups:
2 |
3 | - SLC complex measurements by swath and burst
4 | - azimuth / time, slant_range as dimensions, polarisation as variables
5 | - azimuth / slant_range coordinates as distances instead of times for
6 | easier interpretation? (slant_range == two-ways-time * speed-of-light,
7 | azimuth as linear distance from ascending node?) - rejected for now
8 | - keep time coordinates as UTC, TAI, UT1 and elapsed time from ascending node (NOT PRESENT??)
9 | - calibration information (azimuth / slant_range dimensions on a reduced grid)
10 | - ground control points (azimuth / slant_range dimensions on one more reduced grid)
11 | - de-ramping parameters
12 | - kinematic description:
13 | - orbit / state vectors
14 | - attitude / quaternions
15 | - antenna pattern
16 | - Doppler centroid / Doppler rate
17 | - incidence angle & Co.
18 |
19 | Not loaded:
20 |
21 | - noise
22 |
23 | Attributes:
24 |
25 | - mission, acquisition, processing, etc
26 |
27 | Conformance in order of precedence:
28 |
29 | - STAC metadata for attributes with SAT and SAR extensions
30 | - CF conventions for the coordinates (with special attention to time)
31 |
32 | High level requirements:
33 |
34 | - keep all naming as close to the original as possible (with XML camel case to Python snake case?)
35 | - support opening a swath when other swaths are missing (especially the tifs)
36 |
37 | # User experience
38 |
39 | ```python
40 | >>> import xarray as xr
41 | >>> ds = xr.open_dataset("tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/manifest.safe")
42 | >>> ds = xr.open_dataset("tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE")
43 | >>> ds
44 |
45 | ...
46 | >>> ds_iw1_gpc = xr.open_dataset("tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE", group="IW1/VV/gcp")
47 |
48 | ```
49 |
50 | Structure:
51 |
52 | - root / SAFE
53 | - swaths "IW1" "IW2" "S3" etc / duplicated in VH-VV annotation XML
54 | - bursts "R022_N433_E0120" etc
55 | - polarizations are data variables
56 | - gcp "gcp"
57 | - calibration "calibration"
58 | - orbit "orbit" / duplicated in annotation XML for different polarizations
59 | - attitude "attitude" / same as above
60 | - antenna pattern "antenna"
61 | - zero-Doppler "doppler"
62 |
63 | examples: `group="IW2/orbit"`, `group="IW2/N433_E0120"`, `group="S3/gcp"` etc
64 |
65 | Dimensions, coordinates and variables
66 |
67 | We may either keep names and units of measure as close as possible to the original
68 | or go for easier-to-use choices.
69 |
70 | In all cases we add the XML tag name to the `long_name` so that the provenance of the
71 | information is clear: e.g. for `slant_range_time` -> `"long_name": "two way delay (slantRangeTime)"`
72 |
73 | - `azimuth_time` as CF time in UTC (warn: may fail on leap seconds)
74 | - `slant_range_time` as CF time interval in `"s"`
75 |
76 | # Accuracy considerations
77 |
78 | - `azimuth_time` can be expressed as `np.datetime64[ns]` because
79 | spatial resolution at LEO speed is 10km/s * 1ns ~= 0.001cm
80 | - `slant_range_time` cannot be expressed as `np.timedelta64[ns]` because the
81 | spatial resolution at the speed of light is 300_000km/s * 1ns / 2 ~= 15cm,
82 | which is not enough for interferometric applications.
83 | `slant_range_time` needs a spatial resolution of 0.001cm at a 1_000km distance,
84 | so a relative precision of around 1e-9, which is well within the 1e-15 relative resolution of IEEE-754 float64.
85 |
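86 | A minimal numeric check of the figures above (illustrative only; the constants are the
87 | ones quoted in this document, the snippet is not part of the library):
88 | 
89 | ```python
90 | import numpy as np
91 | 
92 | # azimuth: ~10 km/s LEO speed, 1 ns of datetime64[ns] resolution
93 | print(10e3 * 1e-9)  # ~1e-5 m, i.e. 0.001 cm -> datetime64[ns] is fine
94 | 
95 | # slant range: 1 ns of two-way delay at the speed of light
96 | c = 299_792_458.0
97 | print(c * 1e-9 / 2)  # ~0.15 m -> np.timedelta64[ns] is too coarse
98 | 
99 | # float64 seconds: spacing of representable values around a ~6.7e-3 s two-way delay
100 | slant_range_time = 2 * 1_000e3 / c
101 | print(np.spacing(slant_range_time) * c / 2)  # ~1e-10 m, far below 0.001 cm
102 | ```
103 | 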
--------------------------------------------------------------------------------
/tests/test_30_sentinel1_fsspec.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | import fsspec
4 | import pytest
5 | import xarray as xr
6 |
7 | from xarray_sentinel import sentinel1
8 |
9 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
10 |
11 | SLC_IW = (
12 | DATA_FOLDER
13 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
14 | )
15 | SLC_IW1_VV_annotation = (
16 | DATA_FOLDER
17 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
18 | / "annotation"
19 | / "s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.xml"
20 | )
21 | SLC_IW1_VV_calibration = (
22 | DATA_FOLDER
23 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
24 | / "annotation"
25 | / "calibration"
26 | / "calibration-s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.xml"
27 | )
28 | SLC_IW1_VV_measurement = (
29 | DATA_FOLDER
30 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
31 | / "measurement"
32 | / "s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.tiff"
33 | )
34 | SLC_S3_VV_annotation = (
35 | DATA_FOLDER
36 | / "S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE"
37 | / "annotation"
38 | / "s1a-s3-slc-vv-20210401t152855-20210401t152914-037258-04638e-002.xml"
39 | )
40 | SLC_S3_VV_measurement = (
41 | DATA_FOLDER
42 | / "S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE"
43 | / "measurement"
44 | / "s1a-s3-slc-vv-20210401t152855-20210401t152914-037258-04638e-002.tiff"
45 | )
46 | GRD_IW_VV_annotation = (
47 | DATA_FOLDER
48 | / "S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE"
49 | / "annotation"
50 | / "s1b-iw-grd-vv-20210401t052623-20210401t052648-026269-032297-001.xml"
51 | )
52 |
53 |
54 | def test_get_fs_path() -> None:
55 | fs, path = sentinel1.get_fs_path(SLC_IW)
56 |
57 | assert isinstance(fs, fsspec.AbstractFileSystem)
58 | assert path == str(SLC_IW / "manifest.safe")
59 |
60 | fs2, path2 = sentinel1.get_fs_path(path, fs=fs)
61 |
62 | assert fs2 is fs
63 | assert path2 is path
64 |
65 | with pytest.raises(ValueError):
66 | sentinel1.get_fs_path("dummy*")
67 |
68 | with pytest.raises(ValueError):
69 | sentinel1.get_fs_path("*")
70 |
71 |
72 | def test_open_dataset_zip_metadata() -> None:
73 | zip_path = (
74 | DATA_FOLDER
75 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.zip"
76 | )
77 | zip_urlpath = f"zip://*/manifest.safe::{zip_path}"
78 | expected_groups = {
79 | "IW1",
80 | "IW1/VV",
81 | "IW1/VV/gcp",
82 | "IW1/VV/attitude",
83 | "IW1/VV/orbit",
84 | "IW1/VV/calibration",
85 | }
86 |
87 | res = sentinel1.open_sentinel1_dataset(zip_urlpath)
88 |
89 | assert isinstance(res, xr.Dataset)
90 | assert set(res.attrs["subgroups"]) >= expected_groups
91 |
92 | res = sentinel1.open_sentinel1_dataset(zip_urlpath, group="IW1/VV/orbit")
93 |
94 | assert isinstance(res, xr.Dataset)
95 | assert res.dims == {"axis": 3, "azimuth_time": 17}
96 |
97 |
98 | @pytest.mark.xfail
99 | def test_open_dataset_zip_data() -> None:
100 | zip_path = (
101 | DATA_FOLDER
102 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.zip"
103 | )
104 | zip_urlpath = f"zip://*/manifest.safe::{zip_path}"
105 |
106 | res = sentinel1.open_sentinel1_dataset(zip_urlpath, group="IW1/VV/0")
107 |
108 | assert isinstance(res, xr.Dataset)
109 | assert res.dims == {"slant_range_time": 21632, "azimuth_time": 1501}
110 | assert abs(res.measurement[:40, :40]).mean() >= 0
111 |
--------------------------------------------------------------------------------
/xarray_sentinel/eopf_metadata.py:
--------------------------------------------------------------------------------
1 | import warnings
2 | from typing import Any
3 |
4 | import fsspec
5 | import numpy as np
6 | import pydantic.alias_generators
7 |
8 | from . import esa_safe
9 |
10 |
11 | def to_snake_recursive(
12 | struct: dict[str, Any] | list[Any],
13 | ) -> dict[str, Any] | list[Any]:
14 | if isinstance(struct, dict):
15 | struct = {
16 | pydantic.alias_generators.to_snake(k): to_snake_recursive(v)
17 | for k, v in struct.items()
18 | }
19 | elif isinstance(struct, list):
20 | struct = [to_snake_recursive(v) for v in struct]
21 | return struct
22 |
23 |
24 | def fix_lists(struct: Any) -> Any:
25 | fixed: Any
26 | if isinstance(struct, dict):
27 | fixed = {}
28 | for k, v in struct.items():
29 | if k == "@count":
30 | continue
31 | if k[-5:] == "_list":
32 | try:
33 | fixed[k] = fix_lists(struct[k][k[:-5]])
34 | except Exception:
35 | fixed[k] = fix_lists(fix_lists(struct[k]))
36 | else:
37 | fixed[k] = fix_lists(struct[k])
38 | elif isinstance(struct, list):
39 | fixed = [fix_lists(v) for v in struct]
40 | else:
41 | fixed = struct
42 | return fixed
43 |
44 |
45 | def filter_metadata_dict(image_information: dict[str, Any]) -> dict[str, Any]:
46 | image_information = to_snake_recursive(image_information) # type: ignore
47 | image_information = fix_lists(image_information)
48 | return image_information
49 |
50 |
51 | def build_azimuth_fm_rate_list(
52 | azimuth_fm_rate_list: list[dict[str, Any] | list[Any]],
53 | ) -> list[dict[str, Any] | list[Any]]:
54 | azimuth_fm_rate_list_out: list[dict[str, Any] | list[Any]] = []
55 | for item in azimuth_fm_rate_list:
56 | azimuth_fm_rate_polynomial_str = item["azimuth_fm_rate_polynomial"]["$"] # type: ignore
57 | azimuth_fm_rate_list_out.append(
58 | {
59 | "azimuth_time": item["azimuth_time"], # type: ignore
60 | "azimuth_fm_rate_polynomial": np.fromstring(
61 | azimuth_fm_rate_polynomial_str, sep=" "
62 | ).tolist(),
63 | }
64 | )
65 | return azimuth_fm_rate_list_out
66 |
67 |
68 | def build_general_annotation(general_annotation: dict[str, Any]) -> dict[str, Any]:
69 | general_annotation = filter_metadata_dict(general_annotation)
70 | _ = general_annotation.pop("orbit_list")
71 | _ = general_annotation.pop("attitude_list")
72 | general_annotation["azimuth_fm_rate_list"] = build_azimuth_fm_rate_list(
73 | general_annotation["azimuth_fm_rate_list"]
74 | )
75 | return general_annotation
76 |
77 |
78 | def build_other_metadata(annotation_urlpath: esa_safe.PathType) -> dict[str, Any]:
79 | warnings.warn("This is an unofficial, alpha converter", UserWarning)
80 | with fsspec.open(annotation_urlpath) as fp:
81 | quality_information = esa_safe.parse_tag(fp, "//qualityInformation")
82 | quality_information = filter_metadata_dict(quality_information)
83 | general_annotation = esa_safe.parse_tag(fp, "//generalAnnotation")
84 | general_annotation = build_general_annotation(general_annotation)
85 | image_information = esa_safe.parse_tag(fp, "//imageAnnotation")
86 | image_information = filter_metadata_dict(image_information)
87 | swath_merginig = esa_safe.parse_tag(fp, "//swathMerging")
88 | swath_merginig = filter_metadata_dict(swath_merginig)
89 | swath_timing = esa_safe.parse_tag(fp, "//swathTiming")
90 | swath_timing = filter_metadata_dict(swath_timing)
91 |
92 | other_metadata = {
93 | "quality_information": quality_information,
94 | "general_annotation": general_annotation,
95 | "image_annotation": image_information,
96 | "swath_timing": swath_timing,
97 | "swath_merginig": swath_merginig,
98 | }
99 | return other_metadata
100 |
--------------------------------------------------------------------------------
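
A minimal sketch of how the helpers in eopf_metadata.py above reshape a parsed annotation fragment (the input dict is illustrative, not taken from a real product): to_snake_recursive converts camelCase keys, and fix_lists drops the "@count" attributes and unwraps "*_list" wrappers into plain lists.

    from xarray_sentinel import eopf_metadata

    raw = {"dcEstimateList": {"@count": 1, "dcEstimate": [{"azimuthTime": "2021-04-01T15:28:55"}]}}
    snake = eopf_metadata.to_snake_recursive(raw)
    # {'dc_estimate_list': {'@count': 1, 'dc_estimate': [{'azimuth_time': '2021-04-01T15:28:55'}]}}
    fixed = eopf_metadata.fix_lists(snake)
    # {'dc_estimate_list': [{'azimuth_time': '2021-04-01T15:28:55'}]}

filter_metadata_dict simply chains these two steps, which is how build_other_metadata normalises every parsed tag.
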
/tests/test_15_eopf_metadata.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | from xarray_sentinel import eopf_metadata
4 |
5 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
6 |
7 | SLC_S3 = (
8 | DATA_FOLDER
9 | / "S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE"
10 | )
11 | SLC_S3_VH_annotation = (
12 | SLC_S3
13 | / "annotation"
14 | / "s1a-s3-slc-vh-20210401t152855-20210401t152914-037258-04638e-001.xml"
15 | )
16 |
17 |
18 | def test_to_snake_recursive() -> None:
19 | metadata = {"qualityInformation": {"qualityDataList": [{"qualityData": 2}]}}
20 | expected = {"quality_information": {"quality_data_list": [{"quality_data": 2}]}}
21 |
22 | res = eopf_metadata.to_snake_recursive(metadata)
23 |
24 | assert res == expected
25 |
26 |
27 | def test_build_other_metadata() -> None:
28 | res = eopf_metadata.build_other_metadata(SLC_S3_VH_annotation)
29 | expected_quality_information = {
30 | "product_quality_index": 0.0,
31 | "quality_data_list": [
32 | {
33 | "azimuth_time": "2021-04-01T15:28:55.111501",
34 | "downlink_quality": {
35 | "i_input_data_mean": 0.06302300840616226,
36 | "q_input_data_mean": 0.114902101457119,
37 | "input_data_mean_outside_nominal_range_flag": False,
38 | "i_input_data_std_dev": 1.674363970756531,
39 | "q_input_data_std_dev": 1.515843987464905,
40 | "input_data_st_dev_outside_nominal_range_flag": False,
41 | "num_downlink_input_data_gaps": 0,
42 | "downlink_gaps_in_input_data_significant_flag": False,
43 | "num_downlink_input_missing_lines": 0,
44 | "downlink_missing_lines_significant_flag": False,
45 | "num_instrument_input_data_gaps": 0,
46 | "instrument_gaps_in_input_data_significant_flag": False,
47 | "num_instrument_input_missing_lines": 0,
48 | "instrument_missing_lines_significant_flag": False,
49 | "num_ssb_error_input_data_gaps": 0,
50 | "ssb_error_gaps_in_input_data_significant_flag": False,
51 | "num_ssb_error_input_missing_lines": 0,
52 | "ssb_error_missing_lines_significant_flag": False,
53 | "chirp_source_used": "Nominal",
54 | "pg_source_used": "Extracted",
55 | "rrf_spectrum_used": "Extended Tapered",
56 | "replica_reconstruction_failed_flag": False,
57 | "mean_pg_product_amplitude": 0.8810305595397949,
58 | "std_dev_pg_product_amplitude": 0.006521929986774921,
59 | "mean_pg_product_phase": 0.07730674743652344,
60 | "std_dev_pg_product_phase": 0.01562887243926525,
61 | "pg_product_derivation_failed_flag": False,
62 | "invalid_downlink_params_flag": True,
63 | },
64 | "raw_data_analysis_quality": {
65 | "i_bias": 0.06302300840616226,
66 | "i_bias_significance_flag": True,
67 | "q_bias": 0.114902101457119,
68 | "q_bias_significance_flag": True,
69 | "iq_gain_imbalance": 1.104575037956238,
70 | "iq_gain_significance_flag": True,
71 | "iq_quadrature_departure": -0.5162643194198608,
72 | "iq_quadrature_departure_significance_flag": True,
73 | },
74 | "doppler_centroid_quality": {
75 | "dc_method": "Data Analysis",
76 | "doppler_centroid_uncertain_flag": False,
77 | },
78 | "image_quality": {
79 | "image_statistics": {
80 | "output_data_mean": {"re": -0.02062067, "im": 0.01445807},
81 | "output_data_std_dev": {"re": 6.993358, "im": 6.996273},
82 | },
83 | "output_data_mean_outside_nominal_range_flag": True,
84 | "output_data_st_dev_outside_nominal_range_flag": True,
85 | },
86 | }
87 | ],
88 | }
89 |
90 | assert len(res) == 5
91 |
92 | assert res["quality_information"] == expected_quality_information
93 |
--------------------------------------------------------------------------------
/xarray_sentinel/resources/sentinel1/my-no-namespace.xsd:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/xarray_sentinel/conventions.py:
--------------------------------------------------------------------------------
1 | """CF representation of metadata according to Sentinel-1 Product Specification.
2 |
3 | See: S1-RS-MDA-52-7441, DI-MPC-PB, MPC-0240, 3/7, 27/02/2020
4 | https://sentinel.esa.int/documents/247904/1877131/Sentinel-1-Product-Specification
5 | """
6 |
7 | import xarray as xr
8 |
9 | from . import __version__
10 |
11 | GROUP_ATTRIBUTES = {
12 | "orbit": {
13 | "title": "Orbit information used by the IPF during processing",
14 | "comment": (
15 | "The dataset contains a sets of orbit state vectors that are updated along azimuth."
16 | " The values represent the interpolated values used by the IPF"
17 | " and are derived from the sub-commutated ancillary data from the ISPs"
18 | " or from an input auxiliary orbit file"
19 | ),
20 | },
21 | "attitude": {
22 | "title": "Attitude information used by the IPF during processing",
23 | "comment": (
24 | "The dataset contains a sets of attitude data records that are updated along azimuth."
25 | " The values represent the interpolated values used by the IPF"
26 | " and are derived from the sub-commutated ancillary data from the ISPs"
27 | " or from an input auxiliary orbit file"
28 | ),
29 | },
30 | "gcp": {
31 | "title": "Geolocation grid",
32 | "comment": (
33 | "The dataset contains geolocation grid point entries for each line/pixel"
34 | " combination based on a configured resolution."
35 | " The list contains an entry for each update made along azimuth"
36 | ),
37 | },
38 | "calibration": {
39 | "title": "Calibration coefficients",
40 | "comment": (
41 | "The dataset contains calibration information and the beta nought, sigma nought,"
42 | " gamma and digital number (DN) Look-up Tables (LUT) that can be used for"
43 | " absolute product calibration"
44 | ),
45 | },
46 | }
47 |
48 | VARIABLE_ATTRIBUTES = {
49 | "line": {"units": "1", "long_name": "product line number"},
50 | "pixel": {"units": "1", "long_name": "product pixel number"},
51 | "azimuth_time": {"long_name": "zero-Doppler azimuth time", "standard_name": "time"},
52 | # NOTE: `slant_range_time` is not expressed as `np.timedelta64[ns]` in order to keep enough
53 | # accuracy for interferometric processing, i.e. c * 1ns / 2 ~= 15cm.
54 | "slant_range_time": {"units": "s", "long_name": "slant range time / two-way delay"},
55 | "ground_range": {"units": "m", "long_name": "ground range"},
56 | "axis": {"units": "1", "long_name": "coordinate index"},
57 | "degree": {"units": "1", "long_name": "polynomial degree"},
58 | "latitude": {"units": "degrees_north", "long_name": "geodetic latitude"},
59 | "longitude": {"units": "degrees_east", "long_name": "geodetic longitude"},
60 | "height": {"units": "m", "long_name": "height above sea level"},
61 | "incidenceAngle": {"units": "°", "long_name": "incidence angle"},
62 | "elevationAngle": {"units": "°", "long_name": "elevation angle"},
63 | "q0": {"units": "1", "long_name": "Q0 attitude quaternion"},
64 | "q1": {"units": "1", "long_name": "Q1 attitude quaternion"},
65 | "q2": {"units": "1", "long_name": "Q2 attitude quaternion"},
66 | "q3": {"units": "1", "long_name": "Q3 attitude quaternion"},
67 | "roll": {"units": "°", "long_name": "platform roll"},
68 | "pitch": {"units": "°", "long_name": "platform pitch"},
69 | "yaw": {"units": "°", "long_name": "platform yaw"},
70 | "wx": {"units": "° s-1", "long_name": "X component of angular velocity vector"},
71 | "wy": {"units": "° s-1", "long_name": "Y component of angular velocity vector"},
72 | "wz": {"units": "° s-1", "long_name": "Z component of angular velocity vector"},
73 | "position": {"units": "m", "long_name": "ECEF position"},
74 | "velocity": {"units": "m s-1", "long_name": "ECEF velocity"},
75 | "sigmaNought": {"units": "m m-1", "long_name": "sigma nought calibration LUT"},
76 | "betaNought": {"units": "m m-1", "long_name": "beta nought calibration LUT"},
77 | "gamma": {"units": "m m-1", "long_name": "gamma calibration LUT"},
78 | "dn": {"units": "1", "long_name": "original digital number calibration LUT"},
79 | "noiseRangeLut": {"units": "1", "long_name": "range thermal noise correction LUT"},
80 | "noiseAzimuthLut": {
81 | "units": "1",
82 | "long_name": "azimuth thermal noise correction LUT",
83 | },
84 | "gr0": {
85 | "units": "m",
86 | "long_name": "ground range origin for slant range calculation",
87 | },
88 | "grsrCoefficients": {
89 | "units": "1",
90 | "long_name": "polynomial to convert from ground range to slant range",
91 | },
92 | "sr0": {
93 | "units": "m",
94 | "long_name": "slant range origin for ground range calculation",
95 | },
96 | "srgrCoefficients": {
97 | "units": "1",
98 | "long_name": "polynomial to convert from slant range to ground range",
99 | },
100 | "t0": {
101 | "units": "s",
102 | "long_name": "Two-way slant range time origin",
103 | },
104 | "data_dc_polynomial": {
105 | "units": "1",
106 | "long_name": "Coppler centroid estimated from data",
107 | },
108 | "azimuth_fm_rate_polynomial": {
109 | "units": "1",
110 | "long_name": "Azimuth FM rate coefficient array",
111 | },
112 | "measurement": {"units": "1", "long_name": "digital number"},
113 | }
114 |
115 |
116 | def update_attributes(ds: xr.Dataset, group: str = "") -> xr.Dataset:
117 | # NOTE: keep the version in sync with the capabilities of CF compliance checkers
118 | ds.attrs["Conventions"] = "CF-1.8"
119 | ds.attrs.update(GROUP_ATTRIBUTES.get(group, {}))
120 | ds.attrs["history"] = f"created by xarray_sentinel-{__version__}"
121 | for var in ds.variables:
122 | attrs = VARIABLE_ATTRIBUTES.get(str(var), {})
123 | ds.variables[var].attrs.update(attrs)
124 | return ds
125 |
--------------------------------------------------------------------------------
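
A minimal usage sketch of update_attributes from conventions.py above (the toy dataset is illustrative): it stamps the CF convention string, the per-group metadata, a history entry, and the per-variable attributes onto a dataset.

    import numpy as np
    import xarray as xr
    from xarray_sentinel import conventions

    ds = xr.Dataset({"latitude": ("points", np.zeros(3)), "height": ("points", np.zeros(3))})
    ds = conventions.update_attributes(ds, group="gcp")
    assert ds.attrs["Conventions"] == "CF-1.8"
    assert ds.attrs["title"] == "Geolocation grid"
    assert ds.latitude.attrs["units"] == "degrees_north"
    assert ds.height.attrs["units"] == "m"
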
/xarray_sentinel/resources/sentinel1/s1-level-1-calibration.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Annotation record for calibration information.
8 |
9 |
10 |
11 |
12 | Swath dependent absolute calibration constant.
13 |
14 |
15 |
16 |
17 |
18 |
19 | Annotation record for a calibration vector at given zero Doppler azimuth time and two-way slant range time.
20 |
21 |
22 |
23 |
24 | Zero Doppler azimuth time at which calibration vector applies.
25 |
26 |
27 |
28 |
29 | Image line at which the calibration vector applies.
30 |
31 |
32 |
33 |
34 | Image pixel at which the calibration vector applies. This array contains the count attribute number of integer values (i.e. one value per point in the calibration vectors), separated by spaces. The maximum length of this array will be one value for every pixel in an image line, however in general the vectors will be subsampled.
35 |
36 |
37 |
38 |
39 | Sigma nought calibration vector. This array contains the count attribute number of floating point values separated by spaces. The values in this vector are aligned with the pixel vector.
40 |
41 |
42 |
43 |
44 | Beta nought calibration vector. This array contains the count attribute number of floating point values separated by spaces. The values in this vector are aligned with the pixel vector.
45 |
46 |
47 |
48 |
49 | Gamma calibration vector. This array contains the count attribute number of floating point values separated by spaces. The values in this vector are aligned with the pixel vector.
50 |
51 |
52 |
53 |
54 | Digital number calibration vector. This array contains the count attribute number of floating point values separated by spaces. The values in this vector are aligned with the pixel vector.
55 |
56 |
57 |
58 |
59 |
60 |
61 | List of calibration vector annotation records.
62 |
63 |
64 |
65 |
66 | Calibration vector record. This record holds the calibration vectors and associated fields required to derive radiometrically calibrated imagery from the image MDS. With a minimum calibration vector update rate of 1s and a maximum product length of 25 minutes, the maximum size of this list is 1500 elements. The azimuth spacing used will be different for different modes and product types.
67 |
68 |
69 |
70 |
71 |
72 | Number of calibrationVector records within the list.
73 |
74 |
75 |
76 |
77 |
78 | Annotation record for Sentinel-1 level 1 calibration product annotations.
79 |
80 |
81 |
82 |
83 | ADS header data set record. This DSR contains information that applies to the entire data set.
84 |
85 |
86 |
87 |
88 | Calibration information. This DSR holds parameters applicable to the image calibration.
89 |
90 |
91 |
92 |
93 | Calibration vector list. This element is a list of calibrationVector records that contain the absolute calibration vectors required to derive radiometrically calibrated imagery from the image MDS. This list contains an entry for each update made along azimuth.
94 |
95 |
96 |
97 |
98 |
99 |
100 | Sentinel-1 level 1 calibration product annotations.
101 |
102 |
103 |
104 |
--------------------------------------------------------------------------------
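
The calibration annotation described above provides beta nought, sigma nought, gamma and DN look-up tables sampled on a coarse (line, pixel) grid. A short sketch of putting them to use with the calibrate_* helpers demonstrated in the notebooks (the product path is one of the test fixtures, so the values are not physically meaningful):

    import xarray as xr
    import xarray_sentinel

    product_path = "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
    measurement = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/VV")
    calibration = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/VV/calibration")
    # convert the digital numbers to calibrated backscatter intensity in dB
    sigma0_db = xarray_sentinel.calibrate_intensity(
        measurement.measurement, calibration.sigmaNought, as_db=True
    )
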
/xarray_sentinel/resources/sentinel1/my-schema_orb.xsd:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/.github/workflows/on-push.yml:
--------------------------------------------------------------------------------
1 | name: on-push
2 |
3 | on:
4 | push:
5 | branches:
6 | - main
7 | tags:
8 | - '*'
9 | pull_request:
10 | branches:
11 | - main
12 |
13 | concurrency:
14 | group: ${{ github.workflow }}-${{ github.ref }}
15 | cancel-in-progress: true
16 |
17 | defaults:
18 | run:
19 | shell: bash -l {0}
20 |
21 | jobs:
22 | pre-commit:
23 | runs-on: ubuntu-latest
24 | steps:
25 | - uses: actions/checkout@v5
26 | - uses: actions/setup-python@v5
27 | with:
28 | python-version: 3.x
29 | - uses: pre-commit/action@v3.0.1
30 |
31 | combine-environments:
32 | runs-on: ubuntu-latest
33 |
34 | steps:
35 | - uses: actions/checkout@v5
36 | - uses: actions/setup-python@v5
37 | with:
38 | python-version: 3.x
39 | - name: Install conda-merge
40 | run: |
41 | python -m pip install conda-merge
42 | - name: Combine environments
43 | run: |
44 | for SUFFIX in ci integration; do
45 | conda-merge ci/environment-$SUFFIX.yml environment.yml > ci/combined-environment-$SUFFIX.yml || exit
46 | done
47 | - uses: actions/upload-artifact@v4
48 | with:
49 | name: combined-environments
50 | path: ci/combined-environment-*.yml
51 |
52 | unit-tests:
53 | name: unit-tests
54 | needs: combine-environments
55 | runs-on: ubuntu-latest
56 | strategy:
57 | matrix:
58 | python-version: ['3.13']
59 |
60 | steps:
61 | - uses: actions/checkout@v5
62 | - uses: actions/download-artifact@v5
63 | with:
64 | name: combined-environments
65 | path: ci
66 | - name: Get current date
67 | id: date
68 | run: echo "date=$(date +%Y-%m-%d)" >> "${GITHUB_OUTPUT}"
69 | - uses: mamba-org/setup-micromamba@v2
70 | with:
71 | environment-file: ci/combined-environment-ci.yml
72 | environment-name: DEVELOP
73 | cache-environment: true
74 | cache-environment-key: environment-${{ steps.date.outputs.date }}
75 | cache-downloads-key: downloads-${{ steps.date.outputs.date }}
76 | create-args: >-
77 | python=${{ matrix.python-version }}
78 | - name: Install uv
79 | run: |
80 | python -m pip install uv
81 | - name: Run tests
82 | run: |
83 | make unit-tests COV_REPORT=xml
84 |
85 | type-check:
86 | needs: [combine-environments, unit-tests]
87 | runs-on: ubuntu-latest
88 |
89 | steps:
90 | - uses: actions/checkout@v5
91 | - uses: actions/download-artifact@v5
92 | with:
93 | name: combined-environments
94 | path: ci
95 | - name: Get current date
96 | id: date
97 | run: echo "date=$(date +%Y-%m-%d)" >> "${GITHUB_OUTPUT}"
98 | - uses: mamba-org/setup-micromamba@v2
99 | with:
100 | environment-file: ci/combined-environment-ci.yml
101 | environment-name: DEVELOP
102 | cache-environment: true
103 | cache-environment-key: environment-${{ steps.date.outputs.date }}
104 | cache-downloads-key: downloads-${{ steps.date.outputs.date }}
105 | create-args: >-
106 | python=3.13
107 | - name: Install uv
108 | run: |
109 | python -m pip install uv
110 | - name: Run code quality checks
111 | run: |
112 | make type-check
113 |
114 | # docs-build:
115 | # needs: [combine-environments, unit-tests]
116 | # runs-on: ubuntu-latest
117 |
118 | # steps:
119 | # - uses: actions/checkout@v5
120 | # - uses: actions/download-artifact@v5
121 | # with:
122 | # name: combined-environments
123 | # path: ci
124 | # - name: Get current date
125 | # id: date
126 | # run: echo "date=$(date +%Y-%m-%d)" >> "${GITHUB_OUTPUT}"
127 | # - uses: mamba-org/setup-micromamba@v2
128 | # with:
129 | # environment-file: ci/combined-environment-ci.yml
130 | # environment-name: DEVELOP
131 | # cache-environment: true
132 | # cache-environment-key: environment-${{ steps.date.outputs.date }}
133 | # cache-downloads-key: downloads-${{ steps.date.outputs.date }}
134 | # create-args: >-
135 | # python=3.13
136 | # - name: Install uv
137 | # run: |
138 | # python -m pip install uv
139 | # - name: Build documentation
140 | # run: |
141 | # make docs-build
142 |
143 | integration-tests:
144 | needs: [unit-tests]
145 | if: |
146 | success() && true
147 | runs-on: ubuntu-latest
148 | strategy:
149 | matrix:
150 | python-version:
151 | - "3.10"
152 | - "3.11"
153 | - "3.12"
154 |
155 | steps:
156 | - uses: actions/checkout@v5
157 |
158 | - name: Install uv and set the python version
159 | uses: astral-sh/setup-uv@v6
160 | with:
161 | python-version: ${{ matrix.python-version }}
162 |
163 | - name: Install the project
164 | run: uv sync --locked --dev
165 |
166 | - name: Run tests
167 | run: uv run pytest -vv
168 |
169 | minver-tests:
170 | needs: [unit-tests]
171 | if: |
172 | success() && true
173 | runs-on: ubuntu-latest
174 |
175 | steps:
176 | - uses: actions/checkout@v5
177 |
178 | - name: Install uv and set the python version
179 | uses: astral-sh/setup-uv@v6
180 | with:
181 | python-version: "3.10"
182 |
183 | - name: Install the project
184 | run: uv sync --dev --resolution lowest-direct
185 |
186 | - name: Run tests
187 | run: uv run pytest -vv
188 |
189 | distribution:
190 | runs-on: ubuntu-latest
191 | needs: [unit-tests, type-check, integration-tests]
192 | if: |
193 | always() &&
194 | needs.unit-tests.result == 'success' &&
195 | needs.type-check.result == 'success' &&
196 |
197 | (needs.integration-tests.result == 'success' || needs.integration-tests.result == 'skipped')
198 |
199 | steps:
200 | - uses: actions/checkout@v5
201 | - uses: actions/setup-python@v5
202 | with:
203 | python-version: '3.12'
204 | - name: Install package
205 | run: |
206 | python -m pip install --upgrade pip
207 | python -m pip install build twine
208 | - name: Build distribution
209 | run: |
210 | python -m build
211 | - name: Check wheels
212 | run: |
213 | cd dist || exit
214 | python -m pip install xarray_sentinel*.whl || exit
215 | python -m twine check --strict * || exit
216 | python -c "import xarray_sentinel" || exit
217 | cd ..
218 | - uses: actions/upload-artifact@v4
219 | with:
220 | name: distribution
221 | path: dist
222 |
223 | upload-to-pypi:
224 | runs-on: ubuntu-latest
225 | needs: distribution
226 | if: |
227 | always() && true &&
228 | needs.distribution.result == 'success' &&
229 | github.event_name == 'push' &&
230 | startsWith(github.ref, 'refs/tags')
231 | environment:
232 | name: pypi
233 | url: https://pypi.org/p/xarray-sentinel
234 | permissions:
235 | id-token: write # IMPORTANT: this permission is mandatory for trusted publish
236 |
237 | steps:
238 | - uses: actions/download-artifact@v5
239 | with:
240 | name: distribution
241 | path: dist
242 | - uses: pypa/gh-action-pypi-publish@v1.12.3
243 | with:
244 | verbose: true
245 |
--------------------------------------------------------------------------------
/tests/test_30_xarray_backends.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 |
3 | import pytest
4 | import xarray as xr
5 |
6 | from xarray_sentinel import esa_safe
7 |
8 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
9 |
10 |
11 | COMMON_ATTRIBUTES = {
12 | "family_name": "SENTINEL-1",
13 | "number": "B",
14 | "mode": "IW",
15 | "swaths": ["IW1", "IW2", "IW3"],
16 | "orbit_number": 26269,
17 | "relative_orbit_number": 168,
18 | "pass": "DESCENDING",
19 | "ascending_node_time": "2021-04-01T04:49:55.637823",
20 | "transmitter_receiver_polarisations": ["VV", "VH"],
21 | "product_type": "SLC",
22 | }
23 |
24 |
25 | SENTINEL1_SLC_PRODUCTS = [
26 | (
27 | DATA_FOLDER
28 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE",
29 | "IW1/VV",
30 | ),
31 | (
32 | DATA_FOLDER
33 | / "S1A_EW_SLC__1SDH_20210403T122536_20210403T122630_037286_046484_8152.SAFE",
34 | "EW1/HH",
35 | ),
36 | (
37 | DATA_FOLDER
38 | / "S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001.SAFE",
39 | "S3/VH",
40 | ),
41 | ]
42 |
43 | SENTINEL1_GRD_PRODUCTS = [
44 | (
45 | DATA_FOLDER
46 | / "S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8.SAFE",
47 | "IW/VV",
48 | ),
49 | ]
50 | SENTINEL1_PRODUCTS = SENTINEL1_SLC_PRODUCTS + SENTINEL1_GRD_PRODUCTS
51 |
52 |
53 | def test_open_dataset_root() -> None:
54 | product_path = (
55 | DATA_FOLDER
56 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
57 | )
58 | res = xr.open_dataset(product_path, engine="sentinel-1")
59 |
60 | assert isinstance(res, xr.Dataset)
61 | for attr_name in COMMON_ATTRIBUTES:
62 | assert attr_name in res.attrs
63 | assert res.attrs[attr_name] == COMMON_ATTRIBUTES[attr_name]
64 |
65 | res = xr.open_dataset(product_path)
66 |
67 | assert isinstance(res, xr.Dataset)
68 |
69 | product_path = product_path / "manifest.safe"
70 |
71 | res = xr.open_dataset(product_path, engine="sentinel-1")
72 |
73 | assert isinstance(res, xr.Dataset)
74 |
75 | res = xr.open_dataset(product_path)
76 |
77 | assert isinstance(res, xr.Dataset)
78 |
79 | with pytest.raises(ValueError):
80 | xr.open_dataset("")
81 |
82 |
83 | @pytest.mark.parametrize("product_path,swath_pol", SENTINEL1_SLC_PRODUCTS)
84 | def test_open_dataset_polarisation_slc(
85 | product_path: esa_safe.PathType,
86 | swath_pol: str,
87 | ) -> None:
88 | res = xr.open_dataset(product_path, engine="sentinel-1", group=swath_pol)
89 |
90 | assert isinstance(res, xr.Dataset)
91 | assert set(res.dims) == {"line", "pixel"} or set(res.dims) == {
92 | "azimuth_time",
93 | "slant_range_time",
94 | }
95 | assert set(res.coords) == {"azimuth_time", "slant_range_time", "line", "pixel"}
96 |
97 |
98 | @pytest.mark.parametrize("product_path,swath_pol", SENTINEL1_GRD_PRODUCTS)
99 | def test_open_dataset_polarisation_grd(
100 | product_path: esa_safe.PathType,
101 | swath_pol: str,
102 | ) -> None:
103 | res = xr.open_dataset(product_path, engine="sentinel-1", group=swath_pol)
104 |
105 | assert isinstance(res, xr.Dataset)
106 | assert set(res.dims) == {"line", "pixel"} or set(res.dims) == {
107 | "azimuth_time",
108 | "ground_range",
109 | }
110 | assert set(res.coords) == {"azimuth_time", "ground_range", "line", "pixel"}
111 |
112 |
113 | @pytest.mark.parametrize("product_path,swath_pol", SENTINEL1_PRODUCTS)
114 | def test_open_dataset_orbit(
115 | product_path: esa_safe.PathType,
116 | swath_pol: str,
117 | ) -> None:
118 | res = xr.open_dataset(product_path, engine="sentinel-1", group=f"{swath_pol}/orbit")
119 |
120 | assert isinstance(res, xr.Dataset)
121 | assert set(res.dims) == {"axis", "azimuth_time"}
122 | assert set(res.variables) == {"azimuth_time", "axis", "velocity", "position"}
123 |
124 |
125 | @pytest.mark.parametrize("product_path,swath_pol", SENTINEL1_PRODUCTS)
126 | def test_open_dataset_attitude(
127 | product_path: esa_safe.PathType,
128 | swath_pol: str,
129 | ) -> None:
130 | res = xr.open_dataset(
131 | product_path, engine="sentinel-1", group=f"{swath_pol}/attitude"
132 | )
133 |
134 | assert isinstance(res, xr.Dataset)
135 | assert set(res.dims) == {"azimuth_time"}
136 | expected = {
137 | "azimuth_time",
138 | "roll",
139 | "pitch",
140 | "yaw",
141 | "q0",
142 | "q1",
143 | "q2",
144 | "q3",
145 | "wx",
146 | "wy",
147 | "wz",
148 | }
149 | assert set(res.variables) == expected
150 |
151 |
152 | @pytest.mark.parametrize("product_path,swath_pol", SENTINEL1_PRODUCTS)
153 | def test_open_dataset_gcp(
154 | product_path: esa_safe.PathType,
155 | swath_pol: str,
156 | ) -> None:
157 | res = xr.open_dataset(product_path, engine="sentinel-1", group=f"{swath_pol}/gcp")
158 |
159 | assert isinstance(res, xr.Dataset)
160 | assert set(res.dims) == {"azimuth_time", "slant_range_time"}
161 |
162 |
163 | @pytest.mark.parametrize("product_path,swath_pol", SENTINEL1_PRODUCTS)
164 | def test_open_dataset_dc_estimate(
165 | product_path: esa_safe.PathType,
166 | swath_pol: str,
167 | ) -> None:
168 | res = xr.open_dataset(
169 | product_path, engine="sentinel-1", group=f"{swath_pol}/dc_estimate"
170 | )
171 |
172 | assert isinstance(res, xr.Dataset)
173 | assert set(res.dims) == {"azimuth_time", "degree"}
174 |
175 |
176 | def test_open_pol_dataset() -> None:
177 | product_path = (
178 | DATA_FOLDER
179 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
180 | )
181 | expected_variables = {
182 | "measurement",
183 | "line",
184 | "pixel",
185 | "slant_range_time",
186 | "azimuth_time",
187 | }
188 | res = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/VV")
189 |
190 | assert isinstance(res, xr.Dataset)
191 | for attr_name in COMMON_ATTRIBUTES:
192 | assert attr_name in res.attrs
193 | assert res.attrs[attr_name] == COMMON_ATTRIBUTES[attr_name]
194 |
195 | assert set(res.dims) == {"line", "pixel"}
196 | assert set(res.variables) == expected_variables
197 |
198 |
199 | def test_burst_id_attribute() -> None:
200 | product_path = (
201 | DATA_FOLDER
202 | / "S1A_IW_SLC__1SDH_20220414T102209_20220414T102236_042768_051AA4_E677.SAFE"
203 | )
204 |
205 | res = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/HH")
206 | assert "burst_ids" in res.attrs
207 | assert len(res.attrs["burst_ids"]) == res.attrs["number_of_bursts"]
208 |
209 | product_path = (
210 | DATA_FOLDER
211 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
212 | )
213 | res = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/VV")
214 | assert "burst_ids" not in res.attrs
215 |
216 |
217 | def test_open_calibration_dataset() -> None:
218 | annotation_path = (
219 | DATA_FOLDER
220 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
221 | )
222 | res = xr.open_dataset(
223 | annotation_path, engine="sentinel-1", group="IW1/VV/calibration"
224 | )
225 |
226 | assert isinstance(res, xr.Dataset)
227 | assert set(res.dims) == {"line", "pixel"}
228 |
--------------------------------------------------------------------------------
/notebooks/fsspec.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "76a45b2f",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "import xarray as xr\n",
11 | "\n",
12 | "import xarray_sentinel"
13 | ]
14 | },
15 | {
16 | "cell_type": "code",
17 | "execution_count": null,
18 | "id": "8e8bd886",
19 | "metadata": {},
20 | "outputs": [],
21 | "source": [
22 | "# rasterio 1.3 is needed for fsspec support to work\n",
23 | "# pip install -U --pre --no-deps --no-binary rasterio \"rasterio>=1.3a3\""
24 | ]
25 | },
26 | {
27 | "cell_type": "markdown",
28 | "id": "7d350b72",
29 | "metadata": {},
30 | "source": [
31 | "## local zip file"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "id": "ec537b7f",
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "product_path = \"zip://*/manifest.safe::data/S1B_S6_GRDH_1SDV_20211216T115438_20211216T115501_030050_03968A_0F8A.zip\"\n",
42 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path)\n",
43 | "ds"
44 | ]
45 | },
46 | {
47 | "cell_type": "code",
48 | "execution_count": null,
49 | "id": "2858e010",
50 | "metadata": {},
51 | "outputs": [],
52 | "source": [
53 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path, group=\"S6/VH/orbit\")\n",
54 | "ds"
55 | ]
56 | },
57 | {
58 | "cell_type": "code",
59 | "execution_count": null,
60 | "id": "c1d5ec7a",
61 | "metadata": {
62 | "scrolled": false
63 | },
64 | "outputs": [],
65 | "source": [
66 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path, group=\"S6/VH\")\n",
67 | "ds"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": null,
73 | "id": "c3bc593b",
74 | "metadata": {
75 | "scrolled": false
76 | },
77 | "outputs": [],
78 | "source": [
79 | "_ = ds.measurement[2000:4000, 2000:4000].plot(vmax=100)"
80 | ]
81 | },
82 | {
83 | "cell_type": "markdown",
84 | "id": "0bd337b6",
85 | "metadata": {},
86 | "source": [
87 | "## remote fake file from github"
88 | ]
89 | },
90 | {
91 | "cell_type": "code",
92 | "execution_count": null,
93 | "id": "d260c490",
94 | "metadata": {},
95 | "outputs": [],
96 | "source": [
97 | "product_path = \"github://bopen:xarray-sentinel@/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE\"\n",
98 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path)\n",
99 | "ds"
100 | ]
101 | },
102 | {
103 | "cell_type": "code",
104 | "execution_count": null,
105 | "id": "5a316651",
106 | "metadata": {},
107 | "outputs": [],
108 | "source": [
109 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path, group=\"IW1/VH/orbit\")\n",
110 | "ds"
111 | ]
112 | },
113 | {
114 | "cell_type": "code",
115 | "execution_count": null,
116 | "id": "07302996",
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path, group=\"IW1/VH/2\")\n",
121 | "ds"
122 | ]
123 | },
124 | {
125 | "cell_type": "code",
126 | "execution_count": null,
127 | "id": "6bea7088",
128 | "metadata": {},
129 | "outputs": [],
130 | "source": [
131 | "abs(ds.measurement).mean().compute()"
132 | ]
133 | },
134 | {
135 | "cell_type": "markdown",
136 | "id": "e3ee52d6",
137 | "metadata": {},
138 | "source": [
139 | "## remote fake file from a zip on github"
140 | ]
141 | },
142 | {
143 | "cell_type": "code",
144 | "execution_count": null,
145 | "id": "cf2ea217",
146 | "metadata": {},
147 | "outputs": [],
148 | "source": [
149 | "product_path = \"zip://*/manifest.safe::github://bopen:xarray-sentinel@/tests/data/S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.zip\"\n",
150 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path)\n",
151 | "ds"
152 | ]
153 | },
154 | {
155 | "cell_type": "code",
156 | "execution_count": null,
157 | "id": "634089c1",
158 | "metadata": {},
159 | "outputs": [],
160 | "source": [
161 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path, group=\"IW1/VH/orbit\")\n",
162 | "ds"
163 | ]
164 | },
165 | {
166 | "cell_type": "code",
167 | "execution_count": null,
168 | "id": "3e656f38",
169 | "metadata": {},
170 | "outputs": [],
171 | "source": [
172 | "ds = xarray_sentinel.open_sentinel1_dataset(product_path, group=\"IW1/VH/2\")\n",
173 | "ds"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": null,
179 | "id": "58bc770e",
180 | "metadata": {},
181 | "outputs": [],
182 | "source": [
183 | "abs(ds.measurement).mean().compute()"
184 | ]
185 | },
186 | {
187 | "cell_type": "markdown",
188 | "id": "a498362e",
189 | "metadata": {},
190 | "source": [
191 | "## remote file on Microsoft Planetary Computer"
192 | ]
193 | },
194 | {
195 | "cell_type": "code",
196 | "execution_count": null,
197 | "id": "d8ab1d18",
198 | "metadata": {},
199 | "outputs": [],
200 | "source": [
201 | "import planetary_computer\n",
202 | "\n",
203 | "account_name = \"sentinel1euwest\"\n",
204 | "token = planetary_computer.sas.get_token(account_name, \"s1-grd\").token\n",
205 | "\n",
206 | "product_folder = (\n",
207 | " \"s1-grd/GRD/2022/1/10/IW/DV/\"\n",
208 | " \"S1A_IW_GRDH_1SDV_20220110T050922_20220110T050947_041394_04EBF7_A360\"\n",
209 | ")\n",
210 | "\n",
211 | "storage_options = {\"account_name\": \"sentinel1euwest\", \"sas_token\": token}\n",
212 | "product_path = f\"abfs://{product_folder}/manifest.safe\"\n",
213 | "\n",
214 | "ds = xr.open_dataset(\n",
215 | " product_path,\n",
216 | " engine=\"sentinel-1\",\n",
217 | " storage_options=storage_options,\n",
218 | ")\n",
219 | "ds"
220 | ]
221 | },
222 | {
223 | "cell_type": "code",
224 | "execution_count": null,
225 | "id": "97a42b81",
226 | "metadata": {},
227 | "outputs": [],
228 | "source": [
229 | "ds = xr.open_dataset(\n",
230 | " product_path,\n",
231 | " engine=\"sentinel-1\",\n",
232 | " storage_options=storage_options,\n",
233 | " group=\"IW/VV\",\n",
234 | " chunks=2048,\n",
235 | " override_product_files=\"{dirname}/{swath}-{polarization}{ext}\",\n",
236 | ")\n",
237 | "ds"
238 | ]
239 | },
240 | {
241 | "cell_type": "code",
242 | "execution_count": null,
243 | "id": "c70f9b0b",
244 | "metadata": {},
245 | "outputs": [],
246 | "source": [
247 | "_ = ds.measurement[:2048, 2048:4096].plot(vmax=300)"
248 | ]
249 | },
250 | {
251 | "cell_type": "code",
252 | "execution_count": null,
253 | "id": "54a28d57",
254 | "metadata": {},
255 | "outputs": [],
256 | "source": []
257 | }
258 | ],
259 | "metadata": {
260 | "kernelspec": {
261 | "display_name": "Python 3 (ipykernel)",
262 | "language": "python",
263 | "name": "python3"
264 | },
265 | "language_info": {
266 | "codemirror_mode": {
267 | "name": "ipython",
268 | "version": 3
269 | },
270 | "file_extension": ".py",
271 | "mimetype": "text/x-python",
272 | "name": "python",
273 | "nbconvert_exporter": "python",
274 | "pygments_lexer": "ipython3",
275 | "version": "3.10.4"
276 | }
277 | },
278 | "nbformat": 4,
279 | "nbformat_minor": 5
280 | }
281 |
--------------------------------------------------------------------------------
/xarray_sentinel/esa_safe.py:
--------------------------------------------------------------------------------
1 | import functools
2 | import importlib.resources
3 | import os
4 | import re
5 | from typing import Any, Mapping, TextIO, Union
6 | from xml.etree import ElementTree
7 |
8 | import xmlschema
9 |
10 | PathType = Union[str, "os.PathLike[str]"]
11 | PathOrFileType = Union[PathType, TextIO]
12 |
13 |
14 | SENTINEL1_NAMESPACES = {
15 | "safe": "http://www.esa.int/safe/sentinel-1.0",
16 | "s1": "http://www.esa.int/safe/sentinel-1.0/sentinel-1",
17 | "s1sarl1": "http://www.esa.int/safe/sentinel-1.0/sentinel-1/sar/level-1",
18 | "gml": "http://www.opengis.net/gml",
19 | }
20 |
21 | SENTINEL1_FOLDER = importlib.resources.files("xarray_sentinel") / "resources/sentinel1"
22 | SENTINEL1_SCHEMAS = {
23 | "manifest": SENTINEL1_FOLDER / "my-xfdu.xsd",
24 | "annotation": SENTINEL1_FOLDER / "s1-level-1-product.xsd",
25 | "calibration": SENTINEL1_FOLDER / "s1-level-1-calibration.xsd",
26 | "noise": SENTINEL1_FOLDER / "s1-level-1-noise.xsd",
27 | "aux_orbit": SENTINEL1_FOLDER / "my-schema_orb.xsd",
28 | }
29 |
30 | SENTINEL2_NAMESPACES = {
31 | "safe": "http://www.esa.int/safe/sentinel/1.1",
32 | }
33 |
34 |
35 | @functools.lru_cache
36 | def cached_sentinel1_schemas(schema_type: str) -> xmlschema.XMLSchema:
37 | return xmlschema.XMLSchema(str(SENTINEL1_SCHEMAS[schema_type]))
38 |
39 |
40 | def parse_tag(
41 | xml_path: PathOrFileType,
42 | query: str,
43 | schema_type: str = "annotation",
44 | validation: str = "skip",
45 | ) -> dict[str, Any]:
46 | schema = cached_sentinel1_schemas(schema_type)
47 | if hasattr(xml_path, "seek"):
48 | xml_path.seek(0)
49 | xml_tree = ElementTree.parse(xml_path)
50 | tag_dict: Any = schema.decode(xml_tree, query, validation=validation)
51 | assert isinstance(tag_dict, dict), f"{type(tag_dict)} is not dict"
52 | return tag_dict
53 |
54 |
55 | def parse_tag_as_list(
56 | xml_path: PathOrFileType,
57 | query: str,
58 | schema_type: str = "annotation",
59 | validation: str = "skip",
60 | ) -> list[dict[str, Any]]:
61 | schema = cached_sentinel1_schemas(schema_type)
62 | xml_tree = ElementTree.parse(xml_path)
63 | tag: Any = schema.decode(xml_tree, query, validation=validation)
64 | if tag is None:
65 | tag = []
66 | elif isinstance(tag, dict):
67 | tag = [tag]
68 | tag_list: list[dict[str, Any]] = tag
69 | assert isinstance(tag_list, list), f"{type(tag_list)} is not list"
70 | return tag_list
71 |
72 |
73 | def findtext(
74 | tree: ElementTree.Element,
75 | query: str,
76 | namespaces: dict[str, str] = SENTINEL1_NAMESPACES,
77 | ) -> str:
78 | value = tree.findtext(query, namespaces=namespaces)
79 | if value is None:
80 | raise ValueError(f"{query=} returned None")
81 | return value
82 |
83 |
84 | def findall(
85 | tree: ElementTree.Element,
86 | query: str,
87 | namespaces: dict[str, str] = SENTINEL1_NAMESPACES,
88 | ) -> list[str]:
89 | tags = tree.findall(query, namespaces=namespaces)
90 | values: list[str] = []
91 | for tag in tags:
92 | if tag.text is None:
93 | raise ValueError(f"{query=} returned None")
94 | values.append(tag.text)
95 | return values
96 |
97 |
98 | def parse_annotation_filename(name: str) -> tuple[str, str, str, str]:
99 | match = re.match(
100 | r"([a-z-]*)s1[abcd]-([^-]*)-[^-]*-([^-]*)-([\dt]*)-", os.path.basename(name)
101 | )
102 | if match is None:
103 | raise ValueError(f"cannot parse name {name!r}")
104 | return tuple(match.groups()) # type: ignore
105 |
106 |
107 | @functools.lru_cache
108 | def parse_manifest_sentinel1(
109 | manifest_path: PathOrFileType,
110 | ) -> tuple[dict[str, Any], dict[str, tuple[str, str, str, str, str]]]:
111 | # We use ElementTree because we didn't find an XSD definition for the manifest
112 | manifest = ElementTree.parse(manifest_path).getroot()
113 |
114 | family_name = findtext(manifest, ".//safe:platform/safe:familyName")
115 | if family_name != "SENTINEL-1":
116 | raise ValueError(f"{family_name=} not supported")
117 |
118 | number = findtext(manifest, ".//safe:platform/safe:number")
119 | mode = findtext(manifest, ".//s1sarl1:instrumentMode/s1sarl1:mode")
120 | swaths = findall(manifest, ".//s1sarl1:instrumentMode/s1sarl1:swath")
121 |
122 | orbit_number = findall(manifest, ".//safe:orbitNumber")
123 | if len(orbit_number) != 2 or orbit_number[0] != orbit_number[1]:
124 | raise ValueError(f"{orbit_number=} not supported")
125 |
126 | relative_orbit_number = findall(manifest, ".//safe:relativeOrbitNumber")
127 | if (
128 | len(relative_orbit_number) != 2
129 | or relative_orbit_number[0] != relative_orbit_number[1]
130 | ):
131 | raise ValueError(f"{relative_orbit_number=} not supported")
132 |
133 | orbit_pass = findtext(manifest, ".//s1:pass")
134 | if orbit_pass not in {"ASCENDING", "DESCENDING"}:
135 | raise ValueError(f"pass={orbit_pass} not supported")
136 |
137 | ascending_node_time = findtext(manifest, ".//s1:ascendingNodeTime")
138 |
139 | mission_data_take_id = findtext(manifest, ".//s1sarl1:missionDataTakeID")
140 | transmitter_receiver_polarisations = findall(
141 | manifest, ".//s1sarl1:transmitterReceiverPolarisation"
142 | )
143 | product_type = findtext(manifest, ".//s1sarl1:productType")
144 |
145 | start_time = findtext(manifest, ".//safe:startTime")
146 | stop_time = findtext(manifest, ".//safe:stopTime")
147 |
148 | attributes = {
149 | "family_name": family_name,
150 | "number": number,
151 | "mode": mode,
152 | "swaths": swaths,
153 | "orbit_number": int(orbit_number[0]),
154 | "relative_orbit_number": int(relative_orbit_number[0]),
155 | "pass": orbit_pass,
156 | "ascending_node_time": ascending_node_time,
157 | "mission_data_take_id": int(mission_data_take_id),
158 | "transmitter_receiver_polarisations": transmitter_receiver_polarisations,
159 | "product_type": product_type,
160 | "start_time": start_time,
161 | "stop_time": stop_time,
162 | }
163 |
164 | files = {}
165 |
166 | for file_tag in manifest.findall(".//dataObjectSection/dataObject"):
167 | location_tag = file_tag.find(".//fileLocation")
168 | if location_tag is not None:
169 | file_href = location_tag.attrib["href"]
170 | try:
171 | description = parse_annotation_filename(os.path.basename(file_href))
172 | except ValueError:
173 | continue
174 | file_type = file_tag.attrib["repID"]
175 | files[file_href] = (file_type,) + description
176 |
177 | return attributes, files
178 |
179 |
180 | # DEPRECATED
181 | def make_stac_item(attrs: Mapping[str, Any]) -> dict[str, Any]:
182 | assert attrs["family_name"] == "SENTINEL-1"
183 |
184 | stac_item = {
185 | "constellation": "sentinel-1",
186 | "platform": "sentinel-1" + attrs["number"].lower(),
187 | "instrument": ["c-sar"],
188 | "sat:orbit_state": attrs["pass"].lower(),
189 | "sat:absolute_orbit": attrs["orbit_number"],
190 | "sat:relative_orbit": attrs["relative_orbit_number"],
191 | "sat:anx_datetime": attrs["ascending_node_time"] + "Z",
192 | "sar:instrument_mode": attrs["mode"],
193 | "sar:frequency_band": "C",
194 | "sar:polarizations": attrs["transmitter_receiver_polarisations"],
195 | "sar:product_type": attrs["product_type"],
196 | "sar:observation_direction": "right",
197 | }
198 | return stac_item
199 |
--------------------------------------------------------------------------------
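
A brief sketch of what parse_manifest_sentinel1 above returns; the repID value is illustrative, and the product path is one of the test fixtures (see tests/test_10_esa_safe.py for real attribute values):

    from xarray_sentinel import esa_safe

    attrs, files = esa_safe.parse_manifest_sentinel1(
        "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE/manifest.safe"
    )
    attrs["product_type"]  # "SLC"
    attrs["swaths"]  # ["IW1", "IW2", "IW3"]
    # `files` maps each annotation href to a (repID, prefix, swath, polarisation, start time) tuple,
    # e.g. ("s1Level1ProductSchema", "", "iw1", "vv", "20210401t052624") where the repID is illustrative
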
/xarray_sentinel/resources/sentinel1/s1-level-1-noise.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | Annotation record for range noise vectors.
8 |
9 |
10 |
11 |
12 | Zero Doppler azimuth time at which noise vector applies [UTC].
13 |
14 |
15 |
16 |
17 | Image line at which the noise vector applies.
18 |
19 |
20 |
21 |
22 | Image pixel at which the noise vector applies. This array contains the count attribute number of integer values (i.e. one value per point in the noise vector), separated by spaces. The maximum length of this array will be one value for every pixel in an image line, however in general the vectors will be subsampled.
23 |
24 |
25 |
26 |
27 | Range thermal noise correction vector power values. This array contains the count attribute number of floating point values separated by spaces.
28 |
29 |
30 |
31 |
32 |
33 |
34 | List of noise range vector annotation records.
35 |
36 |
37 |
38 |
39 | Noise range vector. This record contains the thermal noise estimation annotations which can be used to remove thermal noise from the image. With a minimum noise vector update rate of 1s and a maximum product length of 25 minutes, the maximum size of this list is 1500 elements. The azimuth spacing used will be different for different modes and product types.
40 |
41 |
42 |
43 |
44 |
45 | Number of noiseRangeVector records within the list.
46 |
47 |
48 |
49 |
50 |
51 | Annotation record for azimuth noise vectors.
52 |
53 |
54 |
55 |
56 | Swath to which the noise vector applies.
57 |
58 |
59 |
60 |
61 | The first line at which this annotation applies.
62 |
63 |
64 |
65 |
66 | The first sample at which this annotation applies.
67 |
68 |
69 |
70 |
71 | The last line at which this annotation applies.
72 |
73 |
74 |
75 |
76 | The last sample at which this annotation applies.
77 |
78 |
79 |
80 |
81 | Image line at which the noise vector applies. This array contains the count attribute number of integer values (i.e. one value per point in the noise vector), separated by spaces. The maximum length of this array will be one value for every line in an image pixel, however in general the vectors will be subsampled.
82 |
83 |
84 |
85 |
86 | Azimuth thermal noise correction vector power values. This array contains the count attribute number of floating point values separated by spaces.
87 |
88 |
89 |
90 |
91 |
92 |
93 | List of noise azimuth vector annotation records.
94 |
95 |
96 |
97 |
98 | Noise azimuth vector. This record contains the thermal noise estimation annotations which can be used to remove thermal noise from the image.
99 |
100 |
101 |
102 |
103 |
104 | Number of noiseAzimuthVector records within the list.
105 |
106 |
107 |
108 |
109 |
110 | Annotation record for Sentinel-1 level 1 noise product annotations.
111 |
112 |
113 |
114 |
115 | ADS header data set record. This DSR contains information that applies to the entire data set.
116 |
117 |
118 |
119 |
120 | Range noise vector list. This element is a list of noiseRangeVector records that contain the range thermal noise estimation for the image MDS. The list contains an entry for each update made along azimuth.
121 |
122 |
123 |
124 |
125 | Azimuth noise vector list. This annotation divides the image in blocks providing a list of azimuth noise vector records that contain the thermal noise estimation for the block. The block belongs to a (sub-)swath (i.e. it can't cross by design two swaths) and it is delimited by firstAzimuthLine, lastAzimuthLine, firstRangeSample, lastRangeSample.
126 |
127 |
128 |
129 |
130 |
131 |
132 | Sentinel-1 level 1 thermal noise level product annotations.
133 |
134 |
135 |
136 |
--------------------------------------------------------------------------------
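
The range and azimuth noise vectors described above are exposed by the sentinel-1 backend as their own groups, following the same pattern as the calibration group; the group names below are an assumption based on the package conventions and are not verified against this snapshot:

    import xarray as xr

    product_path = "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
    # thermal noise correction LUTs, sampled along range and along azimuth respectively
    noise_range = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/VV/noise_range")
    noise_azimuth = xr.open_dataset(product_path, engine="sentinel-1", group="IW1/VV/noise_azimuth")
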
/tests/test_10_esa_safe.py:
--------------------------------------------------------------------------------
1 | import pathlib
2 | from typing import Any, Dict
3 | from xml.etree import ElementTree
4 |
5 | import pytest
6 | import xmlschema
7 |
8 | from xarray_sentinel import esa_safe
9 |
10 | DATA_FOLDER = pathlib.Path(__file__).parent / "data"
11 |
12 | SENTINEL1_ATTRIBUTES = {
13 | "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4": {
14 | "ascending_node_time": "2021-04-01T04:49:55.637823",
15 | "family_name": "SENTINEL-1",
16 | "mode": "IW",
17 | "number": "B",
18 | "orbit_number": 26269,
19 | "pass": "DESCENDING",
20 | "product_type": "SLC",
21 | "mission_data_take_id": 205463,
22 | "relative_orbit_number": 168,
23 | "start_time": "2021-04-01T05:26:22.396989",
24 | "stop_time": "2021-04-01T05:26:50.325833",
25 | "swaths": ["IW1", "IW2", "IW3"],
26 | "transmitter_receiver_polarisations": ["VV", "VH"],
27 | },
28 | "S1A_S6_SLC__1SDV_20210402T115512_20210402T115535_037271_046407_39FD": {
29 | "ascending_node_time": "2021-04-02T11:17:22.132050",
30 | "family_name": "SENTINEL-1",
31 | "mode": "SM",
32 | "number": "A",
33 | "orbit_number": 37271,
34 | "pass": "DESCENDING",
35 | "product_type": "SLC",
36 | "mission_data_take_id": 287751,
37 | "relative_orbit_number": 99,
38 | "start_time": "2021-04-02T11:55:12.030410",
39 | "stop_time": "2021-04-02T11:55:35.706705",
40 | "swaths": ["S6"],
41 | "transmitter_receiver_polarisations": ["VV", "VH"],
42 | },
43 | "S1A_S3_SLC__1SDV_20210401T152855_20210401T152914_037258_04638E_6001": {
44 | "ascending_node_time": "2021-04-01T13:53:42.874198",
45 | "family_name": "SENTINEL-1",
46 | "mode": "SM",
47 | "number": "A",
48 | "orbit_number": 37258,
49 | "pass": "ASCENDING",
50 | "product_type": "SLC",
51 | "mission_data_take_id": 287630,
52 | "relative_orbit_number": 86,
53 | "start_time": "2021-04-01T15:28:55.111501",
54 | "stop_time": "2021-04-01T15:29:14.277650",
55 | "swaths": ["S3"],
56 | "transmitter_receiver_polarisations": ["VV", "VH"],
57 | },
58 | "S1A_EW_SLC__1SDH_20210403T122536_20210403T122630_037286_046484_8152": {
59 | "ascending_node_time": "2021-04-03T11:58:30.792178",
60 | "family_name": "SENTINEL-1",
61 | "mode": "EW",
62 | "number": "A",
63 | "orbit_number": 37286,
64 | "pass": "DESCENDING",
65 | "product_type": "SLC",
66 | "mission_data_take_id": 287876,
67 | "relative_orbit_number": 114,
68 | "start_time": "2021-04-03T12:25:36.505937",
69 | "stop_time": "2021-04-03T12:26:30.902216",
70 | "swaths": ["EW1", "EW2", "EW3", "EW4", "EW5"],
71 | "transmitter_receiver_polarisations": ["HH", "HV"],
72 | },
73 | "S1B_WV_SLC__1SSV_20210403T083025_20210403T084452_026300_032390_D542": {
74 | "ascending_node_time": "2021-04-03T07:50:57.437371",
75 | "family_name": "SENTINEL-1",
76 | "mode": "WV",
77 | "number": "B",
78 | "orbit_number": 26300,
79 | "pass": "DESCENDING",
80 | "product_type": "SLC",
81 | "mission_data_take_id": 205712,
82 | "relative_orbit_number": 24,
83 | "start_time": "2021-04-03T08:30:25.749829",
84 | "stop_time": "2021-04-03T08:44:52.841818",
85 | "swaths": ["WV1", "WV2"],
86 | "transmitter_receiver_polarisations": ["VV"],
87 | },
88 | "S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8": {
89 | "ascending_node_time": "2021-04-01T04:49:55.637823",
90 | "family_name": "SENTINEL-1",
91 | "mode": "IW",
92 | "number": "B",
93 | "orbit_number": 26269,
94 | "pass": "DESCENDING",
95 | "product_type": "GRD",
96 | "mission_data_take_id": 205463,
97 | "relative_orbit_number": 168,
98 | "start_time": "2021-04-01T05:26:23.794457",
99 | "stop_time": "2021-04-01T05:26:48.793373",
100 | "swaths": ["IW"],
101 | "transmitter_receiver_polarisations": ["VV", "VH"],
102 | },
103 | }
104 |
105 | ANNOTATION_PATH = str(
106 | DATA_FOLDER
107 | / "S1B_IW_SLC__1SDV_20210401T052622_20210401T052650_026269_032297_EFA4.SAFE"
108 | / "annotation"
109 | / "s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.xml"
110 | )
111 |
112 |
113 | def test_cached_sentinel1_schemas() -> None:
114 | res = esa_safe.cached_sentinel1_schemas("annotation")
115 |
116 | assert isinstance(res, xmlschema.XMLSchema)
117 |
118 |
119 | def test_parse_tag() -> None:
120 | expected = {
121 | "timelinessCategory",
122 | "platformHeading",
123 | "radarFrequency",
124 | "rangeSamplingRate",
125 | "projection",
126 | "pass",
127 | "azimuthSteeringRate",
128 | }
129 |
130 | res = esa_safe.parse_tag(ANNOTATION_PATH, "//productInformation")
131 |
132 | assert isinstance(res, dict)
133 | assert set(res) == expected
134 |
135 |
136 | def test_parse_tag_as_list() -> None:
137 | expected = {
138 | "azimuthTime",
139 | "firstValidSample",
140 | "sensingTime",
141 | "lastValidSample",
142 | "byteOffset",
143 | "azimuthAnxTime",
144 | }
145 |
146 | res = esa_safe.parse_tag_as_list(ANNOTATION_PATH, "//burst")
147 |
148 | assert isinstance(res, list)
149 | assert set(res[0]) == expected
150 |
151 | # XPath to a single element
152 | res = esa_safe.parse_tag_as_list(ANNOTATION_PATH, "//burst[1]")
153 |
154 | assert isinstance(res, list)
155 | assert set(res[0]) == expected
156 |
157 | # XPath to a non existent element
158 | res = esa_safe.parse_tag_as_list(ANNOTATION_PATH, "//dummy")
159 |
160 | assert isinstance(res, list)
161 | assert res == []
162 |
163 |
164 | def test_parse_annotation_filename() -> None:
165 | res = esa_safe.parse_annotation_filename(
166 | "s1b-iw1-slc-vv-20210401t052624-20210401t052649-026269-032297-004.xml"
167 | )
168 |
169 | assert res == ("", "iw1", "vv", "20210401t052624")
170 |
171 | with pytest.raises(ValueError):
172 | esa_safe.parse_annotation_filename("")
173 |
174 |
175 | def test_findtext() -> None:
176 | tree = ElementTree.fromstring("<root><child>text</child></root>")
177 |
178 | res = esa_safe.findtext(tree, ".//child")
179 |
180 | assert res == "text"
181 |
182 | with pytest.raises(ValueError):
183 | esa_safe.findtext(tree, ".//dummy")
184 |
185 |
186 | def test_findall() -> None:
187 | tree = ElementTree.fromstring("<root><c1>text</c1><c2/></root>")
188 |
189 | res = esa_safe.findall(tree, ".//c1")
190 |
191 | assert res == ["text"]
192 |
193 | with pytest.raises(ValueError):
194 | esa_safe.findall(tree, ".//c2")
195 |
196 |
197 | @pytest.mark.parametrize("product_id,expected", SENTINEL1_ATTRIBUTES.items())
198 | def test_parse_manifest_sentinel1(product_id: str, expected: Dict[str, Any]) -> None:
199 | manifest_path = DATA_FOLDER / (product_id + ".SAFE") / "manifest.safe"
200 |
201 | res_attrs, res_files = esa_safe.parse_manifest_sentinel1(manifest_path)
202 |
203 | assert res_attrs == expected
204 |
205 |
206 | def test_make_stac_item() -> None:
207 | attrs = SENTINEL1_ATTRIBUTES[
208 | "S1B_IW_GRDH_1SDV_20210401T052623_20210401T052648_026269_032297_ECC8"
209 | ]
210 | expected = {
211 | "constellation": "sentinel-1",
212 | "instrument": ["c-sar"],
213 | "platform": "sentinel-1b",
214 | "sat:orbit_state": "descending",
215 | "sat:absolute_orbit": 26269,
216 | "sat:relative_orbit": 168,
217 | "sat:anx_datetime": "2021-04-01T04:49:55.637823Z",
218 | "sar:instrument_mode": "IW",
219 | "sar:frequency_band": "C",
220 | "sar:polarizations": ["VV", "VH"],
221 | "sar:product_type": "GRD",
222 | "sar:observation_direction": "right",
223 | }
224 |
225 | assert esa_safe.make_stac_item(attrs) == expected
226 |
--------------------------------------------------------------------------------
/notebooks/Sentinel-1_SLC_IW.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "62bc62b4",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "%load_ext autoreload\n",
11 | "%autoreload 2\n",
12 | "\n",
13 | "%matplotlib inline\n",
14 | "%config InlineBackend.figure_format = 'retina'\n",
15 | "\n",
16 | "import matplotlib.pyplot as plt\n",
17 | "\n",
18 | "plt.rcParams[\"figure.figsize\"] = (12, 8)\n",
19 | "plt.rcParams[\"font.size\"] = 12"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": null,
25 | "id": "1e5120fa",
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "import xarray as xr\n",
30 | "\n",
31 | "import xarray_sentinel"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "id": "7776d0c7",
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "# uncomment download the data\n",
42 | "#! DHUS_USER= DHUS_PASSWORD= ./download_data.sh"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "id": "3b066268",
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "product_path = (\n",
53 | " \"data/S1B_IW_SLC__1SDV_20211223T051121_20211223T051148_030148_039993_BA4B.SAFE\"\n",
54 | ")\n",
55 | "swath_group = \"IW3\"\n",
56 | "swath_polarisation_group = \"IW3/VV\"\n",
57 | "measurement_group = \"IW3/VV/4\"\n",
58 | "measurement_block_slices = (slice(None), slice(12000, 15000))\n",
59 | "digital_number_max = 250\n",
60 | "\n",
61 | "!ls -d {product_path}"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": null,
67 | "id": "f42e4c16",
68 | "metadata": {},
69 | "outputs": [],
70 | "source": [
71 | "xr.open_dataset(product_path, engine=\"sentinel-1\")"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "id": "8e8511e2",
78 | "metadata": {},
79 | "outputs": [],
80 | "source": [
81 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_group)"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "id": "d5774952",
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_polarisation_group)"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": null,
97 | "id": "2c39574d",
98 | "metadata": {},
99 | "outputs": [],
100 | "source": [
101 | "measurement = xr.open_dataset(\n",
102 | " product_path, engine=\"sentinel-1\", group=measurement_group\n",
103 | ")\n",
104 | "measurement"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": null,
110 | "id": "ca1eea3a",
111 | "metadata": {},
112 | "outputs": [],
113 | "source": [
114 | "measurement_block = measurement.measurement[measurement_block_slices]\n",
115 | "measurement_block"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "id": "afae99bf",
122 | "metadata": {
123 | "scrolled": false
124 | },
125 | "outputs": [],
126 | "source": [
127 | "_ = abs(measurement_block).plot(y=\"azimuth_time\", vmax=digital_number_max)"
128 | ]
129 | },
130 | {
131 | "cell_type": "code",
132 | "execution_count": null,
133 | "id": "3d231b4a",
134 | "metadata": {},
135 | "outputs": [],
136 | "source": [
137 | "gcp = xr.open_dataset(\n",
138 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/gcp\"\n",
139 | ")\n",
140 | "gcp"
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": null,
146 | "id": "b500c2c3",
147 | "metadata": {},
148 | "outputs": [],
149 | "source": [
150 | "_ = gcp.height.plot(y=\"azimuth_time\")"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": null,
156 | "id": "ce6541ea",
157 | "metadata": {},
158 | "outputs": [],
159 | "source": [
160 | "_ = gcp.plot.scatter(x=\"longitude\", y=\"latitude\", hue=\"height\")"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "id": "f27e0e78",
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "orbit = xr.open_dataset(\n",
171 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/orbit\"\n",
172 | ")\n",
173 | "orbit"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": null,
179 | "id": "38904b43",
180 | "metadata": {},
181 | "outputs": [],
182 | "source": [
183 | "orbit.plot.scatter(y=\"azimuth_time\", x=\"position\", hue=\"velocity\")"
184 | ]
185 | },
186 | {
187 | "cell_type": "code",
188 | "execution_count": null,
189 | "id": "97430f6a",
190 | "metadata": {},
191 | "outputs": [],
192 | "source": [
193 | "calibration = xr.open_dataset(\n",
194 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/calibration\"\n",
195 | ")\n",
196 | "calibration"
197 | ]
198 | },
199 | {
200 | "cell_type": "code",
201 | "execution_count": null,
202 | "id": "79e6022b",
203 | "metadata": {},
204 | "outputs": [],
205 | "source": [
206 | "# betaNought and dn calibration are typically constat\n",
207 | "print(calibration.betaNought.mean().item(), \"+-\", calibration.betaNought.std().item())\n",
208 | "print(calibration.dn.mean().item(), \"+-\", calibration.dn.std().item())"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": null,
214 | "id": "1337f73a",
215 | "metadata": {},
216 | "outputs": [],
217 | "source": [
218 | "_ = calibration.sigmaNought.plot(x=\"pixel\")"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": null,
224 | "id": "2e056095",
225 | "metadata": {},
226 | "outputs": [],
227 | "source": [
228 | "_ = calibration.gamma.plot(x=\"pixel\")"
229 | ]
230 | },
231 | {
232 | "cell_type": "code",
233 | "execution_count": null,
234 | "id": "c9e54759",
235 | "metadata": {},
236 | "outputs": [],
237 | "source": [
238 | "betaNought_block = xarray_sentinel.calibrate_amplitude(\n",
239 | " measurement_block, calibration.betaNought\n",
240 | ")\n",
241 | "betaNought_block"
242 | ]
243 | },
244 | {
245 | "cell_type": "code",
246 | "execution_count": null,
247 | "id": "27f3b95c",
248 | "metadata": {},
249 | "outputs": [],
250 | "source": [
251 | "_ = abs(betaNought_block).plot(y=\"azimuth_time\", vmax=1)"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": null,
257 | "id": "41087cca",
258 | "metadata": {},
259 | "outputs": [],
260 | "source": [
261 | "betaNought_block_db = xarray_sentinel.calibrate_intensity(\n",
262 | " measurement_block,\n",
263 | " calibration.betaNought,\n",
264 | " as_db=True,\n",
265 | ")\n",
266 | "betaNought_block_db"
267 | ]
268 | },
269 | {
270 | "cell_type": "code",
271 | "execution_count": null,
272 | "id": "271e3908",
273 | "metadata": {},
274 | "outputs": [],
275 | "source": [
276 | "_ = betaNought_block_db.plot(y=\"azimuth_time\", vmin=-20, vmax=5)"
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": null,
282 | "id": "2aae5912",
283 | "metadata": {},
284 | "outputs": [],
285 | "source": [
286 | "noise_range = xr.open_dataset(\n",
287 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_range\"\n",
288 | ")\n",
289 | "noise_range"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": null,
295 | "id": "c357a9de",
296 | "metadata": {},
297 | "outputs": [],
298 | "source": [
299 | "_ = noise_range.noiseRangeLut.plot(x=\"pixel\")"
300 | ]
301 | },
302 | {
303 | "cell_type": "code",
304 | "execution_count": null,
305 | "id": "5ebe2a71",
306 | "metadata": {},
307 | "outputs": [],
308 | "source": [
309 | "noise_azimuth = xr.open_dataset(\n",
310 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_azimuth\"\n",
311 | ")\n",
312 | "noise_azimuth"
313 | ]
314 | },
315 | {
316 | "cell_type": "code",
317 | "execution_count": null,
318 | "id": "0a29b553",
319 | "metadata": {},
320 | "outputs": [],
321 | "source": [
322 | "_ = noise_azimuth.plot.scatter(x=\"line\", y=\"noiseAzimuthLut\")"
323 | ]
324 | },
325 | {
326 | "cell_type": "code",
327 | "execution_count": null,
328 | "id": "77578300",
329 | "metadata": {},
330 | "outputs": [],
331 | "source": [
332 | "xr.open_dataset(\n",
333 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/attitude\"\n",
334 | ")"
335 | ]
336 | },
337 | {
338 | "cell_type": "code",
339 | "execution_count": null,
340 | "id": "f98e156a",
341 | "metadata": {},
342 | "outputs": [],
343 | "source": [
344 | "xr.open_dataset(\n",
345 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/dc_estimate\"\n",
346 | ")"
347 | ]
348 | },
349 | {
350 | "cell_type": "code",
351 | "execution_count": null,
352 | "id": "910c5ab9",
353 | "metadata": {},
354 | "outputs": [],
355 | "source": [
356 | "xr.open_dataset(\n",
357 | " product_path,\n",
358 | " engine=\"sentinel-1\",\n",
359 | " group=f\"{swath_polarisation_group}/azimuth_fm_rate\",\n",
360 | ")"
361 | ]
362 | },
363 | {
364 | "cell_type": "code",
365 | "execution_count": null,
366 | "id": "204a429d",
367 | "metadata": {},
368 | "outputs": [],
369 | "source": []
370 | }
371 | ],
372 | "metadata": {
373 | "kernelspec": {
374 | "display_name": "Python 3",
375 | "language": "python",
376 | "name": "python3"
377 | },
378 | "language_info": {
379 | "codemirror_mode": {
380 | "name": "ipython",
381 | "version": 3
382 | },
383 | "file_extension": ".py",
384 | "mimetype": "text/x-python",
385 | "name": "python",
386 | "nbconvert_exporter": "python",
387 | "pygments_lexer": "ipython3",
388 | "version": "3.9.10"
389 | }
390 | },
391 | "nbformat": 4,
392 | "nbformat_minor": 5
393 | }
394 |
--------------------------------------------------------------------------------
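The cells above can be condensed into a short script. A minimal sketch, assuming the same locally downloaded product and the group names used by the notebook (`IW3/VV/4` for the burst and `IW3/VV/calibration` for the LUTs):

```python
import xarray as xr
import xarray_sentinel

product_path = (
    "data/S1B_IW_SLC__1SDV_20211223T051121_20211223T051148_030148_039993_BA4B.SAFE"
)

# digital numbers for burst 4 of swath IW3, polarisation VV
measurement = xr.open_dataset(product_path, engine="sentinel-1", group="IW3/VV/4")

# calibration LUTs for the same swath / polarisation
calibration = xr.open_dataset(
    product_path, engine="sentinel-1", group="IW3/VV/calibration"
)

# radiometric calibration to beta nought intensity, expressed in dB
beta_nought_db = xarray_sentinel.calibrate_intensity(
    measurement.measurement, calibration.betaNought, as_db=True
)
beta_nought_db.plot(y="azimuth_time", vmin=-20, vmax=5)
```

This mirrors the notebook cells one-to-one; only the intermediate block slicing is dropped, so the whole burst is calibrated at once.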
/notebooks/Sentinel-1_SLC_SM.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "62bc62b4",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "%load_ext autoreload\n",
11 | "%autoreload 2\n",
12 | "\n",
13 | "%matplotlib inline\n",
14 | "%config InlineBackend.figure_format = 'retina'\n",
15 | "\n",
16 | "import matplotlib.pyplot as plt\n",
17 | "\n",
18 | "plt.rcParams[\"figure.figsize\"] = (12, 8)\n",
19 | "plt.rcParams[\"font.size\"] = 12"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": null,
25 | "id": "1e5120fa",
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "import xarray as xr\n",
30 | "\n",
31 | "import xarray_sentinel"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "id": "7776d0c7",
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "# uncomment download the data\n",
42 | "#! DHUS_USER= DHUS_PASSWORD= ./download_data.sh"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "id": "3b066268",
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "product_path = (\n",
53 | " \"data/S1B_S6_SLC__1SDV_20211216T115438_20211216T115501_030050_03968A_4DCB.SAFE\"\n",
54 | ")\n",
55 | "swath_group = \"S6\"\n",
56 | "swath_polarisation_group = \"S6/VV\"\n",
57 | "measurement_group = \"S6/VV\"\n",
58 | "measurement_block_slices = (slice(13312, 15360), slice(None, 2048))\n",
59 | "digital_number_max = 75\n",
60 | "\n",
61 | "!ls -d {product_path}"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": null,
67 | "id": "f42e4c16",
68 | "metadata": {},
69 | "outputs": [],
70 | "source": [
71 | "xr.open_dataset(product_path, engine=\"sentinel-1\")"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "id": "8e8511e2",
78 | "metadata": {},
79 | "outputs": [],
80 | "source": [
81 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_group)"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "id": "d5774952",
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_polarisation_group)"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": null,
97 | "id": "2c39574d",
98 | "metadata": {},
99 | "outputs": [],
100 | "source": [
101 | "measurement = xr.open_dataset(\n",
102 | " product_path, engine=\"sentinel-1\", group=measurement_group, chunks=2048\n",
103 | ")\n",
104 | "measurement"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": null,
110 | "id": "ca1eea3a",
111 | "metadata": {},
112 | "outputs": [],
113 | "source": [
114 | "measurement_block = measurement.measurement[measurement_block_slices]\n",
115 | "measurement_block"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "id": "afae99bf",
122 | "metadata": {
123 | "scrolled": false
124 | },
125 | "outputs": [],
126 | "source": [
127 | "_ = abs(measurement_block).plot(y=\"azimuth_time\", vmax=digital_number_max)"
128 | ]
129 | },
130 | {
131 | "cell_type": "code",
132 | "execution_count": null,
133 | "id": "3d231b4a",
134 | "metadata": {},
135 | "outputs": [],
136 | "source": [
137 | "gcp = xr.open_dataset(\n",
138 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/gcp\"\n",
139 | ")\n",
140 | "gcp"
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": null,
146 | "id": "b500c2c3",
147 | "metadata": {},
148 | "outputs": [],
149 | "source": [
150 | "_ = gcp.height.plot(y=\"azimuth_time\")"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": null,
156 | "id": "ce6541ea",
157 | "metadata": {},
158 | "outputs": [],
159 | "source": [
160 | "_ = gcp.plot.scatter(x=\"longitude\", y=\"latitude\", hue=\"height\")"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "id": "f27e0e78",
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "orbit = xr.open_dataset(\n",
171 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/orbit\"\n",
172 | ")\n",
173 | "orbit"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": null,
179 | "id": "38904b43",
180 | "metadata": {},
181 | "outputs": [],
182 | "source": [
183 | "orbit.plot.scatter(y=\"azimuth_time\", x=\"position\", hue=\"velocity\")"
184 | ]
185 | },
186 | {
187 | "cell_type": "code",
188 | "execution_count": null,
189 | "id": "97430f6a",
190 | "metadata": {},
191 | "outputs": [],
192 | "source": [
193 | "calibration = xr.open_dataset(\n",
194 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/calibration\"\n",
195 | ")\n",
196 | "calibration"
197 | ]
198 | },
199 | {
200 | "cell_type": "code",
201 | "execution_count": null,
202 | "id": "79e6022b",
203 | "metadata": {},
204 | "outputs": [],
205 | "source": [
206 | "# betaNought and dn calibration are typically constat\n",
207 | "print(calibration.betaNought.mean().item(), \"+-\", calibration.betaNought.std().item())\n",
208 | "print(calibration.dn.mean().item(), \"+-\", calibration.dn.std().item())"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": null,
214 | "id": "1337f73a",
215 | "metadata": {},
216 | "outputs": [],
217 | "source": [
218 | "_ = calibration.sigmaNought.plot(x=\"pixel\")"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": null,
224 | "id": "2e056095",
225 | "metadata": {},
226 | "outputs": [],
227 | "source": [
228 | "_ = calibration.gamma.plot(x=\"pixel\")"
229 | ]
230 | },
231 | {
232 | "cell_type": "code",
233 | "execution_count": null,
234 | "id": "c9e54759",
235 | "metadata": {},
236 | "outputs": [],
237 | "source": [
238 | "betaNought_block = xarray_sentinel.calibrate_amplitude(\n",
239 | " measurement_block, calibration.betaNought\n",
240 | ")\n",
241 | "betaNought_block"
242 | ]
243 | },
244 | {
245 | "cell_type": "code",
246 | "execution_count": null,
247 | "id": "27f3b95c",
248 | "metadata": {},
249 | "outputs": [],
250 | "source": [
251 | "_ = abs(betaNought_block).plot(y=\"azimuth_time\", vmax=1)"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": null,
257 | "id": "41087cca",
258 | "metadata": {},
259 | "outputs": [],
260 | "source": [
261 | "betaNought_block_db = xarray_sentinel.calibrate_intensity(\n",
262 | " measurement_block,\n",
263 | " calibration.betaNought,\n",
264 | " as_db=True,\n",
265 | ")\n",
266 | "betaNought_block_db"
267 | ]
268 | },
269 | {
270 | "cell_type": "code",
271 | "execution_count": null,
272 | "id": "271e3908",
273 | "metadata": {},
274 | "outputs": [],
275 | "source": [
276 | "_ = betaNought_block_db.plot(y=\"azimuth_time\", vmin=-20, vmax=5)"
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": null,
282 | "id": "2aae5912",
283 | "metadata": {},
284 | "outputs": [],
285 | "source": [
286 | "noise_range = xr.open_dataset(\n",
287 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_range\"\n",
288 | ")\n",
289 | "noise_range"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": null,
295 | "id": "c357a9de",
296 | "metadata": {},
297 | "outputs": [],
298 | "source": [
299 | "_ = noise_range.noiseRangeLut.plot(x=\"pixel\")"
300 | ]
301 | },
302 | {
303 | "cell_type": "code",
304 | "execution_count": null,
305 | "id": "5ebe2a71",
306 | "metadata": {},
307 | "outputs": [],
308 | "source": [
309 | "noise_azimuth = xr.open_dataset(\n",
310 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_azimuth\"\n",
311 | ")\n",
312 | "noise_azimuth"
313 | ]
314 | },
315 | {
316 | "cell_type": "code",
317 | "execution_count": null,
318 | "id": "75c40bce",
319 | "metadata": {},
320 | "outputs": [],
321 | "source": [
322 | "# _ = noise_azimuth.plot.scatter(x=\"line\", y=\"noiseAzimuthLut\")"
323 | ]
324 | },
325 | {
326 | "cell_type": "code",
327 | "execution_count": null,
328 | "id": "e45d93cf",
329 | "metadata": {},
330 | "outputs": [],
331 | "source": [
332 | "xr.open_dataset(\n",
333 | " product_path,\n",
334 | " engine=\"sentinel-1\",\n",
335 | " group=f\"{swath_polarisation_group}/coordinate_conversion\",\n",
336 | ")"
337 | ]
338 | },
339 | {
340 | "cell_type": "code",
341 | "execution_count": null,
342 | "id": "77578300",
343 | "metadata": {},
344 | "outputs": [],
345 | "source": [
346 | "xr.open_dataset(\n",
347 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/attitude\"\n",
348 | ")"
349 | ]
350 | },
351 | {
352 | "cell_type": "code",
353 | "execution_count": null,
354 | "id": "f98e156a",
355 | "metadata": {},
356 | "outputs": [],
357 | "source": [
358 | "xr.open_dataset(\n",
359 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/dc_estimate\"\n",
360 | ")"
361 | ]
362 | },
363 | {
364 | "cell_type": "code",
365 | "execution_count": null,
366 | "id": "910c5ab9",
367 | "metadata": {},
368 | "outputs": [],
369 | "source": [
370 | "xr.open_dataset(\n",
371 | " product_path,\n",
372 | " engine=\"sentinel-1\",\n",
373 | " group=f\"{swath_polarisation_group}/azimuth_fm_rate\",\n",
374 | ")"
375 | ]
376 | },
377 | {
378 | "cell_type": "code",
379 | "execution_count": null,
380 | "id": "5128f31a",
381 | "metadata": {},
382 | "outputs": [],
383 | "source": []
384 | }
385 | ],
386 | "metadata": {
387 | "kernelspec": {
388 | "display_name": "Python 3",
389 | "language": "python",
390 | "name": "python3"
391 | },
392 | "language_info": {
393 | "codemirror_mode": {
394 | "name": "ipython",
395 | "version": 3
396 | },
397 | "file_extension": ".py",
398 | "mimetype": "text/x-python",
399 | "name": "python",
400 | "nbconvert_exporter": "python",
401 | "pygments_lexer": "ipython3",
402 | "version": "3.9.10"
403 | }
404 | },
405 | "nbformat": 4,
406 | "nbformat_minor": 5
407 | }
408 |
--------------------------------------------------------------------------------
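Unlike the IW SLC notebook, the SM and GRD notebooks pass `chunks=2048` when opening the measurement group, so the data comes back as lazy dask-backed arrays and nothing is read from the TIFF until a block is actually computed. A minimal sketch, assuming the same product and group as in the notebook above:

```python
import xarray as xr

product_path = (
    "data/S1B_S6_SLC__1SDV_20211216T115438_20211216T115501_030050_03968A_4DCB.SAFE"
)

# chunks=2048 returns dask-backed arrays: the measurement TIFF is not read yet
measurement = xr.open_dataset(
    product_path, engine="sentinel-1", group="S6/VV", chunks=2048
)

# select the same block used by the notebook and trigger the read explicitly
block = measurement.measurement[slice(13312, 15360), slice(None, 2048)].compute()
print(block.shape)
```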
/notebooks/Sentinel-1_GRD_IW.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "62bc62b4",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "%load_ext autoreload\n",
11 | "%autoreload 2\n",
12 | "\n",
13 | "%matplotlib inline\n",
14 | "%config InlineBackend.figure_format = 'retina'\n",
15 | "\n",
16 | "import matplotlib.pyplot as plt\n",
17 | "\n",
18 | "plt.rcParams[\"figure.figsize\"] = (12, 8)\n",
19 | "plt.rcParams[\"font.size\"] = 12"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": null,
25 | "id": "1e5120fa",
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "import xarray as xr\n",
30 | "\n",
31 | "import xarray_sentinel"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "id": "7776d0c7",
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "# uncomment download the data\n",
42 | "#! DHUS_USER= DHUS_PASSWORD= ./download_data.sh"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "id": "3b066268",
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "product_path = (\n",
53 | " \"data/S1B_IW_GRDH_1SDV_20211223T051122_20211223T051147_030148_039993_5371.SAFE\"\n",
54 | ")\n",
55 | "swath_group = \"IW\"\n",
56 | "swath_polarisation_group = \"IW/VV\"\n",
57 | "measurement_group = \"IW/VV\"\n",
58 | "measurement_block_slices = (slice(7000, 9000), slice(20000, 23000))\n",
59 | "digital_number_max = 600\n",
60 | "\n",
61 | "!ls -d {product_path}"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": null,
67 | "id": "f42e4c16",
68 | "metadata": {},
69 | "outputs": [],
70 | "source": [
71 | "xr.open_dataset(product_path, engine=\"sentinel-1\")"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "id": "8e8511e2",
78 | "metadata": {},
79 | "outputs": [],
80 | "source": [
81 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_group)"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "id": "d5774952",
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_polarisation_group)"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": null,
97 | "id": "2c39574d",
98 | "metadata": {},
99 | "outputs": [],
100 | "source": [
101 | "measurement = xr.open_dataset(\n",
102 | " product_path, engine=\"sentinel-1\", group=measurement_group, chunks=2048\n",
103 | ")\n",
104 | "measurement"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": null,
110 | "id": "ca1eea3a",
111 | "metadata": {},
112 | "outputs": [],
113 | "source": [
114 | "measurement_block = measurement.measurement[measurement_block_slices]\n",
115 | "measurement_block"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "id": "afae99bf",
122 | "metadata": {
123 | "scrolled": false
124 | },
125 | "outputs": [],
126 | "source": [
127 | "_ = abs(measurement_block).plot(y=\"azimuth_time\", vmax=digital_number_max)"
128 | ]
129 | },
130 | {
131 | "cell_type": "code",
132 | "execution_count": null,
133 | "id": "3d231b4a",
134 | "metadata": {},
135 | "outputs": [],
136 | "source": [
137 | "gcp = xr.open_dataset(\n",
138 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/gcp\"\n",
139 | ")\n",
140 | "gcp"
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": null,
146 | "id": "b500c2c3",
147 | "metadata": {},
148 | "outputs": [],
149 | "source": [
150 | "_ = gcp.height.plot(y=\"azimuth_time\")"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": null,
156 | "id": "ce6541ea",
157 | "metadata": {},
158 | "outputs": [],
159 | "source": [
160 | "_ = gcp.plot.scatter(x=\"longitude\", y=\"latitude\", hue=\"height\")"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "id": "f27e0e78",
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "orbit = xr.open_dataset(\n",
171 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/orbit\"\n",
172 | ")\n",
173 | "orbit"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": null,
179 | "id": "38904b43",
180 | "metadata": {},
181 | "outputs": [],
182 | "source": [
183 | "orbit.plot.scatter(y=\"azimuth_time\", x=\"position\", hue=\"velocity\")"
184 | ]
185 | },
186 | {
187 | "cell_type": "code",
188 | "execution_count": null,
189 | "id": "97430f6a",
190 | "metadata": {},
191 | "outputs": [],
192 | "source": [
193 | "calibration = xr.open_dataset(\n",
194 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/calibration\"\n",
195 | ")\n",
196 | "calibration"
197 | ]
198 | },
199 | {
200 | "cell_type": "code",
201 | "execution_count": null,
202 | "id": "79e6022b",
203 | "metadata": {},
204 | "outputs": [],
205 | "source": [
206 | "# betaNought and dn calibration are typically constat\n",
207 | "print(calibration.betaNought.mean().item(), \"+-\", calibration.betaNought.std().item())\n",
208 | "print(calibration.dn.mean().item(), \"+-\", calibration.dn.std().item())"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": null,
214 | "id": "1337f73a",
215 | "metadata": {},
216 | "outputs": [],
217 | "source": [
218 | "_ = calibration.sigmaNought.plot(x=\"pixel\")"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": null,
224 | "id": "2e056095",
225 | "metadata": {},
226 | "outputs": [],
227 | "source": [
228 | "_ = calibration.gamma.plot(x=\"pixel\")"
229 | ]
230 | },
231 | {
232 | "cell_type": "code",
233 | "execution_count": null,
234 | "id": "c9e54759",
235 | "metadata": {},
236 | "outputs": [],
237 | "source": [
238 | "betaNought_block = xarray_sentinel.calibrate_amplitude(\n",
239 | " measurement_block, calibration.betaNought\n",
240 | ")\n",
241 | "betaNought_block"
242 | ]
243 | },
244 | {
245 | "cell_type": "code",
246 | "execution_count": null,
247 | "id": "27f3b95c",
248 | "metadata": {},
249 | "outputs": [],
250 | "source": [
251 | "_ = abs(betaNought_block).plot(y=\"azimuth_time\", vmax=1)"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": null,
257 | "id": "41087cca",
258 | "metadata": {},
259 | "outputs": [],
260 | "source": [
261 | "betaNought_block_db = xarray_sentinel.calibrate_intensity(\n",
262 | " measurement_block, calibration.betaNought, as_db=True\n",
263 | ")\n",
264 | "betaNought_block_db"
265 | ]
266 | },
267 | {
268 | "cell_type": "code",
269 | "execution_count": null,
270 | "id": "271e3908",
271 | "metadata": {},
272 | "outputs": [],
273 | "source": [
274 | "_ = betaNought_block_db.plot(y=\"azimuth_time\", vmin=-20, vmax=5)"
275 | ]
276 | },
277 | {
278 | "cell_type": "code",
279 | "execution_count": null,
280 | "id": "2aae5912",
281 | "metadata": {},
282 | "outputs": [],
283 | "source": [
284 | "noise_range = xr.open_dataset(\n",
285 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_range\"\n",
286 | ")\n",
287 | "noise_range"
288 | ]
289 | },
290 | {
291 | "cell_type": "code",
292 | "execution_count": null,
293 | "id": "c357a9de",
294 | "metadata": {},
295 | "outputs": [],
296 | "source": [
297 | "_ = noise_range.noiseRangeLut.plot(x=\"pixel\")"
298 | ]
299 | },
300 | {
301 | "cell_type": "code",
302 | "execution_count": null,
303 | "id": "5ebe2a71",
304 | "metadata": {},
305 | "outputs": [],
306 | "source": [
307 | "noise_azimuth = xr.open_dataset(\n",
308 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_azimuth\"\n",
309 | ")\n",
310 | "noise_azimuth"
311 | ]
312 | },
313 | {
314 | "cell_type": "code",
315 | "execution_count": null,
316 | "id": "75c40bce",
317 | "metadata": {},
318 | "outputs": [],
319 | "source": [
320 | "_ = noise_azimuth.plot.scatter(x=\"line\", y=\"noiseAzimuthLut\")"
321 | ]
322 | },
323 | {
324 | "cell_type": "code",
325 | "execution_count": null,
326 | "id": "f942214c",
327 | "metadata": {},
328 | "outputs": [],
329 | "source": [
330 | "coordinate_conversion = xr.open_dataset(\n",
331 | " product_path,\n",
332 | " engine=\"sentinel-1\",\n",
333 | " group=f\"{swath_polarisation_group}/coordinate_conversion\",\n",
334 | ")\n",
335 | "coordinate_conversion"
336 | ]
337 | },
338 | {
339 | "cell_type": "code",
340 | "execution_count": null,
341 | "id": "77578300",
342 | "metadata": {},
343 | "outputs": [],
344 | "source": [
345 | "xr.open_dataset(\n",
346 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/attitude\"\n",
347 | ")"
348 | ]
349 | },
350 | {
351 | "cell_type": "code",
352 | "execution_count": null,
353 | "id": "f98e156a",
354 | "metadata": {},
355 | "outputs": [],
356 | "source": [
357 | "xr.open_dataset(\n",
358 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/dc_estimate\"\n",
359 | ")"
360 | ]
361 | },
362 | {
363 | "cell_type": "code",
364 | "execution_count": null,
365 | "id": "910c5ab9",
366 | "metadata": {},
367 | "outputs": [],
368 | "source": [
369 | "xr.open_dataset(\n",
370 | " product_path,\n",
371 | " engine=\"sentinel-1\",\n",
372 | " group=f\"{swath_polarisation_group}/azimuth_fm_rate\",\n",
373 | ")"
374 | ]
375 | },
376 | {
377 | "cell_type": "code",
378 | "execution_count": null,
379 | "id": "204a429d",
380 | "metadata": {},
381 | "outputs": [],
382 | "source": []
383 | }
384 | ],
385 | "metadata": {
386 | "kernelspec": {
387 | "display_name": "Python 3",
388 | "language": "python",
389 | "name": "python3"
390 | },
391 | "language_info": {
392 | "codemirror_mode": {
393 | "name": "ipython",
394 | "version": 3
395 | },
396 | "file_extension": ".py",
397 | "mimetype": "text/x-python",
398 | "name": "python",
399 | "nbconvert_exporter": "python",
400 | "pygments_lexer": "ipython3",
401 | "version": "3.9.10"
402 | }
403 | },
404 | "nbformat": 4,
405 | "nbformat_minor": 5
406 | }
407 |
--------------------------------------------------------------------------------
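The GRD IW notebook plots the sigmaNought calibration LUT but only applies the betaNought one. A hedged sketch of the analogous sigma nought calibration, under the assumption that `calibrate_intensity` accepts any of the calibration LUTs the same way it accepts `betaNought` (product path, groups and block slices are the ones used in the notebook above):

```python
import xarray as xr
import xarray_sentinel

product_path = (
    "data/S1B_IW_GRDH_1SDV_20211223T051122_20211223T051147_030148_039993_5371.SAFE"
)

measurement = xr.open_dataset(
    product_path, engine="sentinel-1", group="IW/VV", chunks=2048
)
calibration = xr.open_dataset(
    product_path, engine="sentinel-1", group="IW/VV/calibration"
)

block = measurement.measurement[slice(7000, 9000), slice(20000, 23000)]

# assumption: calibrate_intensity handles the sigmaNought LUT just like betaNought
sigma_nought_db = xarray_sentinel.calibrate_intensity(
    block, calibration.sigmaNought, as_db=True
)
sigma_nought_db.plot(y="azimuth_time", vmin=-25, vmax=5)
```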
/notebooks/Sentinel-1_GRD_SM.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": null,
6 | "id": "62bc62b4",
7 | "metadata": {},
8 | "outputs": [],
9 | "source": [
10 | "%load_ext autoreload\n",
11 | "%autoreload 2\n",
12 | "\n",
13 | "%matplotlib inline\n",
14 | "%config InlineBackend.figure_format = 'retina'\n",
15 | "\n",
16 | "import matplotlib.pyplot as plt\n",
17 | "\n",
18 | "plt.rcParams[\"figure.figsize\"] = (12, 8)\n",
19 | "plt.rcParams[\"font.size\"] = 12"
20 | ]
21 | },
22 | {
23 | "cell_type": "code",
24 | "execution_count": null,
25 | "id": "1e5120fa",
26 | "metadata": {},
27 | "outputs": [],
28 | "source": [
29 | "import xarray as xr\n",
30 | "\n",
31 | "import xarray_sentinel"
32 | ]
33 | },
34 | {
35 | "cell_type": "code",
36 | "execution_count": null,
37 | "id": "7776d0c7",
38 | "metadata": {},
39 | "outputs": [],
40 | "source": [
41 | "# uncomment download the data\n",
42 | "#! DHUS_USER= DHUS_PASSWORD= ./download_data.sh"
43 | ]
44 | },
45 | {
46 | "cell_type": "code",
47 | "execution_count": null,
48 | "id": "3b066268",
49 | "metadata": {},
50 | "outputs": [],
51 | "source": [
52 | "product_path = (\n",
53 | " \"data/S1B_S6_GRDH_1SDV_20211216T115438_20211216T115501_030050_03968A_0F8A.SAFE\"\n",
54 | ")\n",
55 | "swath_group = \"S6\"\n",
56 | "swath_polarisation_group = \"S6/VV\"\n",
57 | "measurement_group = \"S6/VV\"\n",
58 | "measurement_block_slices = (slice(4096, 6144), slice(None, 2048))\n",
59 | "digital_number_max = 150\n",
60 | "\n",
61 | "!ls -d {product_path}"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": null,
67 | "id": "f42e4c16",
68 | "metadata": {},
69 | "outputs": [],
70 | "source": [
71 | "xr.open_dataset(product_path, engine=\"sentinel-1\")"
72 | ]
73 | },
74 | {
75 | "cell_type": "code",
76 | "execution_count": null,
77 | "id": "8e8511e2",
78 | "metadata": {},
79 | "outputs": [],
80 | "source": [
81 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_group)"
82 | ]
83 | },
84 | {
85 | "cell_type": "code",
86 | "execution_count": null,
87 | "id": "d5774952",
88 | "metadata": {},
89 | "outputs": [],
90 | "source": [
91 | "xr.open_dataset(product_path, engine=\"sentinel-1\", group=swath_polarisation_group)"
92 | ]
93 | },
94 | {
95 | "cell_type": "code",
96 | "execution_count": null,
97 | "id": "2c39574d",
98 | "metadata": {},
99 | "outputs": [],
100 | "source": [
101 | "measurement = xr.open_dataset(\n",
102 | " product_path, engine=\"sentinel-1\", group=measurement_group, chunks=2048\n",
103 | ")\n",
104 | "measurement"
105 | ]
106 | },
107 | {
108 | "cell_type": "code",
109 | "execution_count": null,
110 | "id": "ca1eea3a",
111 | "metadata": {},
112 | "outputs": [],
113 | "source": [
114 | "measurement_block = measurement.measurement[measurement_block_slices]\n",
115 | "measurement_block"
116 | ]
117 | },
118 | {
119 | "cell_type": "code",
120 | "execution_count": null,
121 | "id": "afae99bf",
122 | "metadata": {
123 | "scrolled": false
124 | },
125 | "outputs": [],
126 | "source": [
127 | "_ = abs(measurement_block).plot(y=\"azimuth_time\", vmax=digital_number_max)"
128 | ]
129 | },
130 | {
131 | "cell_type": "code",
132 | "execution_count": null,
133 | "id": "3d231b4a",
134 | "metadata": {},
135 | "outputs": [],
136 | "source": [
137 | "gcp = xr.open_dataset(\n",
138 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/gcp\"\n",
139 | ")\n",
140 | "gcp"
141 | ]
142 | },
143 | {
144 | "cell_type": "code",
145 | "execution_count": null,
146 | "id": "b500c2c3",
147 | "metadata": {},
148 | "outputs": [],
149 | "source": [
150 | "_ = gcp.height.plot(y=\"azimuth_time\")"
151 | ]
152 | },
153 | {
154 | "cell_type": "code",
155 | "execution_count": null,
156 | "id": "ce6541ea",
157 | "metadata": {},
158 | "outputs": [],
159 | "source": [
160 | "_ = gcp.plot.scatter(x=\"longitude\", y=\"latitude\", hue=\"height\")"
161 | ]
162 | },
163 | {
164 | "cell_type": "code",
165 | "execution_count": null,
166 | "id": "f27e0e78",
167 | "metadata": {},
168 | "outputs": [],
169 | "source": [
170 | "orbit = xr.open_dataset(\n",
171 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/orbit\"\n",
172 | ")\n",
173 | "orbit"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": null,
179 | "id": "38904b43",
180 | "metadata": {},
181 | "outputs": [],
182 | "source": [
183 | "orbit.plot.scatter(y=\"azimuth_time\", x=\"position\", hue=\"velocity\")"
184 | ]
185 | },
186 | {
187 | "cell_type": "code",
188 | "execution_count": null,
189 | "id": "97430f6a",
190 | "metadata": {},
191 | "outputs": [],
192 | "source": [
193 | "calibration = xr.open_dataset(\n",
194 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/calibration\"\n",
195 | ")\n",
196 | "calibration"
197 | ]
198 | },
199 | {
200 | "cell_type": "code",
201 | "execution_count": null,
202 | "id": "79e6022b",
203 | "metadata": {},
204 | "outputs": [],
205 | "source": [
206 | "# betaNought and dn calibration are typically constat\n",
207 | "print(calibration.betaNought.mean().item(), \"+-\", calibration.betaNought.std().item())\n",
208 | "print(calibration.dn.mean().item(), \"+-\", calibration.dn.std().item())"
209 | ]
210 | },
211 | {
212 | "cell_type": "code",
213 | "execution_count": null,
214 | "id": "1337f73a",
215 | "metadata": {},
216 | "outputs": [],
217 | "source": [
218 | "_ = calibration.sigmaNought.plot(x=\"pixel\")"
219 | ]
220 | },
221 | {
222 | "cell_type": "code",
223 | "execution_count": null,
224 | "id": "2e056095",
225 | "metadata": {},
226 | "outputs": [],
227 | "source": [
228 | "_ = calibration.gamma.plot(x=\"pixel\")"
229 | ]
230 | },
231 | {
232 | "cell_type": "code",
233 | "execution_count": null,
234 | "id": "c9e54759",
235 | "metadata": {},
236 | "outputs": [],
237 | "source": [
238 | "betaNought_block = xarray_sentinel.calibrate_amplitude(\n",
239 | " measurement_block, calibration.betaNought\n",
240 | ")\n",
241 | "betaNought_block"
242 | ]
243 | },
244 | {
245 | "cell_type": "code",
246 | "execution_count": null,
247 | "id": "27f3b95c",
248 | "metadata": {},
249 | "outputs": [],
250 | "source": [
251 | "_ = abs(betaNought_block).plot(y=\"azimuth_time\", vmax=1)"
252 | ]
253 | },
254 | {
255 | "cell_type": "code",
256 | "execution_count": null,
257 | "id": "41087cca",
258 | "metadata": {},
259 | "outputs": [],
260 | "source": [
261 | "betaNought_block_db = xarray_sentinel.calibrate_intensity(\n",
262 | " measurement_block,\n",
263 | " calibration.betaNought,\n",
264 | " as_db=True,\n",
265 | ")\n",
266 | "betaNought_block_db"
267 | ]
268 | },
269 | {
270 | "cell_type": "code",
271 | "execution_count": null,
272 | "id": "271e3908",
273 | "metadata": {},
274 | "outputs": [],
275 | "source": [
276 | "_ = betaNought_block_db.plot(y=\"azimuth_time\", vmin=-20, vmax=5)"
277 | ]
278 | },
279 | {
280 | "cell_type": "code",
281 | "execution_count": null,
282 | "id": "2aae5912",
283 | "metadata": {},
284 | "outputs": [],
285 | "source": [
286 | "noise_range = xr.open_dataset(\n",
287 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_range\"\n",
288 | ")\n",
289 | "noise_range"
290 | ]
291 | },
292 | {
293 | "cell_type": "code",
294 | "execution_count": null,
295 | "id": "c357a9de",
296 | "metadata": {},
297 | "outputs": [],
298 | "source": [
299 | "_ = noise_range.noiseRangeLut.plot(x=\"pixel\")"
300 | ]
301 | },
302 | {
303 | "cell_type": "code",
304 | "execution_count": null,
305 | "id": "5ebe2a71",
306 | "metadata": {},
307 | "outputs": [],
308 | "source": [
309 | "noise_azimuth = xr.open_dataset(\n",
310 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/noise_azimuth\"\n",
311 | ")\n",
312 | "noise_azimuth"
313 | ]
314 | },
315 | {
316 | "cell_type": "code",
317 | "execution_count": null,
318 | "id": "75c40bce",
319 | "metadata": {},
320 | "outputs": [],
321 | "source": [
322 | "# _ = noise_azimuth.plot.scatter(x=\"line\", y=\"noiseAzimuthLut\")"
323 | ]
324 | },
325 | {
326 | "cell_type": "code",
327 | "execution_count": null,
328 | "id": "93333920",
329 | "metadata": {},
330 | "outputs": [],
331 | "source": [
332 | "coordinate_conversion = xr.open_dataset(\n",
333 | " product_path,\n",
334 | " engine=\"sentinel-1\",\n",
335 | " group=f\"{swath_polarisation_group}/coordinate_conversion\",\n",
336 | ")\n",
337 | "coordinate_conversion"
338 | ]
339 | },
340 | {
341 | "cell_type": "code",
342 | "execution_count": null,
343 | "id": "77578300",
344 | "metadata": {},
345 | "outputs": [],
346 | "source": [
347 | "xr.open_dataset(\n",
348 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/attitude\"\n",
349 | ")"
350 | ]
351 | },
352 | {
353 | "cell_type": "code",
354 | "execution_count": null,
355 | "id": "f98e156a",
356 | "metadata": {},
357 | "outputs": [],
358 | "source": [
359 | "xr.open_dataset(\n",
360 | " product_path, engine=\"sentinel-1\", group=f\"{swath_polarisation_group}/dc_estimate\"\n",
361 | ")"
362 | ]
363 | },
364 | {
365 | "cell_type": "code",
366 | "execution_count": null,
367 | "id": "910c5ab9",
368 | "metadata": {},
369 | "outputs": [],
370 | "source": [
371 | "xr.open_dataset(\n",
372 | " product_path,\n",
373 | " engine=\"sentinel-1\",\n",
374 | " group=f\"{swath_polarisation_group}/azimuth_fm_rate\",\n",
375 | ")"
376 | ]
377 | },
378 | {
379 | "cell_type": "code",
380 | "execution_count": null,
381 | "id": "900a6d9c",
382 | "metadata": {},
383 | "outputs": [],
384 | "source": []
385 | }
386 | ],
387 | "metadata": {
388 | "kernelspec": {
389 | "display_name": "Python 3",
390 | "language": "python",
391 | "name": "python3"
392 | },
393 | "language_info": {
394 | "codemirror_mode": {
395 | "name": "ipython",
396 | "version": 3
397 | },
398 | "file_extension": ".py",
399 | "mimetype": "text/x-python",
400 | "name": "python",
401 | "nbconvert_exporter": "python",
402 | "pygments_lexer": "ipython3",
403 | "version": "3.9.10"
404 | }
405 | },
406 | "nbformat": 4,
407 | "nbformat_minor": 5
408 | }
409 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | # setuptools-scm
2 | version.py
3 |
4 | # Sphinx automatic generation of API
5 | docs/README.md
6 | docs/_api/
7 |
8 | # Created by https://www.toptal.com/developers/gitignore/api/python,jupyternotebooks,vim,visualstudiocode,pycharm,emacs,linux,macos,windows
9 | # Edit at https://www.toptal.com/developers/gitignore?templates=python,jupyternotebooks,vim,visualstudiocode,pycharm,emacs,linux,macos,windows
10 |
11 | ### Emacs ###
12 | # -*- mode: gitignore; -*-
13 | *~
14 | \#*\#
15 | /.emacs.desktop
16 | /.emacs.desktop.lock
17 | *.elc
18 | auto-save-list
19 | tramp
20 | .\#*
21 |
22 | # Org-mode
23 | .org-id-locations
24 | *_archive
25 |
26 | # flymake-mode
27 | *_flymake.*
28 |
29 | # eshell files
30 | /eshell/history
31 | /eshell/lastdir
32 |
33 | # elpa packages
34 | /elpa/
35 |
36 | # reftex files
37 | *.rel
38 |
39 | # AUCTeX auto folder
40 | /auto/
41 |
42 | # cask packages
43 | .cask/
44 | dist/
45 |
46 | # Flycheck
47 | flycheck_*.el
48 |
49 | # server auth directory
50 | /server/
51 |
52 | # projectiles files
53 | .projectile
54 |
55 | # directory configuration
56 | .dir-locals.el
57 |
58 | # network security
59 | /network-security.data
60 |
61 |
62 | ### JupyterNotebooks ###
63 | # gitignore template for Jupyter Notebooks
64 | # website: http://jupyter.org/
65 |
66 | .ipynb_checkpoints
67 | */.ipynb_checkpoints/*
68 |
69 | # IPython
70 | profile_default/
71 | ipython_config.py
72 |
73 | # Remove previous ipynb_checkpoints
74 | # git rm -r .ipynb_checkpoints/
75 |
76 | ### Linux ###
77 |
78 | # temporary files which can be created if a process still has a handle open of a deleted file
79 | .fuse_hidden*
80 |
81 | # KDE directory preferences
82 | .directory
83 |
84 | # Linux trash folder which might appear on any partition or disk
85 | .Trash-*
86 |
87 | # .nfs files are created when an open file is removed but is still being accessed
88 | .nfs*
89 |
90 | ### macOS ###
91 | # General
92 | .DS_Store
93 | .AppleDouble
94 | .LSOverride
95 |
96 | # Icon must end with two \r
97 | Icon
98 |
99 | # Thumbnails
100 | ._*
101 |
102 | # Files that might appear in the root of a volume
103 | .DocumentRevisions-V100
104 | .fseventsd
105 | .Spotlight-V100
106 | .TemporaryItems
107 | .Trashes
108 | .VolumeIcon.icns
109 | .com.apple.timemachine.donotpresent
110 |
111 | # Directories potentially created on remote AFP share
112 | .AppleDB
113 | .AppleDesktop
114 | Network Trash Folder
115 | Temporary Items
116 | .apdisk
117 |
118 | ### macOS Patch ###
119 | # iCloud generated files
120 | *.icloud
121 |
122 | ### PyCharm ###
123 | # Covers JetBrains IDEs: IntelliJ, RubyMine, PhpStorm, AppCode, PyCharm, CLion, Android Studio, WebStorm and Rider
124 | # Reference: https://intellij-support.jetbrains.com/hc/en-us/articles/206544839
125 |
126 | # User-specific stuff
127 | .idea/**/workspace.xml
128 | .idea/**/tasks.xml
129 | .idea/**/usage.statistics.xml
130 | .idea/**/dictionaries
131 | .idea/**/shelf
132 |
133 | # AWS User-specific
134 | .idea/**/aws.xml
135 |
136 | # Generated files
137 | .idea/**/contentModel.xml
138 |
139 | # Sensitive or high-churn files
140 | .idea/**/dataSources/
141 | .idea/**/dataSources.ids
142 | .idea/**/dataSources.local.xml
143 | .idea/**/sqlDataSources.xml
144 | .idea/**/dynamic.xml
145 | .idea/**/uiDesigner.xml
146 | .idea/**/dbnavigator.xml
147 |
148 | # Gradle
149 | .idea/**/gradle.xml
150 | .idea/**/libraries
151 |
152 | # Gradle and Maven with auto-import
153 | # When using Gradle or Maven with auto-import, you should exclude module files,
154 | # since they will be recreated, and may cause churn. Uncomment if using
155 | # auto-import.
156 | # .idea/artifacts
157 | # .idea/compiler.xml
158 | # .idea/jarRepositories.xml
159 | # .idea/modules.xml
160 | # .idea/*.iml
161 | # .idea/modules
162 | # *.iml
163 | # *.ipr
164 |
165 | # CMake
166 | cmake-build-*/
167 |
168 | # Mongo Explorer plugin
169 | .idea/**/mongoSettings.xml
170 |
171 | # File-based project format
172 | *.iws
173 |
174 | # IntelliJ
175 | out/
176 |
177 | # mpeltonen/sbt-idea plugin
178 | .idea_modules/
179 |
180 | # JIRA plugin
181 | atlassian-ide-plugin.xml
182 |
183 | # Cursive Clojure plugin
184 | .idea/replstate.xml
185 |
186 | # SonarLint plugin
187 | .idea/sonarlint/
188 |
189 | # Crashlytics plugin (for Android Studio and IntelliJ)
190 | com_crashlytics_export_strings.xml
191 | crashlytics.properties
192 | crashlytics-build.properties
193 | fabric.properties
194 |
195 | # Editor-based Rest Client
196 | .idea/httpRequests
197 |
198 | # Android studio 3.1+ serialized cache file
199 | .idea/caches/build_file_checksums.ser
200 |
201 | ### PyCharm Patch ###
202 | # Comment Reason: https://github.com/joeblau/gitignore.io/issues/186#issuecomment-215987721
203 |
204 | # *.iml
205 | # modules.xml
206 | # .idea/misc.xml
207 | # *.ipr
208 |
209 | # Sonarlint plugin
210 | # https://plugins.jetbrains.com/plugin/7973-sonarlint
211 | .idea/**/sonarlint/
212 |
213 | # SonarQube Plugin
214 | # https://plugins.jetbrains.com/plugin/7238-sonarqube-community-plugin
215 | .idea/**/sonarIssues.xml
216 |
217 | # Markdown Navigator plugin
218 | # https://plugins.jetbrains.com/plugin/7896-markdown-navigator-enhanced
219 | .idea/**/markdown-navigator.xml
220 | .idea/**/markdown-navigator-enh.xml
221 | .idea/**/markdown-navigator/
222 |
223 | # Cache file creation bug
224 | # See https://youtrack.jetbrains.com/issue/JBR-2257
225 | .idea/$CACHE_FILE$
226 |
227 | # CodeStream plugin
228 | # https://plugins.jetbrains.com/plugin/12206-codestream
229 | .idea/codestream.xml
230 |
231 | # Azure Toolkit for IntelliJ plugin
232 | # https://plugins.jetbrains.com/plugin/8053-azure-toolkit-for-intellij
233 | .idea/**/azureSettings.xml
234 |
235 | ### Python ###
236 | # Byte-compiled / optimized / DLL files
237 | __pycache__/
238 | *.py[cod]
239 | *$py.class
240 |
241 | # C extensions
242 | *.so
243 |
244 | # Distribution / packaging
245 | .Python
246 | build/
247 | develop-eggs/
248 | downloads/
249 | eggs/
250 | .eggs/
251 | lib/
252 | lib64/
253 | parts/
254 | sdist/
255 | var/
256 | wheels/
257 | share/python-wheels/
258 | *.egg-info/
259 | .installed.cfg
260 | *.egg
261 | MANIFEST
262 |
263 | # PyInstaller
264 | # Usually these files are written by a python script from a template
265 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
266 | *.manifest
267 | *.spec
268 |
269 | # Installer logs
270 | pip-log.txt
271 | pip-delete-this-directory.txt
272 |
273 | # Unit test / coverage reports
274 | htmlcov/
275 | .tox/
276 | .nox/
277 | .coverage
278 | .coverage.*
279 | .cache
280 | nosetests.xml
281 | coverage.xml
282 | *.cover
283 | *.py,cover
284 | .hypothesis/
285 | .pytest_cache/
286 | cover/
287 |
288 | # Translations
289 | *.mo
290 | *.pot
291 |
292 | # Django stuff:
293 | *.log
294 | local_settings.py
295 | db.sqlite3
296 | db.sqlite3-journal
297 |
298 | # Flask stuff:
299 | instance/
300 | .webassets-cache
301 |
302 | # Scrapy stuff:
303 | .scrapy
304 |
305 | # Sphinx documentation
306 | docs/_build/
307 |
308 | # PyBuilder
309 | .pybuilder/
310 | target/
311 |
312 | # Jupyter Notebook
313 |
314 | # IPython
315 |
316 | # pyenv
317 | # For a library or package, you might want to ignore these files since the code is
318 | # intended to run in multiple environments; otherwise, check them in:
319 | # .python-version
320 |
321 | # pipenv
322 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
323 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
324 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
325 | # install all needed dependencies.
326 | #Pipfile.lock
327 |
328 | # poetry
329 | # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
330 | # This is especially recommended for binary packages to ensure reproducibility, and is more
331 | # commonly ignored for libraries.
332 | # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
333 | #poetry.lock
334 |
335 | # pdm
336 | # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
337 | #pdm.lock
338 | # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
339 | # in version control.
340 | # https://pdm.fming.dev/#use-with-ide
341 | .pdm.toml
342 |
343 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
344 | __pypackages__/
345 |
346 | # Celery stuff
347 | celerybeat-schedule
348 | celerybeat.pid
349 |
350 | # SageMath parsed files
351 | *.sage.py
352 |
353 | # Environments
354 | .env
355 | .venv
356 | env/
357 | venv/
358 | ENV/
359 | env.bak/
360 | venv.bak/
361 |
362 | # Spyder project settings
363 | .spyderproject
364 | .spyproject
365 |
366 | # Rope project settings
367 | .ropeproject
368 |
369 | # mkdocs documentation
370 | /site
371 |
372 | # mypy
373 | .mypy_cache/
374 | .dmypy.json
375 | dmypy.json
376 |
377 | # Pyre type checker
378 | .pyre/
379 |
380 | # pytype static type analyzer
381 | .pytype/
382 |
383 | # Cython debug symbols
384 | cython_debug/
385 |
386 | # PyCharm
387 | # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
388 | # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
389 | # and can be added to the global gitignore or merged into this file. For a more nuclear
390 | # option (not recommended) you can uncomment the following to ignore the entire idea folder.
391 | #.idea/
392 |
393 | ### Python Patch ###
394 | # Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
395 | poetry.toml
396 |
397 | # ruff
398 | .ruff_cache/
399 |
400 | # LSP config files
401 | pyrightconfig.json
402 |
403 | ### Vim ###
404 | # Swap
405 | [._]*.s[a-v][a-z]
406 | !*.svg # comment out if you don't need vector files
407 | [._]*.sw[a-p]
408 | [._]s[a-rt-v][a-z]
409 | [._]ss[a-gi-z]
410 | [._]sw[a-p]
411 |
412 | # Session
413 | Session.vim
414 | Sessionx.vim
415 |
416 | # Temporary
417 | .netrwhist
418 | # Auto-generated tag files
419 | tags
420 | # Persistent undo
421 | [._]*.un~
422 |
423 | ### VisualStudioCode ###
424 | .vscode/
425 | # .vscode/*
426 | # !.vscode/settings.json
427 | # !.vscode/tasks.json
428 | # !.vscode/launch.json
429 | # !.vscode/extensions.json
430 | # !.vscode/*.code-snippets
431 |
432 | # Local History for Visual Studio Code
433 | .history/
434 |
435 | # Built Visual Studio Code Extensions
436 | *.vsix
437 |
438 | ### VisualStudioCode Patch ###
439 | # Ignore all local history of files
440 | .history
441 | .ionide
442 |
443 | ### Windows ###
444 | # Windows thumbnail cache files
445 | Thumbs.db
446 | Thumbs.db:encryptable
447 | ehthumbs.db
448 | ehthumbs_vista.db
449 |
450 | # Dump file
451 | *.stackdump
452 |
453 | # Folder config file
454 | [Dd]esktop.ini
455 |
456 | # Recycle Bin used on file shares
457 | $RECYCLE.BIN/
458 |
459 | # Windows Installer files
460 | *.cab
461 | *.msi
462 | *.msix
463 | *.msm
464 | *.msp
465 |
466 | # Windows shortcuts
467 | *.lnk
468 |
469 | # End of https://www.toptal.com/developers/gitignore/api/python,jupyternotebooks,vim,visualstudiocode,pycharm,emacs,linux,macos,windows
470 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "[]"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright [yyyy] [name of copyright owner]
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/xarray_sentinel/resources/sentinel1/my-safe-sentinel-1.0.xsd:
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
/xarray_sentinel/resources/sentinel1/s1-level-1-rfi.xsd:
--------------------------------------------------------------------------------
1 |
2 |
3 |
4 |
5 |
6 |
7 | RFI report associated to a noise sequence.
8 |
9 |
10 |
11 |
12 | Swath to which the RFI report applies.
13 |
14 |
15 |
16 |
17 | Sensing time of the noise sequence [UTC].
18 |
19 |
20 |
21 |
22 | True if RFI was detected in the noise sequence.
23 |
24 |
25 |
26 |
27 | Maximum value of the KL divergence statistic in the noise sequence.
28 |
29 |
30 |
31 |
32 | Maximum value of Fisher's Z statistic in the noise sequence.
33 |
34 |
35 |
36 |
37 | Maximum power spectral density recorded in a frequency bin containing RFI. It is zero if no RFI was found.
38 |
39 |
40 |
41 |
42 |
43 |
44 | List of reports of the RFI detections performed on noise sequences.
45 |
46 |
47 |
48 |
49 | This record contains an RFI detection report associated to a noise sequence.
50 |
51 |
52 |
53 |
54 |
55 | Number of rfiDetectionFromNoiseReport records within the list.
56 |
57 |
58 |
59 |
60 |
61 | Time domain RFI report.
62 |
63 |
64 |
65 |
66 | Percentage of level-0 lines affected by RFI.
67 |
68 |
69 |
70 |
71 | Average percentage of affected level-0 samples in the lines containing RFI.
72 |
73 |
74 |
75 |
76 | Maximum percentage of level-0 samples affected by RFI in the same line.
77 |
78 |
79 |
80 |
81 |
82 |
83 | Frequency domain RFI report at burst level (isolated RFIs).
84 |
85 |
86 |
87 |
88 | Percentage of level-0 lines affected by isolated RFI.
89 |
90 |
91 |
92 |
93 | Max percentage of bandwidth affected by isolated RFI in a single line.
94 |
95 |
96 |
97 |
98 |
99 |
100 | Frequency domain RFI report at burst level. Even if the frequency-domain mitigation algorithm subdivides the lines of the burst in multiple sub-blocks, this element only provides an aggregated report.
101 |
102 |
103 |
104 |
105 | Number of sub-blocks in the current burst.
106 |
107 |
108 |
109 |
110 | Number of lines in each sub-block.
111 |
112 |
113 |
114 |
115 | Report about the isolated RFI tones detected.
116 |
117 |
118 |
119 |
120 | Percentage of processing blocks affected by persistent RFI.
121 |
122 |
123 |
124 |
125 | Max percentage of bandwidth affected by persistent RFI in a single processing block.
126 |
127 |
128 |
129 |
130 |
131 |
132 | RFI report at burst level. This record contains the time and frequency domain reports for a single TOPSAR burst.
133 |
134 |
135 |
136 |
137 | Swath to which the RFI report applies.
138 |
139 |
140 |
141 |
142 | Zero Doppler azimuth time of the first line of the burst [UTC].
143 |
144 |
145 |
146 |
147 | Ratio between the in-band and out-of-band power of the burst.
148 |
149 |
150 |
151 |
152 | RFI report of the time-domain mitigation.
153 |
154 |
155 |
156 |
157 | RFI report of the frequency-domain mitigation.
158 |
159 |
160 |
161 |
162 |
163 |
164 | RFI report list at burst level.
165 |
166 |
167 |
168 |
169 | RFI report at burst level. This record contains the time and frequency domain reports for a single TOPSAR burst.
170 |
171 |
172 |
173 |
174 |
175 | Number of rfiBurstReport records within the list.
176 |
177 |
178 |
179 |
180 |
181 | Time domain RFI report list at block level.
182 |
183 |
184 |
185 |
186 | Swath to which the RFI report applies.
187 |
188 |
189 |
190 |
191 | Zero Doppler azimuth time of the first line of the processing block [UTC].
192 |
193 |
194 |
195 |
196 | Number of lines in the time-domain block.
197 |
198 |
199 |
200 |
201 | RFI report of the time-domain mitigation.
202 |
203 |
204 |
205 |
206 |
207 |
208 | Time domain RFI report at processing block level.
209 |
210 |
211 |
212 |
213 | RFI time-domain report at block level. This record contains the time domain reports for a single processing block.
214 |
215 |
216 |
217 |
218 |
219 | Number of timeDomainRfiBlockReport records within the list.
220 |
221 |
222 |
223 |
224 |
225 | Frequency domain RFI report at processing block level.
226 |
227 |
228 |
229 |
230 | Number of bins in the frequency axis.
231 |
232 |
233 |
234 |
235 | Step in Hertz between frequency axis bins.
236 |
237 |
238 |
239 |
240 | Frequency mask of the persistent RFI found in the block.
241 |
242 |
243 |
244 |
245 |
246 |
247 | Frequency domain RFI report at processing block level.
248 |
249 |
250 |
251 |
252 | Swath to which the RFI report applies.
253 |
254 |
255 |
256 |
257 | Zero Doppler azimuth time of the first line of the processing block [UTC].
258 |
259 |
260 |
261 |
262 | Number of lines in the frequency-domain block.
263 |
264 |
265 |
266 |
267 | Frequency-domain RFI report for the isolated RFIs detected.
268 |
269 |
270 |
271 |
272 | Percentage of bandwidth affected by persistent RFI.
273 |
274 |
275 |
276 |
277 | Frequency mask of the persistent RFI found in the block.
278 |
279 |
280 |
281 |
282 |
283 |
284 | Frequency domain RFI report list at processing block level.
285 |
286 |
287 |
288 |
289 | RFI frequency-domain report at block level. This record contains the frequency domain reports for a single processing block.
290 |
291 |
292 |
293 |
294 |
295 | Number of frequencyDomainRfiBlockReport records within the list.
296 |
297 |
298 |
299 |
300 |
301 | CONVERT_FUNC_DEC: SKIP
302 | CONVERT_FUNC_DEF: SKIP
303 | Enumeration of the type of RFI mitigation applied on the product.
304 |
305 |
306 |
307 |
308 |
309 |
310 |
311 |
312 |
313 |
314 | Annotation record for Sentinel-1 level 1 RFI annotations.
315 |
316 |
317 |
318 |
319 | ADS header data set record. This DSR contains information that applies to the entire data set.
320 |
321 |
322 |
323 |
324 | Type of RFI mitigation applied to the data set.
325 |
326 |
327 |
328 |
329 | RFI report list of the RFI detections performed on each noise sequence.
330 |
331 |
332 |
333 |
334 | RFI report list at burst level. This element is a list of rfiBurstReport records that contain the time and frequency domain reports for each burst of the data set. This element can only be present in EW and IW modes if either time-domain or frequency-domain RFI mitigations are performed.
335 |
336 |
337 |
338 |
339 | Time domain RFI report list at processing block level. This element is a list of timeDomainRfiBlockReport records that contain the time domain reports for each block of the data set. This element can only be present in SM and WV modes if time-domain RFI mitigation is performed.
340 |
341 |
342 |
343 |
344 | Frequency domain RFI report list at processing block level. This element is a list of frequencyDomainRfiBlockReport records that contain the frequency domain reports for each block of the data set. This element can only be present in SM and WV modes if frequency-domain RFI mitigation is performed.
345 |
346 |
347 |
348 |
349 |
350 |
351 | Sentinel-1 level 1 Radio Frequency Interference (RFI) product annotations.
352 |
353 |
354 |
355 |
--------------------------------------------------------------------------------