├── .gitignore
├── LICENSE
├── README.md
├── case_study_montblanc
├── estimate_heterosc_spatialcorr.py
└── preprocess_pleiades_spot6_dh.py
├── case_study_npi
└── preprocess_aster_spot5_dh.py
├── dev-requirements.txt
├── environment.yml
└── figures
├── fig_1_patterns_bias_noise.py
├── fig_1_patterns_bias_noise_pdf_type42.py
├── fig_2.pdf
├── fig_2.png
├── fig_3_case_study_map.py
├── fig_3_case_study_map_pdf_type42.py
├── fig_4_heteroscedasticity.py
├── fig_4_heteroscedasticity_pdf_type42.py
├── fig_5_spatial_correlations.py
├── fig_5_spatial_correlations_pdf_type42.py
├── fig_6_example_slope.py
├── fig_6_example_slope_pdf_type42.py
├── fig_7_table_2_example_glacier_volumes.py
├── fig_7_table_2_example_glacier_volumes_pdf_type42.py
├── fig_s10_qqplot_normalfit_montblanc.py
├── fig_s11_vario_estimator_robustness_montblanc.py
├── fig_s12_vario_standardization_montblanc.py
├── fig_s13_pairwise_sampling_random_vs_ring.py
├── fig_s14_table_s2_sensitivity_nb_vgm.py
├── fig_s15_table_s3_s4_sensitivity_form_vgm.py
├── fig_s16_spatial_derivative_approx.py
├── fig_s17_sim_correlated_error_field_montblanc.py
├── fig_s18_sim_assymetry_slope_aspect_errors.py
├── fig_s19_sim_3x3kernel_slope_error.py
├── fig_s1_dem_noise_examples.py
├── fig_s20_sim_curv_influence_slope_errors.py
├── fig_s21_vario_artifical_undulations.py
├── fig_s2_shift_tilt_pleiades_spot6_montblanc.py
├── fig_s3_dh_zdh_pleiades_spot6_montblanc.py
├── fig_s4_slope_curv_pleiades_montblanc.py
├── fig_s5_dh_qcorr_aster_spot5_npi.py
├── fig_s6_heterosc_stereocorr_slope_npi.py
├── fig_s7_heterosc_stereocorr_curv_npi.py
├── fig_s8_interp_2d_hetesc_montblanc.py
└── fig_s9_fit_heterosc_slope_curv.py
/.gitignore:
--------------------------------------------------------------------------------
1 | # Byte-compiled / optimized / DLL files
2 | __pycache__/
3 | *.py[cod]
4 | *$py.class
5 |
6 | # C extensions
7 | *.so
8 |
9 | # Distribution / packaging
10 | .Python
11 | build/
12 | develop-eggs/
13 | dist/
14 | downloads/
15 | eggs/
16 | .eggs/
17 | lib/
18 | lib64/
19 | parts/
20 | sdist/
21 | var/
22 | wheels/
23 | pip-wheel-metadata/
24 | share/python-wheels/
25 | *.egg-info/
26 | .installed.cfg
27 | *.egg
28 | MANIFEST
29 |
30 | # PyInstaller
31 | # Usually these files are written by a python script from a template
32 | # before PyInstaller builds the exe, so as to inject date/other infos into it.
33 | *.manifest
34 | *.spec
35 |
36 | # Installer logs
37 | pip-log.txt
38 | pip-delete-this-directory.txt
39 |
40 | # Unit test / coverage reports
41 | htmlcov/
42 | .tox/
43 | .nox/
44 | .coverage
45 | .coverage.*
46 | .cache
47 | nosetests.xml
48 | coverage.xml
49 | *.cover
50 | *.py,cover
51 | .hypothesis/
52 | .pytest_cache/
53 |
54 | # Translations
55 | *.mo
56 | *.pot
57 |
58 | # Django stuff:
59 | *.log
60 | local_settings.py
61 | db.sqlite3
62 | db.sqlite3-journal
63 |
64 | # Flask stuff:
65 | instance/
66 | .webassets-cache
67 |
68 | # Scrapy stuff:
69 | .scrapy
70 |
71 | # Sphinx documentation
72 | docs/_build/
73 |
74 | # PyBuilder
75 | target/
76 |
77 | # Jupyter Notebook
78 | .ipynb_checkpoints
79 |
80 | # IPython
81 | profile_default/
82 | ipython_config.py
83 |
84 | # pyenv
85 | .python-version
86 |
87 | # pipenv
88 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
89 | # However, in case of collaboration, if having platform-specific dependencies or dependencies
90 | # having no cross-platform support, pipenv may install dependencies that don't work, or not
91 | # install all needed dependencies.
92 | #Pipfile.lock
93 |
94 | # PEP 582; used by e.g. github.com/David-OConnor/pyflow
95 | __pypackages__/
96 |
97 | # Celery stuff
98 | celerybeat-schedule
99 | celerybeat.pid
100 |
101 | # SageMath parsed files
102 | *.sage.py
103 |
104 | # Environments
105 | .env
106 | .venv
107 | env/
108 | venv/
109 | ENV/
110 | env.bak/
111 | venv.bak/
112 |
113 | # Spyder project settings
114 | .spyderproject
115 | .spyproject
116 |
117 | # Rope project settings
118 | .ropeproject
119 |
120 | # mkdocs documentation
121 | /site
122 |
123 | # mypy
124 | .mypy_cache/
125 | .dmypy.json
126 | dmypy.json
127 |
128 | # Pyre type checker
129 | .pyre/
130 |
131 | # Other scripts
132 | other/
133 |
134 | # PyCharm
135 | .idea
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | MIT License
2 |
3 | Copyright (c) 2022 Romain Hugonnet
4 |
5 | Permission is hereby granted, free of charge, to any person obtaining a copy
6 | of this software and associated documentation files (the "Software"), to deal
7 | in the Software without restriction, including without limitation the rights
8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9 | copies of the Software, and to permit persons to whom the Software is
10 | furnished to do so, subject to the following conditions:
11 |
12 | The above copyright notice and this permission notice shall be included in all
13 | copies or substantial portions of the Software.
14 |
15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21 | SOFTWARE.
22 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dem_error_study
2 |
3 | Code and results of [**Hugonnet et al. (2022), *Uncertainty analysis of digital elevation models by spatial inference from stable terrain***](https://doi.org/10.1109/jstars.2022.3188922). :globe_with_meridians: :mount_fuji:
4 |
5 | The dataset is available at **[https://doi.org/10.5281/zenodo.7298913](https://doi.org/10.5281/zenodo.7298913)**.
6 |
7 | Below is a short guide to: perform the uncertainty analysis of your own DEMs, retrieve the case study datasets, reproduce the processing steps with the case studies, reproduce the figures and tables of the paper.
8 |
9 |
10 |
11 | ## Uncertainty analysis of your own data with xDEM
12 |
13 | The methods of this study are implemented in [xDEM](https://github.com/GlacioHack/xdem), in routines that can be used to perform uncertainty analysis of your
14 | own DEM data.
15 |
16 | There are **three basic examples** with:
17 |
18 | - A single-line pipeline to [estimate an elevation error map](https://xdem.readthedocs.io/en/latest/basic_examples/plot_infer_heterosc.html#sphx-glr-basic-examples-plot-infer-heterosc-py),
19 | - A single-line pipeline to [estimate the spatial correlation of errors](https://xdem.readthedocs.io/en/latest/basic_examples/plot_infer_spatial_correlation.html#sphx-glr-basic-examples-plot-infer-spatial-correlation-py),
20 | - A single-line pipeline to [propagate elevation errors spatially](https://xdem.readthedocs.io/en/latest/basic_examples/plot_spatial_error_propagation.html#sphx-glr-basic-examples-plot-spatial-error-propagation-py).
21 |
22 | Additionally, there are **three advanced examples** with:
23 |
24 | - The detailed steps to [estimate and model the heteroscedasticity of elevation errors](https://xdem.readthedocs.io/en/latest/advanced_examples/plot_heterosc_estimation_modelling.html#sphx-glr-advanced-examples-plot-heterosc-estimation-modelling-py),
25 | - The detailed steps to [estimate and model the spatial correlation of elevation errors](https://xdem.readthedocs.io/en/latest/advanced_examples/plot_variogram_estimation_modelling.html#sphx-glr-advanced-examples-plot-variogram-estimation-modelling-py),
26 | - The detailed steps to [standardize elevation differences to use stable terrain as a proxy](https://xdem.readthedocs.io/en/latest/advanced_examples/plot_standardization.html#sphx-glr-advanced-examples-plot-standardization-py).
27 |
28 | The full documentation of xDEM can be found at [https://xdem.readthedocs.io/](https://xdem.readthedocs.io/).
29 |
30 | ***Note at the date of 07.11.22:** xDEM is still in development (version 0.0.7), and its documentation in
31 | construction. Compared to version 0.0.6 used in this repository, several changes were added to xDEM late 2022,
32 | including:*
33 |
34 | - *Construction of error pipelines that combines all steps,*
35 | - *Addition of existing gallery examples and streamlining of existing ones,*
36 | - *Minor fixes and improvements of routines.*
37 |
38 |
39 | ## Retrieve the case study datasets
40 |
41 | The dataset consists of:
42 | 1. **Nearly-simultaneous Pléiades–SPOT-6 elevation differences at the Mont-Blanc massif, the Pléiades DEM used as a
43 | reference for alignment and deriving terrain attributes, the SPOT-6 DEM** (.tif,
44 | *~1 GB*) at
45 | 5 m posting and the **forest mask derived from ESA CCI** (.shp, *~5 kB*) available at
46 | [https://doi.org/10.5281/zenodo.7298913](https://doi.org/10.5281/zenodo.7298913).
47 | 2. **Nearly-simultaneous ASTER–SPOT-5 elevation differences at the Northern Patagonian Icefield, the ASTER DEM
48 | used as a reference, the quality of stereo-correlation out of MicMac, and the SPOT-5 DEM** at 30 m
49 | posting (.tif, *~150 MB*) available at [https://doi.org/10.5281/zenodo.7298913](https://doi.org/10.5281/zenodo.7298913).
50 |
51 |
52 | ## Reproduce the processing steps with the case studies
53 |
54 | ### Setup environment
55 |
56 | Most scripts rely on the code assembled in the package [xDEM](https://github.com/GlacioHack/xdem) which in turn relies on [SciKit-GStat](https://github.com/mmaelicke/scikit-gstat).
57 | Some routines also rely on [GSTools](https://github.com/GeoStat-Framework/GSTools). You can rapidly
58 | install a working environment containing those packages and their dependencies with the
59 | *environment.yml* file, located at the root of the repository, using [conda](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html):
60 |
61 | ```sh
62 | conda env create -f environment.yml
63 | ```
64 |
65 | *Note: Due to continuous development changes, xDEM is set to v0.0.6 in the environment file to exactly reproduce the
66 | processing steps of the paper.*
67 |
68 | ### How to use
69 |
70 | Scripts for reproducing the processing steps are located in *case_study_montblanc/* or *case_study_npi/*. Those are generally quite short as they use one-liner routines of xDEM.
71 | Some computations (e.g., simulation of correlated field) are performed only in the *figures/* scripts. In the future, those might be integrated in xDEM.
72 |
73 | While all scripts are commented, the details on the functions used are available through **[xDEM's documentation](https://xdem.readthedocs.io/)**,
74 | **[SciKit-GStat's documentation](https://mmaelicke.github.io/scikit-gstat/)** and **[GSTools's documentation](https://geostat-framework.readthedocs.io/projects/gstools/en/stable/)**.
75 |
76 |
77 | ## Reproduce the figures and tables of the paper
78 |
79 | Scripts for reproducing the figures and tables are located in *figures/*. These scripts also depend on the environment
80 | file `environment.yml` as they rely on [Cartopy](https://scitools.org.uk/cartopy/docs/latest/) and
81 | [Seaborn](https://seaborn.pydata.org/) in addition to Matplotlib. In some occasions, the figure scripts duplicate the
82 | processing steps done in *case_study_montblanc/* or *case_study_npi/* for plotting purposes (e.g., violin plots require
83 | the full distribution of samples, not only the binned estimates of dispersion).
84 |
85 | For plotting figures of your own data, xDEM provides simpler plotting tools of binned data and variograms
86 | (see [example gallery](https://xdem.readthedocs.io/en/latest/basic_examples/index.html)).
87 |
88 |
89 | Enjoy! :volcano:
90 |
--------------------------------------------------------------------------------
/case_study_montblanc/estimate_heterosc_spatialcorr.py:
--------------------------------------------------------------------------------
1 | """Estimate heteroscedasticity and spatial correlation of errors for the Mont-Blanc case study"""
2 | import numpy as np
3 | import pandas as pd
4 | import xdem
5 | import geoutils as gu
6 |
7 | # Open files: elevation differences, reference DEM, glacier outlines (RGI) and forest mask
8 | fn_ddem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
9 | fn_pleiades = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
10 | fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
11 | fn_forest = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'
12 |
13 | # Custom mask to see more terrain on Fig. 5 panel b
14 | # fn_forest = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mt-Blanc.shp'
15 |
16 | # Equivalent file paths when running on the server
17 | # fn_ddem = '/data/icesat/travail_en_cours/romain/dem_error_study/final_run/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
18 | # fn_pleiades = '/data/icesat/travail_en_cours/romain/dem_error_study/final_run/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
19 | # fn_shp = '/data/icesat/travail_en_cours/romain/data/outlines/rgi60/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
20 | # fn_forest = '/data/icesat/travail_en_cours/romain/dem_error_study/final_run/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'
21 |
22 |
23 | pleia_ddem = gu.Raster(fn_ddem)
24 | ref_dem = gu.Raster(fn_pleiades)
25 | glaciers_outlines = gu.Vector(fn_shp)
26 | forest_outlines = gu.Vector(fn_forest)
27 | mask_glacier = glaciers_outlines.create_mask(pleia_ddem)
28 | mask_forest = forest_outlines.create_mask(pleia_ddem)
29 |
30 | # Remove forest pixels and very large outliers (absolute differences above 500 m)
31 | pleia_ddem.data[mask_forest] = np.nan
32 | pleia_ddem.data[np.abs(pleia_ddem.data)>500] = np.nan
33 |
34 | slope, planc, profc = xdem.terrain.get_terrain_attribute(ref_dem, attribute=['slope', 'planform_curvature',
35 | 'profile_curvature'])
36 | maxabsc = np.maximum(np.abs(planc), np.abs(profc))  # maximum absolute curvature of the two curvature types
37 |
38 | # 0/ Filter large outliers per category of slope and curvature
39 | bins_slope = [0, 2.5, 5, 10, 15, 20, 30, 40, 50, 70, 90]
40 | bins_curv = [0, 0.2, 0.5, 1, 2, 3, 4, 6, 10, 20, 50]
41 | for i in range(len(bins_slope) - 1):
42 | # Subset by slope category
43 | subset = np.logical_and(slope.data >= bins_slope[i], slope.data < bins_slope[i + 1])
44 | dh_sub = pleia_ddem.data[subset]
45 | # Robust center (median) and dispersion (NMAD) of the category
46 | med_sub = np.nanmedian(dh_sub)
47 | nmad_sub = xdem.spatialstats.nmad(dh_sub)
48 | # Mask values further than 7 NMAD from the category median
49 | pleia_ddem.data[np.logical_and(subset, np.abs(pleia_ddem.data-med_sub) > 7 * nmad_sub)] = np.nan
50 | for i in range(len(bins_curv) - 1):
51 | # Subset by curvature category
52 | subset = np.logical_and(maxabsc >= bins_curv[i], maxabsc < bins_curv[i + 1])
53 | dh_sub = pleia_ddem.data[subset]
54 | # Robust center (median) and dispersion (NMAD) of the category
55 | med_sub = np.nanmedian(dh_sub)
56 | nmad_sub = xdem.spatialstats.nmad(dh_sub)
57 | # Mask values further than 7 NMAD from the category median
58 | pleia_ddem.data[np.logical_and(subset, np.abs(pleia_ddem.data-med_sub) > 7 * nmad_sub)] = np.nan
59 |
60 |
61 | # Subsample on stable terrain (outside glacier outlines)
62 | pleia_ddem_sta = pleia_ddem.data[~mask_glacier]
63 | slope_sta = slope.data[~mask_glacier]
64 | maxabsc_sta = maxabsc[~mask_glacier]
65 |
66 | # 1/ Estimate heteroscedasticity with slope and maximum curvature
67 | df_sub = xdem.spatialstats.nd_binning(pleia_ddem_sta, list_var=[slope_sta, maxabsc_sta], list_var_names=['slope', 'maxc'], list_var_bins=(bins_slope, bins_curv))
68 | df_sub['slope_mid'] = pd.IntervalIndex(df_sub.slope).mid.values
69 | df_sub['maxc_mid'] = pd.IntervalIndex(df_sub.maxc).mid.values
70 |
71 | # Save binned estimates to file
72 | # df_sub.to_csv('/data/icesat/travail_en_cours/romain/dem_error_study/final_run/df_heteroscedas_slope_curv.csv')
73 |
74 | # Interpolate the binned NMAD into a continuous 2-D (slope, curvature) error function
75 | fn = xdem.spatialstats.interp_nd_binning(df_sub, list_var_names=['slope', 'maxc'], statistic='nmad', min_count=30)
76 |
77 | # Create an error map, filter outliers, and standardize elevation differences
78 | maxabsc[maxabsc>50] = 50  # cap curvature at the last bin edge to stay in the interpolation domain
79 | dh_err = fn((slope.data, maxabsc))
80 | std_dh = pleia_ddem.data.data/dh_err
81 | std_dh[np.abs(std_dh)>7*xdem.spatialstats.nmad(std_dh)] = np.nan
82 | std_dh /= xdem.spatialstats.nmad(std_dh)  # re-normalize so the standardized differences have unit spread
83 | std_r = pleia_ddem.copy(new_array=std_dh)
84 |
85 | del dh_err, maxabsc, slope, planc, profc, slope_sta, maxabsc_sta, pleia_ddem_sta  # free memory before variogram sampling
86 | # Save standardized elevation difference map to file
87 | std_r.save('/data/icesat/travail_en_cours/romain/dem_error_study/final_run/dh_NK_Deramp_std.tif')
88 | # This one is saved with a custom mask to see more terrain on Fig. 5
89 | # std_r.save('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_NK_Deramp_std_customforest.tif')
90 |
91 | # Subsample on glacier terrain (complement of the stable-terrain subset)
92 | pleia_ddem_gla = pleia_ddem.copy()
93 | pleia_ddem_gla.data[~mask_glacier] = np.nan
94 | pleia_ddem.data.data[mask_glacier] = np.nan
95 |
96 | std_dh_gla = np.copy(std_dh)
97 | std_dh_gla[~mask_glacier] = np.nan
98 | std_dh[mask_glacier] = np.nan
99 |
100 | # 2/ Run patches method with a fixed random state for validation
101 | # areas_emp = [100 * 2 ** i for i in range(18)]
102 | # list_stderr_empirical, list_stderr_empirical_gla = ([] for i in range(2))
103 | # for area_emp in areas_emp:
104 | #
105 | #     print('Working on patches for area size: '+str(area_emp))
106 | #
107 | #     # First, sample intensively circular patches of a given area, and derive the mean elevation differences
108 | #     df_patches = xdem.spatialstats.patches_method(std_dh.squeeze(), gsd=pleia_ddem.res[0], area=area_emp, n_patches=1000, random_state=42,
109 | #                                                   perc_min_valid=80.)
110 | #     df_patches_gla = xdem.spatialstats.patches_method(std_dh_gla.squeeze(), gsd=pleia_ddem.res[0], area=area_emp, n_patches=1000, random_state=42,
111 | #                                                       perc_min_valid=80.)
112 | #
113 | #     # Second, estimate the dispersion of the means of each patch, i.e. the standard error of the mean
114 | #     if len(df_patches)>30:
115 | #         stderr_empirical = xdem.spatialstats.nmad(df_patches['nanmedian'].values)
116 | #     else:
117 | #         stderr_empirical = np.nan
118 | #     if len(df_patches_gla) > 30:
119 | #         stderr_empirical_gla = xdem.spatialstats.nmad(df_patches_gla['nanmedian'].values)
120 | #     else:
121 | #         stderr_empirical_gla = np.nan
122 | #
123 | #     list_stderr_empirical.append(stderr_empirical)
124 | #     list_stderr_empirical_gla.append(stderr_empirical_gla)
125 | #
126 | # # Save patches results to file
127 | # df_all_patches = pd.DataFrame()
128 | # df_all_patches = df_all_patches.assign(area=areas_emp, stderr_emp_sta = list_stderr_empirical, stderr_emp_gla = list_stderr_empirical_gla)
129 | # df_all_patches.to_csv('/data/icesat/travail_en_cours/romain/dem_error_study/final_run/df_patches_sta_gla.csv')
130 |
131 | # 3/ Estimate variograms of standardized differences on stable and glacier terrain
132 | print('FIRST VARIO')
133 | df_vgm_sta = xdem.spatialstats.sample_empirical_variogram(pleia_ddem.data.data, pleia_ddem.res[0],
134 | subsample=50, n_variograms=100, runs=20,
135 | estimator='dowd', n_jobs=5, random_state=42, verbose=True)
136 | df_vgm_sta.to_csv('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_sta.csv')
137 | print('SECOND VARIO')
138 | df_vgm_gla = xdem.spatialstats.sample_empirical_variogram(pleia_ddem_gla.data.data, pleia_ddem.res[0],
139 | subsample=50, n_variograms=100, runs=20,
140 | estimator='dowd', n_jobs=5, random_state=42, verbose=True)
141 | df_vgm_gla.to_csv('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_gla.csv')
142 | print('THIRD VARIO')
143 | df_std_sta = xdem.spatialstats.sample_empirical_variogram(std_dh, pleia_ddem.res[0],
144 | subsample=50, n_variograms=100, runs=20,
145 | estimator='dowd', n_jobs=5, random_state=42, verbose=True)
146 | df_std_sta.to_csv('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_std_sta.csv')
147 | print('FOURTH VARIO')
148 | df_std_gla = xdem.spatialstats.sample_empirical_variogram(std_dh_gla, pleia_ddem.res[0],
149 | subsample=50, n_variograms=100, runs=20,
150 | estimator='dowd', n_jobs=5, random_state=42, verbose=True)
151 | df_std_gla.to_csv('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_std_gla.csv')
152 |
153 | # Save variograms to files
154 |
155 | # Equivalent output paths when running on the server
156 | # df_vgm_sta.to_csv('/data/icesat/travail_en_cours/romain/dem_error_study/final_run/df_vgm_sta.csv')
157 | # df_vgm_gla.to_csv('/data/icesat/travail_en_cours/romain/dem_error_study/final_run/df_vgm_gla.csv')
158 | # df_std_sta.to_csv('/data/icesat/travail_en_cours/romain/dem_error_study/final_run/df_vgm_std_sta.csv')
159 | # df_std_gla.to_csv('/data/icesat/travail_en_cours/romain/dem_error_study/final_run/df_vgm_std_gla.csv')
--------------------------------------------------------------------------------
/case_study_montblanc/preprocess_pleiades_spot6_dh.py:
--------------------------------------------------------------------------------
1 | """Pre-process Pléiades and SPOT-6 DEMs into elevation changes for the Mont-Blanc case study"""
2 | import os
3 | import warnings
4 | with warnings.catch_warnings():
5 | warnings.filterwarnings("ignore",category=DeprecationWarning)
6 | import xdem
7 | import geoutils as gu
8 | import numpy as np
9 |
10 | # Open DEMs and outlines (RGI glacier outlines and ESA CCI forest mask)
11 | fn_glacier_outlines = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_renamed/11_rgi60_CentralEurope/region_11_rgi60_CentralEurope.shp'
12 | fn_dem_spot = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/SPOT6_Mont-Blanc_2017-10-24_DEM_5m.tif'
13 | fn_dem_pleiades = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
14 | fn_forest_shp_simplified='/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'
15 |
16 | dem_spot = xdem.DEM(fn_dem_spot)
17 | dem_pleiades = xdem.DEM(fn_dem_pleiades)
18 | glacier_outlines = gu.Vector(fn_glacier_outlines)
19 | forest_outlines = gu.Vector(fn_forest_shp_simplified)
20 |
21 | # Reproject the SPOT-6 DEM onto the Pléiades DEM grid
22 | reproj_dem_spot = dem_spot.reproject(dem_pleiades)
23 | init_dh = reproj_dem_spot.data - dem_pleiades.data
24 |
25 | # Create mask of inlier data: non-glacier, non-forest pixels within 3 NMAD of the median difference
26 | inlier_mask = np.logical_and.reduce((~glacier_outlines.create_mask(dem_pleiades),
27 | ~forest_outlines.create_mask(dem_pleiades),
28 | np.abs(init_dh - np.nanmedian(init_dh))<3*xdem.spatialstats.nmad(init_dh)))
29 |
30 | # Coregistration pipeline with horizontal alignment, tilt, and second horizontal alignment
31 | nk_deramp = xdem.coreg.NuthKaab() + xdem.coreg.Deramp() + xdem.coreg.NuthKaab()
32 | nk_deramp.fit(dem_pleiades, reproj_dem_spot, inlier_mask=inlier_mask, verbose=True)
33 | aligned_dem_spot = nk_deramp.apply(reproj_dem_spot)
34 |
35 | # Save co-registered elevation differences to file
36 | dh = dem_pleiades - aligned_dem_spot
37 | dh.save('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif')
38 |
39 | # For Figure S2: saving independent steps of co-registration
40 | nk = xdem.coreg.NuthKaab()
41 | nk.fit(reproj_dem_spot, dem_pleiades, inlier_mask=inlier_mask, verbose=True)
42 | aligned_dem_pleiades = nk.apply(dem_pleiades)
43 |
44 | # Elevation change due to the horizontal (Nuth & Kääb) shift alone
45 | dh_nk = aligned_dem_pleiades - dem_pleiades
46 | fn_nk = os.path.join(os.path.dirname(fn_dem_pleiades), 'dh_shift_nk_Pleiades.tif')
47 | dh_nk.save(fn_nk)
48 |
49 | deramp = xdem.coreg.Deramp()
50 | deramp.fit(reproj_dem_spot, aligned_dem_pleiades, inlier_mask=inlier_mask, verbose=True)
51 | deramped_dem_pleiades = deramp.apply(aligned_dem_pleiades)
52 |
53 | # Elevation change due to the deramping (tilt correction) step alone
54 | dh_deramp = deramped_dem_pleiades - aligned_dem_pleiades
55 | fn_deramp = os.path.join(os.path.dirname(fn_dem_pleiades), 'dh_shift_deramp_Pleiades.tif')
56 | dh_deramp.save(fn_deramp)
--------------------------------------------------------------------------------
/case_study_npi/preprocess_aster_spot5_dh.py:
--------------------------------------------------------------------------------
1 | """Pre-process ASTER and SPOT-5 DEMs into elevation changes for the NPI case study"""
2 | import warnings
3 | with warnings.catch_warnings():
4 | warnings.filterwarnings("ignore",category=DeprecationWarning)
5 | import xdem
6 | import geoutils as gu
7 | import numpy as np
8 | import matplotlib.pyplot as plt
9 |
10 | # Open DEMs and outlines (RGI glacier outlines for the Southern Andes)
11 | fn_glacier_outlines = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/17_rgi60_SouthernAndes/17_rgi60_SouthernAndes.shp'
12 | fn_dem_spot = '/home/atom/ongoing/work_stderr_dem/case_study_npi/SPOT5_2012-03-18_NPI_NDV.tif'
13 | fn_dem_aster = '/home/atom/ongoing/work_stderr_dem/case_study_npi/AST_L1A_00303182012144228/ASTER_NPI_00303182012144228_Z.tif'
14 | fn_corr='/home/atom/ongoing/work_stderr_dem/case_study_npi/AST_L1A_00303182012144228/ASTER_NPI_00303182012144228_CORR.tif'
15 |
16 | dem_spot = gu.Raster(fn_dem_spot)
17 | dem_aster = xdem.DEM(fn_dem_aster)
18 | glacier_outlines = gu.Vector(fn_glacier_outlines)
19 |
20 | # Reproject the SPOT-5 DEM onto the ASTER DEM grid
21 | reproj_dem_spot = dem_spot.reproject(dem_aster)
22 | init_dh = reproj_dem_spot - dem_aster
23 |
24 | # Open quality of stereo-correlation raster; pixels with low correlation quality are excluded from the alignment
25 | corr = gu.Raster(fn_corr)
26 | mask_highcorr = corr.data>70.  # keep pixels with a stereo-correlation score above 70
27 | mask_noglacier = ~glacier_outlines.create_mask(dem_aster)
28 | mask_nooutliers = np.abs(init_dh.data - np.nanmedian(init_dh.data))<3*xdem.spatialstats.nmad(init_dh.data)
29 |
30 | # Create inlier mask: stable terrain, no gross outliers, high stereo-correlation
31 | inlier_mask = np.logical_and.reduce((mask_noglacier, mask_nooutliers, mask_highcorr))
32 |
33 | # Coregistration pipeline with horizontal alignment, tilt, and second horizontal alignment
34 | nk_deramp = xdem.coreg.NuthKaab() + xdem.coreg.Deramp() + xdem.coreg.NuthKaab()
35 | nk_deramp.fit(dem_aster, reproj_dem_spot, inlier_mask=inlier_mask, verbose=True)
36 | aligned_dem_spot = nk_deramp.apply(reproj_dem_spot)
37 |
38 | # Save co-registered elevation differences to file
39 | dh = dem_aster - aligned_dem_spot
40 | dh.save('/home/atom/ongoing/work_stderr_dem/case_study_npi/dh_ASTER-SPOT5_NPI_NK_Deramp.tif')
--------------------------------------------------------------------------------
/dev-requirements.txt:
--------------------------------------------------------------------------------
1 | xdem
2 | cartopy
3 | seaborn
4 | gstools
5 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: dem-error
2 | channels:
3 | - conda-forge
4 | dependencies:
5 | - xdem=0.0.6
6 | - cartopy
7 | - seaborn
8 | - gstools
9 |
--------------------------------------------------------------------------------
/figures/fig_1_patterns_bias_noise.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure 1: patterns of random and systematic errors in DEMs"""
2 | import matplotlib.pyplot as plt
3 | import matplotlib.patches as mpatches
4 | import matplotlib.colors as colors
5 | import numpy as np
6 | import cartopy.crs as ccrs
7 | import geoutils as gu
8 | import xdem
9 |
10 | # Showing dh before and after alignment with hillshade
11 | fn_hs = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Mont-Blanc_2017-10-25_DEM_5m_hillshade.tif'
12 | fn_dh_nk = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_shift_nk_Pleiades.tif'
13 | fn_dh_final_dh = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
14 |
15 | # Showing patterns of noise
16 | fn_dh_pleiades_noise = '/home/atom/ongoing/work_stderr_dem/noise_examples/artefact_dh/dh_Peru_2017-09-01_PHR_vs_2017-08-20_PHR.tif'
17 | fn_dh_kh9_noise = '/home/atom/ongoing/work_stderr_dem/noise_examples/artefact_dh/DZB1212-500129_003_004-DEM_coreg-diff_utm.tif'
18 |
19 | crop_ext = [338680, 5086760, 340680, 5087460]
20 |
21 | fig = plt.figure(figsize=(6, 5.5))
22 |
23 | # 1/ Plot the hillshade of panel a
24 |
25 | ax = fig.add_axes([0.25,0.79,0.5,0.2],
26 | projection=ccrs.UTM(32), label='Hillshade')
27 |
28 | hs = gu.Raster(fn_hs)
29 | hs.crop(crop_ext)
30 | plt_extent=[hs.bounds.left, hs.bounds.right, hs.bounds.bottom, hs.bounds.top]
31 |
32 | color1 = colors.to_rgba('black')
33 | color2 = colors.to_rgba('white')
34 | cmap_ll = colors.LinearSegmentedColormap.from_list('my_cmap_hs', [color1, color2], 256)
35 | cmap_ll.set_bad(color='None')
36 |
37 | ax.imshow(hs.data[0, :, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap_ll,
38 | interpolation=None, zorder=2)
39 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
40 | ax.text(-0.1, 0.5, 'a', transform=ax.transAxes, ha='left', va='center', fontweight='bold', fontsize=14)
41 |
42 | y_extent = hs.bounds.top - hs.bounds.bottom
43 | x_extent = hs.bounds.right - hs.bounds.left
44 | ax.add_patch(mpatches.Rectangle((crop_ext[2] - x_extent/20 - 400, crop_ext[1] + y_extent/5),200, 30,
45 | edgecolor='black',facecolor='black',transform=ccrs.UTM(32),zorder=10,linewidth=0.5))
46 | ax.add_patch(mpatches.Rectangle((crop_ext[2] - x_extent/20 - 200, crop_ext[1] + y_extent/5),200, 30,
47 | edgecolor='black',facecolor='white',transform=ccrs.UTM(32),zorder=10,linewidth=0.5))
48 | ax.text(crop_ext[2] - x_extent/20 - 400, crop_ext[1] + y_extent/5 - 10,'0',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
49 | ax.text(crop_ext[2] - x_extent/20 - 200, crop_ext[1] + y_extent/5 - 10,'200',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
# Scale-bar unit and end labels finishing panel a (positions in UTM 32N metres).
50 | ax.text(crop_ext[2] - x_extent/20 - 200, crop_ext[1] + y_extent/5 - 70,'m',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
51 | ax.text(crop_ext[2] - x_extent/20 - 0, crop_ext[1] + y_extent/5 - 10,'400',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
52 |
53 |
54 | # 2/ Plot the horizontal shift biases of panel a
55 |
# Second map of panel a: elevation difference BEFORE horizontal alignment.
56 | ax = fig.add_axes([0,0.55,0.5,0.2],
57 |                   projection=ccrs.UTM(32), label='Biases')
58 |
59 |
60 | dh = gu.Raster(fn_dh_nk)
61 | dh.crop(crop_ext)
# Masked raster -> plain float array with NaNs where invalid.
62 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh)
63 |
# .copy() before set_bad(): mutating a registered colormap in place is
# deprecated since matplotlib 3.3 and raises from 3.7 on.
64 | cmap = plt.get_cmap('RdYlBu').copy()
65 | cmap.set_bad(color='None')
66 |
# NOTE(review): vmin/vmax of (-21, -1) centre the colour scale on roughly
# -11 m rather than zero — presumably to visualize the shift bias; confirm.
67 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
68 |           interpolation=None, zorder=2, vmin=-21, vmax=-1)
69 | ax.text(0.5, 1.02, 'Horizontal shift biases', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
70 |
71 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
72 |
73 | # 3/ Plot the residuals after correcting horizontal shift
74 |
# Third map of panel a: residual elevation difference AFTER alignment.
75 | ax = fig.add_axes([0.5,0.55,0.5,0.2],
76 |                   projection=ccrs.UTM(32), label='Heterosc')
77 |
78 |
79 | dh_final = gu.Raster(fn_dh_final_dh)
80 | dh_final.crop(crop_ext)
81 | plt_extent=[dh_final.bounds.left, dh_final.bounds.right, dh_final.bounds.bottom, dh_final.bounds.top]
82 |
83 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh_final)
84 | cmap = plt.get_cmap('RdYlBu').copy()
85 | cmap.set_bad(color='None')
86 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
87 |           interpolation=None, zorder=2, vmin=-10, vmax=10)
88 |
89 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
90 | ax.text(0.5, 1.02, 'After alignment', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
91 |
92 |
# Shared horizontal colourbar placed between the two dh maps of panel a
# (negative x of the inset extends it under the left-hand axes).
93 | cbaxes = ax.inset_axes([-0.25, -0.125, 0.5, 0.075], zorder=10)
94 |
95 | norm = colors.Normalize(vmin=-10, vmax=10)
96 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
97 | sm.set_array([])
98 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-10, -5, 0, 5, 10], orientation='horizontal', extend='both', shrink=0.2)
99 | cb.set_label('Elevation difference (m)')
102 | # 4/ Plot the along-track undulation noise from a Pléiades/Pléiades difference
103 |
# Panel b: Pléiades-minus-Pléiades dh over Peru (UTM zone 19S extent).
104 | ax = fig.add_axes([0.05,0.05,0.4,0.425],
105 |                   projection=ccrs.UTM(19), label='Noise Pléiades')
106 |
107 | dh_pleiades = gu.Raster(fn_dh_pleiades_noise)
108 | crop_ext_pleiades = [297000, 8455000, 309000, 8463000]
109 | dh_pleiades.crop(crop_ext_pleiades)
110 |
111 | plt_extent=[dh_pleiades.bounds.left, dh_pleiades.bounds.right, dh_pleiades.bounds.bottom, dh_pleiades.bounds.top]
112 |
113 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh_pleiades)
# Mask |dh| > 5 m — presumably outliers/real change — so the ±1 m
# along-track undulation pattern stands out; TODO confirm threshold choice.
114 | hs_arr[np.abs(hs_arr)>5] = np.nan
115 | cmap = plt.get_cmap('RdYlBu').copy()
116 | cmap.set_bad(color='None')
117 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(19), cmap=cmap,
118 |           interpolation=None, zorder=2, vmin=-1, vmax=1)
119 |
120 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
# Panel letter and title.
121 | ax.text(-0.1, 1.1, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
122 | ax.text(0.5, 1.02, 'Along-track undulations', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
123 |
# Manual 2 km scale bar (two 1 km rectangles) with 0/1/2 km tick labels.
124 | y_extent = dh_pleiades.bounds.top - dh_pleiades.bounds.bottom
125 | x_extent = dh_pleiades.bounds.right - dh_pleiades.bounds.left
126 | ax.add_patch(mpatches.Rectangle((crop_ext_pleiades[2] - x_extent/20 - 2000, crop_ext_pleiades[1] + y_extent/7),1000, 150,
127 |                                 edgecolor='black',facecolor='black',transform=ccrs.UTM(19),zorder=10,linewidth=0.5))
128 | ax.add_patch(mpatches.Rectangle((crop_ext_pleiades[2] - x_extent/20 - 1000, crop_ext_pleiades[1] + y_extent/7),1000, 150,
129 |                                 edgecolor='black',facecolor='white',transform=ccrs.UTM(19),zorder=10,linewidth=0.5))
130 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 2000, crop_ext_pleiades[1] + y_extent/7 - 100,'0',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
131 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 1000, crop_ext_pleiades[1] + y_extent/7 - 100,'1',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
132 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 1000, crop_ext_pleiades[1] + y_extent/7 - 600,'km',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
133 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 0, crop_ext_pleiades[1] + y_extent/7 - 100,'2',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
134 |
135 |
# Horizontal colourbar below panel b (±1 m range).
136 | cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.05], zorder=10)
137 |
138 | norm = colors.Normalize(vmin=-1, vmax=1)
139 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
140 | sm.set_array([])
141 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-1, -0.5, 0, 0.5, 1], orientation='horizontal', extend='both', shrink=0.2)
142 | cb.set_label('Elevation difference (m)')
143 |
144 | # 5/ Plot the digitization artefacts from KH-9 imagery
145 |
146 | ax = fig.add_axes([0.55,0.05,0.4,0.425],
147 | projection=ccrs.UTM(6), label='KH9')
148 |
149 | crop_ext_kh9 = [445000, 7634000, 505000, 7672000]
150 |
151 |
152 | dh_kh9 = gu.Raster(fn_dh_kh9_noise)
153 | dh_kh9.crop(crop_ext_kh9)
154 | plt_extent=[dh_kh9.bounds.left, dh_kh9.bounds.right, dh_kh9.bounds.bottom, dh_kh9.bounds.top]
155 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh_kh9)
156 |
157 | cmap = plt.get_cmap('RdYlBu')
158 | cmap.set_bad(color='None')
159 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(6), cmap=cmap,
160 | interpolation=None, zorder=2, vmin=-10, vmax=10)
161 |
162 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
163 | ax.text(-0.1, 1.1, 'c', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
164 | ax.text(0.5, 1.02, 'Digitization artefacts', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
165 |
166 | y_extent = dh_kh9.bounds.top - dh_kh9.bounds.bottom
167 | x_extent = dh_kh9.bounds.right - dh_kh9.bounds.left
168 | ax.add_patch(mpatches.Rectangle((crop_ext_kh9[2] - x_extent/20 - 10000, crop_ext_kh9[1] + y_extent/7),5000, 750,
169 | edgecolor='black',facecolor='black',transform=ccrs.UTM(6),zorder=10,linewidth=0.5))
170 | ax.add_patch(mpatches.Rectangle((crop_ext_kh9[2] - x_extent/20 - 5000, crop_ext_kh9[1] + y_extent/7),5000, 750,
171 | edgecolor='black',facecolor='white',transform=ccrs.UTM(6),zorder=10,linewidth=0.5))
172 | ax.text(crop_ext_kh9[2] - x_extent/20 - 10000, crop_ext_kh9[1] + y_extent/7 - 500,'0',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
173 | ax.text(crop_ext_kh9[2] - x_extent/20 - 5000, crop_ext_kh9[1] + y_extent/7 - 500,'5',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
174 | ax.text(crop_ext_kh9[2] - x_extent/20 - 5000, crop_ext_kh9[1] + y_extent/7 - 3000,'km',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
175 | ax.text(crop_ext_kh9[2] - x_extent/20 - 0, crop_ext_kh9[1] + y_extent/7 - 500,'10',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
176 |
177 |
178 | cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.05], zorder=10)
179 |
180 | norm = colors.Normalize(vmin=-10, vmax=10)
181 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
182 | sm.set_array([])
183 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-10, -5, 0, 5, 10], orientation='horizontal', extend='both', shrink=0.2)
184 | cb.set_label('Elevation difference (m)')
185 |
186 | # Save to file
187 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_1_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_1_patterns_bias_noise_pdf_type42.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure 1: patterns of random and systematic errors in DEMs"""
# Variant of fig_1_patterns_bias_noise.py that renders with LaTeX and saves
# a PDF whose text remains editable in vector-graphics software.
2 | import matplotlib.pyplot as plt
3 | import matplotlib.patches as mpatches
4 | import matplotlib.colors as colors
5 | import numpy as np
6 | import cartopy.crs as ccrs
7 | import geoutils as gu
8 | import xdem
9 |
10 | plt.rcParams.update({'font.size': 12})
# fonttype 42 embeds TrueType outlines (instead of Type 3 glyphs) in the PDF.
11 | plt.rcParams.update({'pdf.fonttype':42})
12 | plt.rcParams["font.family"] = "Times New Roman"
# Requires a working system LaTeX installation.
13 | plt.rcParams["text.usetex"] = True
14 |
15 | # Showing dh before and after alignment with hillshade
# NOTE(review): all input paths are absolute and user-specific; the script
# is not portable as-is.
16 | fn_hs = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Mont-Blanc_2017-10-25_DEM_5m_hillshade.tif'
17 | fn_dh_nk = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_shift_nk_Pleiades.tif'
18 | fn_dh_final_dh = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
19 |
20 | # Showing patterns of noise
21 | fn_dh_pleiades_noise = '/home/atom/ongoing/work_stderr_dem/noise_examples/artefact_dh/dh_Peru_2017-09-01_PHR_vs_2017-08-20_PHR.tif'
22 | fn_dh_kh9_noise = '/home/atom/ongoing/work_stderr_dem/noise_examples/artefact_dh/DZB1212-500129_003_004-DEM_coreg-diff_utm.tif'
23 |
# Panel-a crop extent in UTM 32N: [xmin, ymin, xmax, ymax].
24 | crop_ext = [338680, 5086760, 340680, 5087460]
25 |
26 | fig = plt.figure(figsize=(6, 5.5))
27 |
28 | # 1/ Plot the hillshade of panel a
29 |
# Top map of panel a: hillshade giving terrain context for the dh maps below.
30 | ax = fig.add_axes([0.25,0.8,0.5,0.2],
31 |                   projection=ccrs.UTM(32), label='Hillshade')
32 |
33 | hs = gu.Raster(fn_hs)
34 | hs.crop(crop_ext)
35 | plt_extent=[hs.bounds.left, hs.bounds.right, hs.bounds.bottom, hs.bounds.top]
36 |
# Custom black-to-white greyscale with nodata fully transparent.
37 | color1 = colors.to_rgba('black')
38 | color2 = colors.to_rgba('white')
39 | cmap_ll = colors.LinearSegmentedColormap.from_list('my_cmap_hs', [color1, color2], 256)
40 | cmap_ll.set_bad(color='None')
41 |
42 | ax.imshow(hs.data[0, :, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap_ll,
43 |           interpolation=None, zorder=2)
44 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
# Panel label '(a)', placed far below the axes to sit under the whole panel.
45 | ax.text(0.5, -1.76, '(a)', transform=ax.transAxes, ha='center', va='top', fontsize=15)
46 |
# Manual 400 m scale bar (two 200 m rectangles) with 0/200/400 m labels.
47 | y_extent = hs.bounds.top - hs.bounds.bottom
48 | x_extent = hs.bounds.right - hs.bounds.left
49 | ax.add_patch(mpatches.Rectangle((crop_ext[2] - x_extent/20 - 400, crop_ext[1] + y_extent/3.5),200, 30,
50 |                                 edgecolor='black',facecolor='black',transform=ccrs.UTM(32),zorder=10,linewidth=0.5))
51 | ax.add_patch(mpatches.Rectangle((crop_ext[2] - x_extent/20 - 200, crop_ext[1] + y_extent/3.5),200, 30,
52 |                                 edgecolor='black',facecolor='white',transform=ccrs.UTM(32),zorder=10,linewidth=0.5))
53 | ax.text(crop_ext[2] - x_extent/20 - 400, crop_ext[1] + y_extent/3.5 - 30,'0',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
54 | ax.text(crop_ext[2] - x_extent/20 - 200, crop_ext[1] + y_extent/3.5 - 30,'200',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
55 | ax.text(crop_ext[2] - x_extent/20 - 200, crop_ext[1] + y_extent/3.5 - 100,'m',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
56 | ax.text(crop_ext[2] - x_extent/20 - 0, crop_ext[1] + y_extent/3.5 - 30,'400',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
58 |
59 | # 2/ Plot the horizontal shift biases of panel a
60 |
# Left map: elevation difference BEFORE horizontal alignment.
61 | ax0 = fig.add_axes([0.01,0.56,0.49,0.2],
62 |                    projection=ccrs.UTM(32), label='Biases')
63 |
64 |
65 | dh = gu.Raster(fn_dh_nk)
66 | dh.crop(crop_ext)
# Masked raster -> plain float array with NaNs where invalid.
67 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh)
68 |
# .copy() before set_bad(): mutating a registered colormap in place is
# deprecated since matplotlib 3.3 and raises from 3.7 on.
69 | cmap = plt.get_cmap('RdYlBu').copy()
70 | cmap.set_bad(color='None')
71 |
# NOTE(review): vmin/vmax (-21, -1) centre the scale off zero — presumably
# to visualize the shift bias; confirm.
72 | ax0.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
73 |            interpolation=None, zorder=2, vmin=-21, vmax=-1)
# Bold title via LaTeX (text.usetex is enabled above).
74 | ax0.text(0.5, 1.02, '$$\\textbf{Horizontal shift biases}$$', transform=ax0.transAxes, ha='center', va='bottom')
75 |
76 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
77 |
78 | # 3/ Plot the residuals after correcting horizontal shift
79 |
# Right map: residual elevation difference AFTER alignment.
80 | ax1 = fig.add_axes([0.5,0.56,0.49,0.2],
81 |                    projection=ccrs.UTM(32), label='Heterosc')
82 |
83 |
84 | dh_final = gu.Raster(fn_dh_final_dh)
85 | dh_final.crop(crop_ext)
86 | plt_extent=[dh_final.bounds.left, dh_final.bounds.right, dh_final.bounds.bottom, dh_final.bounds.top]
87 |
88 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh_final)
89 | cmap = plt.get_cmap('RdYlBu').copy()
90 | cmap.set_bad(color='None')
91 | ax1.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
92 |            interpolation=None, zorder=2, vmin=-10, vmax=10)
93 |
94 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
95 | ax1.text(0.5, 1.02, '$$\\textbf{After alignment}$$', transform=ax1.transAxes, ha='center', va='bottom')
96 |
97 |
# Shared horizontal colourbar between the two dh maps of panel a.
98 | cbaxes = ax1.inset_axes([-0.25, -0.125, 0.5, 0.075], zorder=10)
99 |
100 | norm = colors.Normalize(vmin=-10, vmax=10)
101 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
102 | sm.set_array([])
103 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-10, -5, 0, 5, 10], orientation='horizontal', extend='both', shrink=0.2)
104 | cb.set_label('Elevation difference (m)', labelpad=-0.5)
105 |
# Connector lines from the top-left/top-right corners of the hillshade axes
# ('ax', created in section 1) to the corresponding corners of ax0 and ax1.
106 | ax.add_patch(mpatches.ConnectionPatch(xyA=(0, 1), xyB=(0, 1), coordsA=ax.transAxes, coordsB=ax0.transAxes,
107 |                                       zorder=10, facecolor='black'))
108 | ax.add_patch(mpatches.ConnectionPatch(xyA=(1, 1), xyB=(1, 1), coordsA=ax.transAxes, coordsB=ax1.transAxes,
109 |                                       zorder=10, facecolor='black'))
110 |
111 | # 4/ Plot the along-track undulation noise from a Pléiades/Pléiades difference
112 |
# Panel b: Pléiades-minus-Pléiades dh over Peru (UTM zone 19S extent).
113 | ax = fig.add_axes([0.05,0.07,0.4,0.425],
114 |                   projection=ccrs.UTM(19), label='Noise Pléiades')
115 |
116 | dh_pleiades = gu.Raster(fn_dh_pleiades_noise)
117 | crop_ext_pleiades = [297000, 8455000, 309000, 8463000]
118 | dh_pleiades.crop(crop_ext_pleiades)
119 |
120 | plt_extent=[dh_pleiades.bounds.left, dh_pleiades.bounds.right, dh_pleiades.bounds.bottom, dh_pleiades.bounds.top]
121 |
122 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh_pleiades)
# Mask |dh| > 5 m — presumably outliers/real change — so the ±1 m
# undulation pattern stands out; TODO confirm threshold choice.
123 | hs_arr[np.abs(hs_arr)>5] = np.nan
124 | cmap = plt.get_cmap('RdYlBu').copy()
125 | cmap.set_bad(color='None')
126 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(19), cmap=cmap,
127 |           interpolation=None, zorder=2, vmin=-1, vmax=1)
128 |
129 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
# Panel label and LaTeX-bold title.
130 | ax.text(0.5, -0.33, '(b)', transform=ax.transAxes, ha='center', va='top', fontsize=15)
131 | ax.text(0.5, 1.02, '$$\\textbf{Along-track undulations}$$', transform=ax.transAxes, ha='center', va='bottom')
132 |
# Manual 2 km scale bar (two 1 km rectangles) with 0/1/2 km tick labels.
133 | y_extent = dh_pleiades.bounds.top - dh_pleiades.bounds.bottom
134 | x_extent = dh_pleiades.bounds.right - dh_pleiades.bounds.left
135 | ax.add_patch(mpatches.Rectangle((crop_ext_pleiades[2] - x_extent/20 - 2000, crop_ext_pleiades[1] + y_extent/5),1000, 150,
136 |                                 edgecolor='black',facecolor='black',transform=ccrs.UTM(19),zorder=10,linewidth=0.5))
137 | ax.add_patch(mpatches.Rectangle((crop_ext_pleiades[2] - x_extent/20 - 1000, crop_ext_pleiades[1] + y_extent/5),1000, 150,
138 |                                 edgecolor='black',facecolor='white',transform=ccrs.UTM(19),zorder=10,linewidth=0.5))
139 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 2000, crop_ext_pleiades[1] + y_extent/5 - 100,'0',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
140 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 1000, crop_ext_pleiades[1] + y_extent/5 - 100,'1',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
141 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 1000, crop_ext_pleiades[1] + y_extent/5 - 600,'km',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
142 | ax.text(crop_ext_pleiades[2] - x_extent/20 - 0, crop_ext_pleiades[1] + y_extent/5 - 100,'2',ha='center',va='top',transform=ccrs.UTM(19),zorder=10)
143 |
144 |
# Horizontal colourbar below panel b (±1 m range).
145 | cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.05], zorder=10)
146 |
147 | norm = colors.Normalize(vmin=-1, vmax=1)
148 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
149 | sm.set_array([])
150 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-1, -0.5, 0, 0.5, 1], orientation='horizontal', extend='both', shrink=0.2)
151 | cb.set_label('Elevation difference (m)', labelpad=-0.5)
153 | # 5/ Plot the digitization artefacts from KH-9 imagery
154 |
155 | ax = fig.add_axes([0.55,0.07,0.4,0.425],
156 | projection=ccrs.UTM(6), label='KH9')
157 |
158 | crop_ext_kh9 = [445000, 7634000, 505000, 7673500]
159 |
160 |
161 | dh_kh9 = gu.Raster(fn_dh_kh9_noise)
162 | dh_kh9.crop(crop_ext_kh9)
163 | plt_extent=[dh_kh9.bounds.left, dh_kh9.bounds.right, dh_kh9.bounds.bottom, dh_kh9.bounds.top]
164 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh_kh9)
165 |
166 | cmap = plt.get_cmap('RdYlBu')
167 | cmap.set_bad(color='None')
168 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(6), cmap=cmap,
169 | interpolation=None, zorder=2, vmin=-10, vmax=10)
170 |
171 | # ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
172 | ax.text(0.5, -0.34, '(c)', transform=ax.transAxes, ha='center', va='top', fontsize=15)
173 | ax.text(0.5, 1.02, '$$\\textbf{Digitization artefacts}$$', transform=ax.transAxes, ha='center', va='bottom')
174 |
175 | y_extent = dh_kh9.bounds.top - dh_kh9.bounds.bottom
176 | x_extent = dh_kh9.bounds.right - dh_kh9.bounds.left
177 | ax.add_patch(mpatches.Rectangle((crop_ext_kh9[2] - x_extent/20 - 10000, crop_ext_kh9[1] + y_extent/5),5000, 750,
178 | edgecolor='black',facecolor='black',transform=ccrs.UTM(6),zorder=10,linewidth=0.5))
179 | ax.add_patch(mpatches.Rectangle((crop_ext_kh9[2] - x_extent/20 - 5000, crop_ext_kh9[1] + y_extent/5),5000, 750,
180 | edgecolor='black',facecolor='white',transform=ccrs.UTM(6),zorder=10,linewidth=0.5))
181 | ax.text(crop_ext_kh9[2] - x_extent/20 - 10000, crop_ext_kh9[1] + y_extent/5 - 500,'0',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
182 | ax.text(crop_ext_kh9[2] - x_extent/20 - 5000, crop_ext_kh9[1] + y_extent/5 - 500,'5',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
183 | ax.text(crop_ext_kh9[2] - x_extent/20 - 5000, crop_ext_kh9[1] + y_extent/5 - 3000,'km',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
184 | ax.text(crop_ext_kh9[2] - x_extent/20 - 0, crop_ext_kh9[1] + y_extent/5 - 500,'10',ha='center',va='top',transform=ccrs.UTM(6),zorder=10)
185 |
186 |
187 | cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.05], zorder=10)
188 |
189 | norm = colors.Normalize(vmin=-10, vmax=10)
190 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
191 | sm.set_array([])
192 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-10, -5, 0, 5, 10], orientation='horizontal', extend='both', shrink=0.2)
193 | cb.set_label('Elevation difference (m)', labelpad=-0.5)
194 |
195 | # Save to file
196 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_1_final.pdf', dpi=400, transparent=True)
--------------------------------------------------------------------------------
/figures/fig_2.pdf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhugonnet/dem_error_study/c022b613f34f8c9bc28ac876df81f55b8e64de1c/figures/fig_2.pdf
--------------------------------------------------------------------------------
/figures/fig_2.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/rhugonnet/dem_error_study/c022b613f34f8c9bc28ac876df81f55b8e64de1c/figures/fig_2.png
--------------------------------------------------------------------------------
/figures/fig_3_case_study_map.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure 3: map of Mont-Blanc case study"""
2 | import matplotlib.pyplot as plt
3 | import matplotlib.patches as mpatches
4 | import matplotlib.colors as colors
5 | import os
6 | import cartopy.crs as ccrs
7 | import geoutils as gu
8 | import pyproj
9 | from cartopy.io.shapereader import Reader
10 | from cartopy.feature import ShapelyFeature
11 | import cartopy.feature as cfeature
12 |
# Input datasets: land cover, global hillshade, Natural Earth land, RGI v6
# glacier outlines (+ buffered/ice-sheet masks) and a simplified forest mask.
# NOTE(review): all paths are absolute and user-specific; not portable as-is.
13 | fn_lc = '/home/atom/data/inventory_products/Land_cover/ESACCI-LC-L4-LCCS-Map-300m-P1Y-2015-v2.0.7_robinson.tif'
14 | fn_hs = '/home/atom/documents/paper/Hugonnet_2020/figures/world_robin_rs.tif'
15 | fn_land = '/home/atom/data/inventory_products/NaturalEarth/ne_50m_land/ne_50m_land.shp'
16 | fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
17 | fn_shp_buff = '/home/atom/data/inventory_products/RGI/buffered/rgi60_buff_diss.shp'
18 | fn_ais = '/home/atom/data/inventory_products/RGI/AIS_mask/ais_glacier_ice_mask_wgs84.shp'
19 | fn_gis = '/home/atom/data/inventory_products/RGI/GIS_mask/GreenlandMasks/Greenland_IceMask_wgs84.shp'
20 | fn_forest_shp_simplified='/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'
21 |
22 | fn_hs_montblanc = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Mont-Blanc_2017-10-25_DEM_5m_hillshade.tif'
23 |
# Global hillshade, used later for the world inset.
24 | hs_r = gu.Raster(fn_hs)
25 |
26 | fig = plt.figure(figsize=(6, 7))
27 |
# NOTE(review): this GridSpec is not referenced in the visible code — axes
# are placed with fig.add_axes instead; possibly dead code, confirm.
28 | grid = plt.GridSpec(20, 20, wspace=0.1, hspace=0.1)
29 |
30 | # First panel: world map with coverage + inset for Mont-Blanc case study
31 | # sub_ax = fig.add_axes([0,0.1,0.6,0.9],
32 | # projection=ccrs.Robinson(), label='world')
33 | #
34 | # bounds = [-179.99,179.99,-89.99,89.99]
35 | #
36 | # def poly_from_extent(ext):
37 | #
38 | # poly = np.array([(ext[0],ext[2]),(ext[1],ext[2]),(ext[1],ext[3]),(ext[0],ext[3]),(ext[0],ext[2])])
39 | #
40 | # return poly
41 | # polygon = poly_from_extent(bounds)
42 | #
43 | # # Add hillshade
44 | # img = GeoImg(fn_hs)
45 | # land_mask = create_mask_from_shapefile(img,fn_land)
46 | # ds = gdal.Open(fn_hs)
47 | # gt = ds.GetGeoTransform() # Defining bounds
48 | # ext = (gt[0], gt[0] + ds.RasterXSize * gt[1],
49 | # gt[3] + ds.RasterYSize * gt[5], gt[3])
50 | # hs = ds.ReadAsArray()
51 | # hs = hs.astype(float)
52 | # ds = None
53 | #
54 | # def stretch_hs(hs,stretch_factor=1.):
55 | #
56 | # max_hs = 255
57 | # min_hs = 0
58 | #
59 | # hs_s = (hs - (max_hs-min_hs)/2)*stretch_factor + (max_hs-min_hs)/2
60 | #
61 | # return hs_s
62 | #
63 | # hs = stretch_hs(hs,stretch_factor=0.9)
64 | #
65 | # hs_land = hs.copy()
66 | # hs_land[~land_mask]=0
67 | # hs_notland = hs.copy()
68 | # hs_notland[land_mask]=0
69 | #
70 | # hs_tmp = hs_land.copy()
71 | # hs_tmp_nl = hs_notland.copy()
72 | #
73 | # def inter_poly_coords(polygon_coords):
74 | # list_lat_interp = []
75 | # list_lon_interp = []
76 | # for i in range(len(polygon_coords) - 1):
77 | # lon_interp = np.linspace(polygon_coords[i][0], polygon_coords[i + 1][0], 50)
78 | # lat_interp = np.linspace(polygon_coords[i][1], polygon_coords[i + 1][1], 50)
79 | #
80 | # list_lon_interp.append(lon_interp)
81 | # list_lat_interp.append(lat_interp)
82 | #
83 | # all_lon_interp = np.concatenate(list_lon_interp)
84 | # all_lat_interp = np.concatenate(list_lat_interp)
85 | #
86 | # return np.array(list(zip(all_lon_interp, all_lat_interp)))
87 | #
88 | # def out_of_poly_mask(geoimg, poly_coords):
89 | #
90 | # poly = ot.poly_from_coords(inter_poly_coords(poly_coords))
91 | # srs = osr.SpatialReference()
92 | # srs.ImportFromEPSG(4326)
93 | #
94 | # # put in a memory vector
95 | # ds_shp = ot.create_mem_shp(poly, srs)
96 | #
97 | # return ot.geoimg_mask_on_feat_shp_ds(ds_shp, geoimg)
98 | #
99 | # mask = out_of_poly_mask(img, polygon)
100 | #
101 | # hs_tmp[~mask] = 0
102 | # hs_tmp_nl[~mask] = 0
103 | #
104 | # color1 = mpl.colors.to_rgba('black')
105 | # color2 = mpl.colors.to_rgba('white')
106 | # cmap2 = mpl.colors.LinearSegmentedColormap.from_list('my_cmap2', [color1, color2], 256)
107 | # cmap2._init()
108 | # cmap2._lut[0:1, -1] = 0.0 # We made transparent de 10 first levels of hillshade,
109 | # cmap2._lut[1:, -1] = 0.60
110 | #
111 | # cmap22 = mpl.colors.LinearSegmentedColormap.from_list('my_cmap22', [color1, color2], 256)
112 | # cmap22._init()
113 | # cmap22._lut[0:1, -1] = 0.0 # We made transparent de 10 first levels of hillshade,
114 | # cmap22._lut[1:, -1] = 0.3
115 | #
116 | # sc_img = GeoImg(fn_out)
117 | # cmap_sc = mpl.colors.LinearSegmentedColormap.from_list('my_cmap_sc', [color1, color2], 2)
118 | # cmap_sc._init()
119 | # cmap_sc._lut[0:1, -1] = 0
120 | # cmap_sc._lut[1, -1] = 0.9
121 | #
122 | # lc_r = gu.Raster(fn_lc)
123 | # lc_arr, _ = gu.spatial_tools.get_array_and_mask(lc_r)
124 | # forest_mask = np.logical_or(np.logical_and(lc_arr>=50, lc_arr<=90), lc_arr==160, lc_arr==170)
125 | # water_bodies = lc_arr==210
126 | # water_bodies[~land_mask] = False
127 | #
128 | # cmap_forest = mpl.colors.LinearSegmentedColormap.from_list('my_cmap_forest', [ color1, mpl.colors.to_rgba('tab:green')], 2)
129 | # cmap_forest._init()
130 | # cmap_forest._lut[0:1, -1] = 0
131 | # cmap_forest._lut[1, -1] = 0.8
132 | #
133 | # cmap_wb = mpl.colors.LinearSegmentedColormap.from_list('my_cmap_waterbodies', [ color1, mpl.colors.to_rgba('tab:blue')], 2)
134 | # cmap_wb._init()
135 | # cmap_wb._lut[0:1, -1] = 0
136 | # cmap_wb._lut[1, -1] = 0.8
137 | #
138 | #
139 | # shape_feature = ShapelyFeature(Reader(fn_shp_buff).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.5,
140 | # facecolor='tab:cyan', linewidth=0, zorder=6)
141 | # sub_ax.add_feature(shape_feature)
142 | # shape_feature = ShapelyFeature(Reader(fn_ais).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.5,
143 | # facecolor='tab:cyan', linewidth=0, zorder=6)
144 | # sub_ax.add_feature(shape_feature)
145 | # shape_feature = ShapelyFeature(Reader(fn_gis).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.5,
146 | # facecolor='tab:cyan', linewidth=0, zorder=6)
147 | # sub_ax.add_feature(shape_feature)
148 | #
149 | # shape_feature = ShapelyFeature(Reader(fn_studysites).geometries(), ccrs.PlateCarree(), edgecolor='none', alpha=0.45,
150 | # facecolor='tab:orange', linewidth=0, zorder=7)
151 | # sub_ax.add_feature(shape_feature)
152 | # shape_feature = ShapelyFeature(Reader(fn_studysites).geometries(), ccrs.PlateCarree(), edgecolor='tab:orange', alpha=1,
153 | # facecolor='none', linewidth=0.5, zorder=7)
154 | # sub_ax.add_feature(shape_feature)
155 | #
156 | # sub_ax.imshow(hs_tmp[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap2, zorder=2, interpolation='nearest',rasterized=True)
157 | # sub_ax.imshow(hs_tmp_nl[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap22, zorder=2,interpolation='nearest',rasterized=True)
158 | #
159 | # sub_ax.imshow(sc_img.img[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap_sc, zorder=3, interpolation='nearest', rasterized=True)
160 | #
161 | # sub_ax.imshow(forest_mask.astype(np.float32)[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap_forest, zorder=4, interpolation='nearest', rasterized=True)
162 | # sub_ax.imshow(water_bodies.astype(np.float32)[:, :], extent=ext, transform=ccrs.Robinson(), cmap=cmap_wb, zorder=5, interpolation='nearest', rasterized=True)
163 | #
164 | #
165 | # sub_ax.set_extent([-179.99,179.99,-89.99,89.99], ccrs.Geodetic())
166 | #
167 | # sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'ocean', '50m', facecolor='lightgrey'), alpha=0.5)
168 | # sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'land', '50m', facecolor=plt.cm.Greys(0.9)), alpha=0.5)
169 | #
170 | # sub_ax.outline_patch.set_edgecolor('lightgrey')
171 | # sub_ax.text(0.05, 0.95, 'a', transform=sub_ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
172 |
173 |
174 | # Add the coverage of various data
175 | # shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.5,
176 | # facecolor='tab:cyan', linewidth=1)
177 | # sub_ax.add_feature(shape_feature)
178 |
179 |
180 | # 2/ Legend (plot in advance to be behind)
181 |
# Three small frame-less axes along the bottom of the figure, each holding a
# coloured swatch + label. The same setup is repeated for each legend entry.
182 | legendax = fig.add_axes([0, 0.025, 0.2, 0.2], label='legends')
183 |
184 | legendax.set_xlim((-0.5, 0.5))
185 | legendax.set_ylim((-0.15, 1.15))
186 | legendax.set_xticks([])
187 | legendax.set_yticks([])
188 | legendax.spines['top'].set_visible(False)
189 | legendax.spines['left'].set_visible(False)
190 | legendax.spines['right'].set_visible(False)
191 | legendax.spines['bottom'].set_visible(False)
192 |
193 | legendax.add_patch(mpatches.Rectangle((0, 0.25), 0.2 ,0.2 , edgecolor='black',facecolor='tab:cyan', zorder=10, linewidth=0.5))
194 | legendax.text(0.3, 0.35, 'Glacierized', va='center', ha='left')
195 |
# Legend entry for the example glaciers (dark-blue outline, no fill).
196 | legendax = fig.add_axes([0.575, 0.025, 0.2, 0.2], label='legends3')
197 |
198 | legendax.set_xlim((-0.5, 0.5))
199 | legendax.set_ylim((-0.15, 1.15))
200 | legendax.set_xticks([])
201 | legendax.set_yticks([])
202 | legendax.spines['top'].set_visible(False)
203 | legendax.spines['left'].set_visible(False)
204 | legendax.spines['right'].set_visible(False)
205 | legendax.spines['bottom'].set_visible(False)
206 |
207 | legendax.add_patch(mpatches.Rectangle((0, 0.25), 0.2 ,0.2 , edgecolor='darkblue',facecolor='white', zorder=10, linewidth=2))
208 | legendax.text(0.3, 0.35, 'Example glaciers', va='center', ha='left')
209 |
# Legend entry for forested areas (green fill).
210 | legendax = fig.add_axes([0.3, 0.025, 0.2, 0.2], label='legends2')
211 |
212 | legendax.set_xlim((-0.5, 0.5))
213 | legendax.set_ylim((-0.15, 1.15))
214 | legendax.set_xticks([])
215 | legendax.set_yticks([])
216 | legendax.spines['top'].set_visible(False)
217 | legendax.spines['left'].set_visible(False)
218 | legendax.spines['right'].set_visible(False)
219 | legendax.spines['bottom'].set_visible(False)
220 |
221 | legendax.add_patch(mpatches.Rectangle((0, 0.25), 0.2 ,0.2 , edgecolor='black',facecolor='tab:green', zorder=10, linewidth=0.5))
222 | legendax.text(0.3, 0.35, 'Forested', va='center', ha='left')
223 |
224 | # 3/ Plot Mont-Blanc case study map
225 |
# Main panel: Mont-Blanc hillshade with glacier and forest overlays (UTM 32N).
226 | ax = fig.add_axes([0.075,0.15,0.9,0.8],
227 |                   projection=ccrs.UTM(32), label='Mont-Blanc')
228 |
229 | hs = gu.Raster(fn_hs_montblanc)
230 | plt_extent=[hs.bounds.left, hs.bounds.right, hs.bounds.bottom, hs.bounds.top]
# Reordered copy [left, bottom, right, top], used below for the scale bar.
231 | crop_extent = [plt_extent[0], plt_extent[2], plt_extent[1], plt_extent[3]]
232 |
233 | hs_arr = hs.data
234 |
# Pad the view 1.5 km beyond the raster on the left/right.
235 | ax.set_extent([hs.bounds.left -1500, hs.bounds.right+1500, hs.bounds.bottom, hs.bounds.top], ccrs.UTM(32))
236 |
# Custom black-to-white greyscale with nodata fully transparent.
237 | color1 = colors.to_rgba('black')
238 | color2 = colors.to_rgba('white')
239 | cmap_ll = colors.LinearSegmentedColormap.from_list('my_cmap_hs', [color1, color2], 256)
240 | # cmap_ll._init()
241 | # cmap_ll._lut[1:, -1] = 1
242 | # cmap_ll._lut[0:1, -1] = 0.0 # We make transparent the lowest hillshade value
243 | cmap_ll.set_bad(color='None')
244 |
# RGI glacier outlines drawn above the hillshade (zorder 4 > 2).
245 | shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.65,
246 |                                facecolor='tab:cyan', linewidth=1, zorder=4)
247 | ax.add_feature(shape_feature)
248 |
249 | ax.imshow(hs_arr[0, :, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap_ll,
250 |           interpolation=None, zorder=2)
251 |
# Manual 4 km scale bar (two 2 km rectangles) with 0/2/4 km tick labels.
252 | y_extent = hs.bounds.top - hs.bounds.bottom
253 | x_extent = hs.bounds.right - hs.bounds.left
254 | ax.add_patch(mpatches.Rectangle((crop_extent[2] - x_extent/20 - 4000, crop_extent[1] + y_extent/15),2000, 300,
255 |                                 edgecolor='black',facecolor='black',transform=ccrs.UTM(32),zorder=10,linewidth=0.5))
256 | ax.add_patch(mpatches.Rectangle((crop_extent[2] - x_extent/20 - 2000, crop_extent[1] + y_extent/15),2000, 300,
257 |                                 edgecolor='black',facecolor='white',transform=ccrs.UTM(32),zorder=10,linewidth=0.5))
258 | ax.text(crop_extent[2] - x_extent/20 - 4000, crop_extent[1] + y_extent/15 - 100,'0',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
259 | ax.text(crop_extent[2] - x_extent/20 - 2000, crop_extent[1] + y_extent/15 - 100,'2',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
260 | ax.text(crop_extent[2] - x_extent/20 - 2000, crop_extent[1] + y_extent/15 - 800,'km',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
261 | ax.text(crop_extent[2] - x_extent/20 - 0, crop_extent[1] + y_extent/15 - 100,'4',ha='center',va='top',transform=ccrs.UTM(32),zorder=10)
262 |
# Forest mask (already in UTM 32N) below the glacier layer (zorder 3 < 4).
263 | shape_feature = ShapelyFeature(Reader(fn_forest_shp_simplified).geometries(), ccrs.UTM(32), edgecolor='None', alpha=0.65,
264 |                                facecolor='tab:green', linewidth=1, zorder=3)
265 | ax.add_feature(shape_feature)
266 |
# Graticule with degree-minute labels on the top and left edges only.
267 | ax.gridlines(draw_labels={'top':'x', 'left':'y'}, dms=True, x_inline=False, y_inline=False)
# Raster corner coordinates.
# NOTE(review): ll/lr/ul/ur are not referenced in the visible code — possibly
# dead code left from an earlier version; confirm before removing.
269 | ll = (hs.bounds.left, hs.bounds.bottom)
270 | lr = (hs.bounds.right, hs.bounds.bottom)
271 | ul = (hs.bounds.left, hs.bounds.top)
272 | ur = (hs.bounds.right, hs.bounds.top)
273 |
274 | # Extent of Figure on heteroscedasticty
# Dashed rectangle + label showing the footprint of Fig. 4d, defined as
# fractions of the hillshade extent.
275 | y_extent = hs.bounds.top - hs.bounds.bottom
276 | x_extent = hs.bounds.right - hs.bounds.left
277 | plt_extent0 = [
278 |     hs.bounds.left + 3/10*x_extent,
279 |     hs.bounds.right - 2/10*x_extent,
280 |     hs.bounds.bottom + 1/10*y_extent,
281 |     hs.bounds.top - 4.3/10*y_extent,
282 | ]
283 |
284 | ax.add_patch(mpatches.Rectangle((plt_extent0[0], plt_extent0[2]), plt_extent0[1] - plt_extent0[0], plt_extent0[3] - plt_extent0[2],
285 |                                 edgecolor='black',facecolor='none',transform=ccrs.UTM(32),zorder=10,linewidth=2, linestyle='dashed'))
286 |
287 | ax.text(plt_extent0[0]+500, plt_extent0[3]-500,
288 |         'Fig. 4d', ha='left', va='top', bbox=dict(edgecolor='black', facecolor='white', alpha=1), zorder=5)
289 |
290 | # Extent of Figure on spatial correlations
# Same pattern for the footprint of Fig. 5b.
291 | plt_extent = [
292 |     hs.bounds.left+1/40*x_extent,
293 |     hs.bounds.right-1/5*x_extent,
294 |     hs.bounds.bottom + 6/10*y_extent,
295 |     hs.bounds.top-1/20*y_extent,
296 | ]
297 | ax.add_patch(mpatches.Rectangle((plt_extent[0], plt_extent[2]), plt_extent[1] - plt_extent[0], plt_extent[3] - plt_extent[2],
298 |                                 edgecolor='black',facecolor='none',transform=ccrs.UTM(32),zorder=10,linewidth=2, linestyle='dashed'))
299 | ax.text(plt_extent[1]-500, plt_extent[3]-500, 'Fig. 5b', ha='right', va='top', bbox=dict(edgecolor='black', facecolor='white', alpha=1), zorder=5)
300 |
301 | # Extent of Figure on slope simulation
# Footprint of Fig. 6a, given directly in UTM 32N metres.
302 | crop_ext = [333500, 5076000, 335500, 5078000]
303 |
304 | ax.add_patch(mpatches.Rectangle((crop_ext[0], crop_ext[1]), crop_ext[2] - crop_ext[0], crop_ext[3] - crop_ext[1],
305 |                                 edgecolor='black',facecolor='none',transform=ccrs.UTM(32),zorder=10,linewidth=2, linestyle='dashed'))
306 | ax.text(crop_ext[2]+500, crop_ext[3]-500, 'Fig. 6a', ha='left', va='top', bbox=dict(edgecolor='black', facecolor='white', alpha=1), zorder=5)
308 | # Display glaciers used in Table 2
309 | names_gla = ['Bossons', 'Taconnaz', 'Griaz', 'Bourgeat']
310 | rgiid_gla = ['RGI60-11.03646', 'RGI60-11.03647', 'RGI60-11.03280', 'RGI60-11.03290']
311 | glacier_inventory = gu.Vector(fn_shp)
312 | shift_yy = [3100, 2300, -100, 900]
313 | shift_xx = [-1500, -900, -1700, -800]
314 | for rgiid in rgiid_gla:
315 | gla_ds = glacier_inventory.ds[glacier_inventory.ds['RGIId'].values==rgiid]
316 | shape_feature = ShapelyFeature(gla_ds.geometry, ccrs.PlateCarree(), edgecolor='darkblue',
317 | alpha=1,
318 | facecolor='None', linewidth=1, zorder=4)
319 | ax.add_feature(shape_feature)
320 | lat, lon = gla_ds['CenLat'].values[0], gla_ds['CenLon'].values[0]
321 | xx, yy = gu.projtools.reproject_from_latlon((lat, lon), out_crs=pyproj.CRS(32632))
322 |
323 | ax.text(xx + shift_xx[rgiid_gla.index(rgiid)], yy + shift_yy[rgiid_gla.index(rgiid)], names_gla[rgiid_gla.index(rgiid)], va='center', ha='center', color='darkblue', zorder=30, fontweight='bold')
324 |
325 |
326 | # Mont-Blanc location
327 | x_mb = 334360
328 | y_mb = 5077742
329 | ax.scatter(x_mb, y_mb, s=100, marker='x', color='white', linewidths=3, zorder=30)
330 | ax.text(x_mb - 800 , y_mb, 'Mont\nBlanc', color='white', va='center', ha='right', zorder=30, fontweight='bold')
331 |
332 | # ax.text(0.025, 0.975, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14, zorder=30)
333 |
334 | sub_ax = fig.add_axes([0.05,0.775,0.25,0.25],
335 | projection=ccrs.Robinson(), label='world')
336 |
337 | sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'ocean', '50m', facecolor='lightgrey'), alpha=0.3)
338 | sub_ax.add_feature(cfeature.NaturalEarthFeature('physical', 'land', '50m', facecolor=plt.cm.Greys(0.95)), alpha=0.8)
339 | plt_extent=[hs_r.bounds.left, hs_r.bounds.right, hs_r.bounds.bottom, hs_r.bounds.top]
340 |
341 | hs_arr = gu.spatial_tools.get_array_and_mask(hs_r)[0]
342 | color1 = colors.to_rgba('black')
343 | color2 = colors.to_rgba('white')
344 | cmap2 = colors.LinearSegmentedColormap.from_list('my_cmap2', [color1, color2], 256)
345 | cmap2._init()
346 | cmap2._lut[0:1, -1] = 0.0 # We made transparent de 10 first levels of hillshade,
347 | cmap2._lut[1:, -1] = 0.60
348 | sub_ax.imshow(hs_arr[:, :], transform=ccrs.Robinson(), extent=plt_extent, cmap=cmap2, zorder=2, interpolation='nearest',rasterized=True)
349 |
350 | bounds = [-10, 20, 35, 55]
351 | sub_ax.set_extent(bounds, ccrs.Geodetic())
352 |
353 | sub_ax.plot(6.86, 45.83, marker='s', color='red', transform=ccrs.Geodetic())
354 | # sub_ax.text(10, 50, 'Europe', color=plt.cm.Greys(0.8), ha='center', va='center', transform=ccrs.Geodetic(), fontweight='bold', rotation=15)
355 |
356 | # Save to file
357 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_3_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_6_example_slope.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure 6: example of propagation of errors to terrain slope and aspect for the Mont-Blanc case study"""
2 | import os
3 | import gstools as gs
4 | import matplotlib.pyplot as plt
5 | import numpy as np
6 | from geoutils import Raster
7 | import xdem
8 | import time
9 | import pandas as pd
10 | import seaborn as sns
11 | import matplotlib.colors as colors
12 | import cartopy.crs as ccrs
13 |
14 | # Open file
15 | fn_dem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
# Number of Monte-Carlo simulations of noisy DEMs
16 | n_sim = 200
17 | fn_hs = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Mont-Blanc_2017-10-25_DEM_5m_hillshade.tif'
18 | r = Raster(fn_dem)
19 |
20 | # Crop around Mont-Blanc
# [xmin, ymin, xmax, ymax] in UTM 32N metres
21 | crop_ext = [333500, 5076000, 335500, 5078000]
22 | r.crop(crop_ext)
23 | hs = Raster(fn_hs)
24 | hs.crop(crop_ext)
25 |
26 | # Open files with estimates of heteroscedasticity and spatial correlation
27 | fn_hetsce = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_heteroscedas_slope_curv.csv'
28 | fn_vgm = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_std_sta.csv'
29 |
30 | # Model those estimates
31 | df_h = pd.read_csv(fn_hetsce)
32 | df_v = pd.read_csv(fn_vgm)
# Keep only lags below 30 km for the variogram fit
33 | df_v = df_v[df_v.bins<30000]
# NOTE(review): rescales the experimental-variogram error; factor presumably tied to the number of
# variogram runs used upstream — confirm against the script that wrote df_vgm_std_sta.csv
34 | df_v.err_exp /= np.sqrt(100)/2
# Standardize the variogram so that its long-range sill (mean of last 3 bins) is ~1
35 | std_fac = np.nanmean(df_v.exp.values[-3:])
36 | df_v.exp /= std_fac
37 | df_v.err_exp /= std_fac
38 | dem = np.copy(r.data.data).squeeze()
39 | dem[dem==r.nodata] = np.nan
40 |
41 | slope, aspect, planc, profc = xdem.terrain.get_terrain_attribute(dem, resolution=r.res[0], attribute=['slope', 'aspect', 'planform_curvature',
42 | 'profile_curvature'])
# Maximum absolute curvature: explanatory variable of the heteroscedasticity model
43 | maxabsc = np.maximum(np.abs(planc), np.abs(profc))
44 |
45 | shape = np.shape(dem)
46 |
47 | # Grid definition
# NOTE(review): r.res[0] is used for both axes — assumes square pixels; confirm for this 5 m DEM
48 | x = np.arange(0, shape[0]) * r.res[0]
49 | y = np.arange(0, shape[1]) * r.res[0]
50 |
51 | # Fit sum of Gaussian models (only a tiny difference with Spherical for long range, works better with Fourier simulation)
# bounds/p0 alternate (range, sill) for each of the three Gaussian components
52 | fun1, params = xdem.spatialstats.fit_sum_model_variogram(list_model=['Gau', 'Gau', 'Gau'], empirical_variogram=df_v,
53 | bounds=[(0, 200), (0, 9), (500, 5000), (0, 9), (2000, 15000), (0,9)],
54 | p0=[100, 1.5, 2000,1.5, 5000,1.5])
55 |
# 2-D interpolant of the error (NMAD) binned by slope and maximum curvature
56 | fn = xdem.spatialstats.interp_nd_binning(df_h, list_var_names=['slope_mid', 'maxc_mid'], statistic='nmad', min_count=30)
# Clamp curvature to the last bin edge so the interpolant is not extrapolated
57 | maxabsc[maxabsc>50] = 50
58 | dh_err = fn((slope.data, maxabsc))
59 |
60 | # NMAD on complete stable terrain (not only cropped)
61 | nmad_stable = 1.60
62 | # nmad_stable = np.nanmedian(dh_err)
63 |
64 | # Need to specify the rescale factor to match Gaussian parameters between skgstat and gstools
65 | model_s_alone = gs.Gaussian(dim=2, var=1, len_scale=params[0], rescale=2)
66 |
# Three fitted Gaussian components: short range, long range, very long range
67 | model_s = gs.Gaussian(dim=2, var=params[1], len_scale=params[0], rescale=2)
68 | model_l = gs.Gaussian(dim=2, var=params[3], len_scale=params[2], rescale=2)
69 | model_l2 = gs.Gaussian(dim=2, var=params[5], len_scale=params[4], rescale=2)
70 |
# Allocate 4-D output arrays: (6 error models, n_sim simulations, rows, cols)
71 | sim_slope_dems, sim_aspect_dems = (np.empty((6, n_sim,) + shape, dtype=np.float32) for i in range(2))
72 |
# Monte-Carlo loop: for each simulation, draw random/correlated noise fields, add them to the DEM
# under six error models, and derive slope and aspect from each noisy DEM
73 | for i in range(n_sim):
74 |
75 | print('Working on simulation '+str(i+1))
76 |
77 | print('Generating random field...')
78 |
79 | t0 = time.time()
80 |
81 | # Using GSTools, let's generate a correlated signal at two different length: 5 and 100 (spherical)
# New SRF objects each iteration so every simulation draws an independent field
82 | srf_s_alone = gs.SRF(model_s_alone, mode_no=100)
83 | srf_s = gs.SRF(model_s, mode_no=100)
84 | srf_l = gs.SRF(model_l, mode_no=100)
85 | srf_l2 = gs.SRF(model_l2, mode_no=100)
86 |
87 | # We combine the two random correlated fields (e.g, short-range could represent resolution, and long-range the noise)
88 | field_s_alone = srf_s_alone.structured([x, y])
89 |
90 | field_s = srf_s((x, y), mesh_type='structured')
91 | field_l = srf_l((x, y), mesh_type='structured')
92 | field_l2 = srf_l2((x, y), mesh_type='structured')
93 |
94 | # Stationary variance with purely random noise
# The same white-noise draw is reused for the homoscedastic and heteroscedastic cases below
95 | pixel_noise = np.random.normal(0, 1, size=np.shape(dem))
96 | noisy_stationary_dem = dem + pixel_noise * nmad_stable
97 |
98 | # Heteroscedasticity with purely random noise
99 | noisy_hetsce_dem = dem + pixel_noise * dh_err
100 |
101 | # Stationary variance with correlated noise (short, and short+long range)
102 | noisy_stationary_sr_dem = dem + nmad_stable * field_s_alone
103 | noisy_stationary_lr_dem = dem + nmad_stable * field_s + nmad_stable * (field_l + field_l2)
104 |
105 | # Heteroscedasticity with correlated noise
106 | # !! Careful !! The long-range noise is scaled to the average variance, as it is not linked to heteroscedasticity
107 | noisy_hetsce_sr_dem = dem + dh_err * field_s_alone
108 | noisy_hetsce_lr_dem = dem + dh_err * field_s + nmad_stable * (field_l + field_l2)
109 |
110 | t1 = time.time()
111 |
112 | print('Elapsed: {:.1f} seconds'.format(t1-t0))
113 |
114 | print('Deriving slopes...')
115 |
116 | # Derive attribute for each simulation
117 | slope_stationary, aspect_stationary = xdem.terrain.get_terrain_attribute(noisy_stationary_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
118 | slope_hetsce, aspect_hetsce = xdem.terrain.get_terrain_attribute(noisy_hetsce_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
119 | slope_stationary_sr, aspect_stationary_sr = xdem.terrain.get_terrain_attribute(noisy_stationary_sr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
120 | slope_stationary_lr, aspect_stationary_lr = xdem.terrain.get_terrain_attribute(noisy_stationary_lr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
121 | slope_hetsce_sr, aspect_hetsce_sr = xdem.terrain.get_terrain_attribute(noisy_hetsce_sr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
122 | slope_hetsce_lr, aspect_hetsce_lr = xdem.terrain.get_terrain_attribute(noisy_hetsce_lr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
123 |
124 | t2 = time.time()
125 | print('Elapsed: {:.1f} seconds'.format(t2-t1))
126 |
127 | # Save in 4D array
# Index 0-5 order must match run_names defined after the loop
128 | sim_slope_dems[0, i, :, :] = slope_stationary
129 | sim_slope_dems[1, i, :, :] = slope_hetsce
130 | sim_slope_dems[2, i, :, :] = slope_stationary_sr
131 | sim_slope_dems[3, i, :, :] = slope_stationary_lr
132 | sim_slope_dems[4, i, :, :] = slope_hetsce_sr
133 | sim_slope_dems[5, i, :, :] = slope_hetsce_lr
134 |
135 | sim_aspect_dems[0, i, :, :] = aspect_stationary
136 | sim_aspect_dems[1, i, :, :] = aspect_hetsce
137 | sim_aspect_dems[2, i, :, :] = aspect_stationary_sr
138 | sim_aspect_dems[3, i, :, :] = aspect_stationary_lr
139 | sim_aspect_dems[4, i, :, :] = aspect_hetsce_sr
140 | sim_aspect_dems[5, i, :, :] = aspect_hetsce_lr
141 |
142 |
143 | # Define bins to compute statistics on
144 | bins_slope = [0, 5, 10, 15, 20, 30, 40, 50, 70, 90]
145 | bins_curv = [0, 0.2, 0.5, 1, 2, 3, 4, 6, 10, 20, 50]
146 |
# Aspect (degrees) -> unit-vector components; applied element-wise over the whole 4-D stack
147 | northness = np.cos(sim_aspect_dems * np.pi / 180)
148 | eastness = np.sin(sim_aspect_dems * np.pi / 180)
149 |
150 | list_slope_map, list_maxnortheast_map, list_df_bp, list_df_bp_northeast = ([] for i in range(4))
# Run names indexed 0-5, matching the first axis of sim_slope_dems/sim_aspect_dems
151 | run_names = ['stationary_random', 'hetsce_random', 'stationary_shortrange', 'stationary_longrange', 'hetsce_shortrange', 'hetsce_longrange']
152 | for i in range(6):
153 |
154 |
# 1-sigma spread per pixel across simulations, estimated as half the 16th-84th percentile range
155 | slope_1sig = (np.nanpercentile(sim_slope_dems[i, :, :, :], 84, axis=0)\
156 | - np.nanpercentile(sim_slope_dems[i, :, :, :], 16, axis=0)) / 2
157 | northness_1sig = (np.nanpercentile(northness[i, :, :, :], 84, axis=0)\
158 | - np.nanpercentile(northness[i, :, :, :], 16, axis=0)) / 2
159 | eastness_1sig = (np.nanpercentile(eastness[i, :, :, :], 84, axis=0)\
160 | - np.nanpercentile(eastness[i, :, :, :], 16, axis=0)) / 2
161 | maxnortheast_1sig = np.maximum(northness_1sig, eastness_1sig)
162 |
163 | for j in range(len(bins_slope) - 1):
164 | # Subset by slope category
# "slope" here is the noise-free slope map computed before the simulation loop
165 | subset = np.logical_and(slope >= bins_slope[j], slope < bins_slope[j + 1])
166 | sub_slope = slope_1sig[subset]
167 | sub_northeast = maxnortheast_1sig[subset]
168 | # Store in dataframe
169 | df_subset = pd.DataFrame()
170 | df_subset = df_subset.assign(err_slope=sub_slope, run=[run_names[i]]*len(sub_slope))
171 | bin_name = str(bins_slope[j]) + '–' + str(bins_slope[j + 1])
172 | df_subset['slope_category'] = bin_name
173 | list_df_bp.append(df_subset)
174 |
175 | df_subset_northeast = pd.DataFrame()
176 | df_subset_northeast = df_subset_northeast.assign(err_northeast=sub_northeast, run=[run_names[i]] * len(sub_slope))
177 | bin_name = str(bins_slope[j]) + '–' + str(bins_slope[j + 1])
178 | df_subset_northeast['slope_category'] = bin_name
179 | list_df_bp_northeast.append(df_subset_northeast)
180 |
181 | list_slope_map.append(slope_1sig)
182 | list_maxnortheast_map.append(maxnortheast_1sig)
183 |
# Two-line tick labels for the boxplot x axis ("a–\nb")
184 | list_bins_slope_2 = []
185 | for j in range(len(bins_slope) - 1):
186 | list_bins_slope_2.append(str(bins_slope[j]) + '–\n' + str(bins_slope[j + 1]))
187 |
188 | df_bp = pd.concat(list_df_bp)
189 | df_bp_northeast = pd.concat(list_df_bp_northeast)
190 |
191 | # Subset for boxplot of uncertainty with slope categories
192 | orig_names = ['stationary_random', 'stationary_longrange','hetsce_random', 'hetsce_longrange']
193 | df_bp_sub = df_bp[df_bp.run.isin(orig_names)]
194 | df_bp_northeast_sub = df_bp_northeast[df_bp_northeast.run.isin(orig_names)]
195 |
# Replace internal run names with display names for the legend
# NOTE(review): .loc assignment on a boolean-indexed slice may trigger pandas' SettingWithCopyWarning;
# consider .copy() on the subsets above if this ever misbehaves
196 | names = ['Homosc., no corr.', 'Homosc., long-range', 'Heterosc., no corr.', 'Heterosc., long-range']
197 | for i, oname in enumerate(orig_names):
198 | df_bp_sub.loc[df_bp_sub.run == oname, 'run'] = names[i]
199 | df_bp_northeast_sub.loc[df_bp_northeast_sub.run == oname, 'run'] = names[i]
200 |
201 |
202 | # Define a submap function
203 |
204 | def add_submap(fig, slices_grid, array, cmap, col_bounds, label, pos_colorbar=None, add_colorbar=True, label_colorbar=None, add_panel_letter=None):
# Add one small UTM-projected map panel to the figure, showing `array` (same grid as raster `r`)
# with a colormap anchored at `col_bounds`, plus an optional horizontal colorbar and panel letter.
# Uses module-level globals: `grid` (GridSpec) and `r` (cropped reference Raster).
#   slices_grid: (row_slice, col_slice) into the global GridSpec
#   col_bounds:  monotonically increasing array of color anchor values (min/max set vmin/vmax)
205 |
206 | ax0 = fig.add_subplot(grid[slices_grid[0], slices_grid[1]], projection=ccrs.UTM(32), label=label)
207 |
# Copy the reference raster for georeferencing; bounds reordered to imshow's (left, right, bottom, top)
208 | tmp_disp = r.copy()
209 | ext = [tmp_disp.bounds[0], tmp_disp.bounds[2], tmp_disp.bounds[1], tmp_disp.bounds[3]]
210 |
211 | tmp_disp.data[0, :, :] = array
212 |
# Build a custom colormap whose colors are pinned at the relative positions of col_bounds
213 | cb = []
214 | cb_val = np.linspace(0, 1, len(col_bounds))
215 | for j in range(len(cb_val)):
216 | cb.append(cmap(cb_val[j]))
217 | cmap_cus2 = colors.LinearSegmentedColormap.from_list('my_cb', list(
218 | zip((col_bounds - min(col_bounds)) / (max(col_bounds - min(col_bounds))), cb)), N=1000)
219 | cmap_cus2.set_bad(color='None')
# NOTE(review): interpolation=None means "use the rcParams default", not "no interpolation";
# if pixel-exact rendering was intended, interpolation='none' is the correct spelling — confirm
220 | ax0.imshow(tmp_disp.data[0, :, :], extent=ext, transform=ccrs.UTM(32), vmin=min(col_bounds), vmax=max(col_bounds), cmap=cmap_cus2,
221 | interpolation=None, zorder=3, alpha=0.85)
222 | ax0.gridlines(draw_labels=False, dms=True, x_inline=False, y_inline=False)
# Panel title just above the axes
223 | ax0.text(0.5, 1.025, label, transform=ax0.transAxes, ha='center', va='bottom', fontweight='bold', fontsize=9, zorder=20)
224 |
225 | if add_panel_letter is not None:
226 | ax0.text(-0.1, 1.2, add_panel_letter, transform=ax0.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
227 | zorder=20)
228 |
# Default colorbar position: horizontal bar spanning below this panel and its left neighbour
229 | if pos_colorbar is None:
230 | pos = [-0.5, -0.15, 1, 0.05]
231 | # pos = [1.05, 0.2, 0.05, 0.6]
232 | else:
233 | pos = pos_colorbar
234 |
235 | if add_colorbar:
236 | cbaxes = ax0.inset_axes(pos, zorder=10)
237 |
238 | norm = colors.Normalize(vmin=min(col_bounds), vmax=max(col_bounds))
239 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
240 | sm.set_array([])
241 | cb = plt.colorbar(sm, cax=cbaxes, ticks=col_bounds, orientation='horizontal', extend='both', shrink=0.2)
242 | cb.set_label(label_colorbar)
244 |
245 | # Start figure
246 | fig = plt.figure(figsize=(7, 11))
# 40x35 cell GridSpec: left columns host the minimaps, right columns host histogram + boxplots
247 | grid = plt.GridSpec(40, 35, wspace=0.1, hspace=0.1)
248 |
249 | # First, an horizontal axis on top to plot the sample histograms
250 |
251 | ax = fig.add_subplot(grid[:6, 18:])
252 |
# Sample count per slope category (counted once, on the 'stationary_random' run)
253 | list_nb_pixel = []
254 | for i in range(len(bins_slope)-1):
255 | ind_pixel = np.logical_and(df_bp.run.values == 'stationary_random', df_bp.slope_category== str(bins_slope[i])+'–'+str(bins_slope[i+1]))
256 | nb_pixel = np.count_nonzero(ind_pixel)
257 | list_nb_pixel.append(nb_pixel)
# Draw each count as a filled bar of width 0.6 centred on the category index
258 | ax.fill_between([i-0.3, i+0.3], [0]*2, [nb_pixel], facecolor='black')
259 |
260 | ax.vlines(np.arange(0.5, len(bins_slope)-1), ymin=-5, ymax=np.max(list_nb_pixel)*1.1, colors='tab:gray', linestyles='dashed', linewidths=0.75)
261 |
262 | ax.set_xticks([])
263 | ax.set_ylabel('Sample count')
264 | ax.set_ylim((100, np.max(list_nb_pixel)*1.1))
265 | ax.yaxis.tick_right()
266 | ax.yaxis.set_label_position("right")
267 | ax.spines['top'].set_visible(False)
268 | ax.spines['left'].set_visible(False)
269 | ax.set_yscale('log')
270 | ax.set_xlim((-0.5, len(bins_slope)-1.5))
271 | ax.text(0.025, 0.9, 'c', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
272 | zorder=20)
273 |
274 | # Boxplots of slope error depending on slope
275 | ax = fig.add_subplot(grid[6:23, 18:])
276 |
277 | sns.boxplot(ax=ax, x="slope_category", y="err_slope", hue="run", hue_order=names,
278 | data=df_bp_sub, palette={names[0]:'white', names[1]:'darkgrey', names[2]:'lightgreen' ,names[3]:'darkgreen'},
279 | fliersize=0, linewidth=1)
280 | ax.vlines(np.arange(0.5, len(bins_slope)-1), ymin=-5, ymax=40, colors='tab:gray', linestyles='dashed', linewidths=0.75)
281 |
282 | ax.set_ylim((-0.5, 17.25))
283 | ax.set_xlabel('Slope categories (degrees)')
284 | ax.set_ylabel('Uncertainty in slope (1$\sigma$, degrees)')
285 | ax.legend(loc='upper right')
# Tick labels suppressed here; shown only on the bottom boxplot sharing the same categories
286 | ax.set_xticklabels([])
287 | ax.yaxis.tick_right()
288 | ax.yaxis.set_label_position("right")
289 | ax.set_xlim((-0.5, len(bins_slope)-1.5))
290 |
291 | # Boxplots of aspect error depending on slope
292 | ax = fig.add_subplot(grid[23:, 18:])
293 |
294 | sns.boxplot(ax=ax, x="slope_category", y="err_northeast", hue="run", hue_order=names,
295 | data=df_bp_northeast_sub, palette={names[0]:'white', names[1]:'darkgrey', names[2]:'lightgreen' ,names[3]:'darkgreen'},
296 | fliersize=0, linewidth=1)
297 | ax.vlines(np.arange(0.5, len(bins_slope)-1), ymin=-1, ymax=2, colors='tab:gray', linestyles='dashed', linewidths=0.5)
298 | ax.set_ylim((-0.05, 1.05))
# Legend already shown on the panel above; remove the duplicate
299 | l = ax.legend()
300 | l.remove()
301 |
302 | # ax.text(0.025, 0.96, 'd', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
303 | # zorder=20)
304 | ax.set_xlabel('Slope categories (degrees)')
305 | ax.set_ylabel('Maximum of uncertainty in northness or eastness (1$\sigma$)')
306 | ax.yaxis.tick_right()
307 | ax.yaxis.set_label_position("right")
308 | ax.set_xticklabels(list_bins_slope_2)
309 | ax.set_xlim((-0.5, len(bins_slope)-1.5))
310 |
311 | # Finally, the minimaps
# Hillshade panel: grayscale colormap, no colorbar, carries panel letter 'a'
312 | color1 = colors.to_rgba('black')
313 | color2 = colors.to_rgba('white')
314 | cmap = colors.LinearSegmentedColormap.from_list('my_cmap_hs', [color1, color2], 256)
315 | col_bounds = np.array([0., 127.5, 255.])
316 | add_submap(fig, slices_grid=(slice(1, 9), slice(0, 8)), array=hs.data[0, :, :], cmap=cmap, col_bounds=col_bounds, label='Hillshade',
317 | add_colorbar=False, add_panel_letter='a')
318 |
# Slope panel with shared colorbar (degrees)
319 | cmap = plt.get_cmap('Reds', 100)
320 | col_bounds = np.array([0., 20., 40., 60., 80.])
321 | add_submap(fig, slices_grid=(slice(1, 9), slice(8, 16)), array=slope, cmap=cmap, col_bounds=col_bounds, label='Slope',
322 | label_colorbar='Slope (degrees)')
323 |
324 | col_bounds = np.array([-1., 0., 1.])
325 | cmap = plt.get_cmap('RdBu')
326 | add_submap(fig, slices_grid=(slice(11, 19), slice(0, 8)), array=np.cos(aspect * np.pi / 180), cmap=cmap, col_bounds=col_bounds,
327 | label='Northness', add_colorbar=False)
328 | add_submap(fig, slices_grid=(slice(11, 19), slice(8, 16)), array=np.sin(aspect * np.pi / 180), cmap=cmap, col_bounds=col_bounds,
329 | label_colorbar='Northess or Eastness', label='Eastness')
330 |
# Slope-uncertainty panels (runs 0 and 5 of list_slope_map): simplest vs most complete error model
331 | col_bounds = np.array([0., 5, 10.])
332 | cmap = plt.get_cmap('Purples')
333 | add_submap(fig, slices_grid=(slice(22, 30), slice(0, 8)), array=list_slope_map[0], cmap=cmap, col_bounds=col_bounds,
334 | label='Homosc.,\nno corr.', add_colorbar=False, add_panel_letter='b')
335 |
336 | add_submap(fig, slices_grid=(slice(22, 30), slice(8, 16)), array=list_slope_map[5], cmap=cmap, col_bounds=col_bounds,
337 | label_colorbar='Uncertainty in slope (1$\sigma$, degrees)', label='Heterosc.,\nlong-range')
338 |
# Aspect-uncertainty (max of northness/eastness spread) panels for the same two runs
339 | col_bounds = np.array([0., 0.25, 0.5])
340 | cmap = plt.get_cmap('Greens')
341 | add_submap(fig, slices_grid=(slice(33, 41), slice(0, 8)), array=list_maxnortheast_map[0], cmap=cmap, col_bounds=col_bounds,
342 | label='Homosc.,\nno corr.', add_colorbar=False)
343 |
344 | add_submap(fig, slices_grid=(slice(33, 41), slice(8, 16)), array=list_maxnortheast_map[5], cmap=cmap, col_bounds=col_bounds,
345 | label_colorbar='Maximum uncertainty in\nnorthness or eastness (1$\sigma$)', label='Heterosc.,\nlong-range')
346 |
347 | # Save to file
348 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_6_final.png', dpi=400)
349 |
--------------------------------------------------------------------------------
/figures/fig_s10_qqplot_normalfit_montblanc.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S10: Q-Q plot and normal fit after standardization of dh for the Mont-Blanc case study"""
2 | import numpy as np
3 | import xdem
4 | import geoutils as gu
5 | from scipy.stats import probplot, norm
6 | import matplotlib.pyplot as plt
7 |
8 | fn_ddem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
9 | fn_pleiades = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
10 | fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
11 | fn_forest = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'
12 |
13 | pleia_ddem = gu.Raster(fn_ddem)
14 | ref_dem = gu.Raster(fn_pleiades)
15 | glaciers_outlines = gu.Vector(fn_shp)
16 | forest_outlines = gu.Vector(fn_forest)
# Boolean masks on the dh grid: glacier (moving terrain) and forest pixels
17 | mask_glacier = glaciers_outlines.create_mask(pleia_ddem)
18 | mask_forest = forest_outlines.create_mask(pleia_ddem)
19 |
20 | # Remove forest, very large outliers
21 | pleia_ddem.data[mask_forest] = np.nan
22 | pleia_ddem.data[np.abs(pleia_ddem.data)>200] = np.nan
23 |
24 | # pleia_ddem.data[mask_glacier] = np.nan
25 |
26 | slope, planc, profc = xdem.terrain.get_terrain_attribute(ref_dem, attribute=['slope', 'planform_curvature',
27 | 'profile_curvature'])
28 | maxabsc = np.maximum(np.abs(planc), np.abs(profc))
29 |
# Free the reference DEM raster; only its derived attributes are needed from here on
30 | del ref_dem
31 |
32 | # # Filter large outliers per category
33 | bins_slope = [0, 2.5, 5, 10, 15, 20, 30, 40, 50, 70, 90]
34 | # bins_curv = [-10, -8, -6, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 6, 8, 10]
35 | bins_curv = [0, 0.1, 0.2, 0.5, 1, 1.5, 2, 3, 4, 5, 6, 7, 8, 10, 20, 50]
# 5-NMAD filter applied per slope bin, then per curvature bin
36 | for i in range(len(bins_slope) - 1):
37 | # Subset by slope category
38 | subset = np.logical_and(slope.data >= bins_slope[i], slope.data < bins_slope[i + 1])
39 | dh_sub = pleia_ddem.data[subset]
40 | # Remove very large outliers of the category
41 | med_sub = np.nanmedian(dh_sub)
42 | nmad_sub = xdem.spatialstats.nmad(dh_sub)
43 | # Remove outliers
44 | pleia_ddem.data[np.logical_and(subset, np.abs(pleia_ddem.data-med_sub) > 5 * nmad_sub)] = np.nan
45 | for i in range(len(bins_curv) - 1):
# Subset by curvature category (comment below kept from original; it loops on curvature bins)
46 | # Subset by slope category
47 | subset = np.logical_and(maxabsc >= bins_curv[i], maxabsc < bins_curv[i + 1])
48 | dh_sub = pleia_ddem.data[subset]
49 | # Remove very large outliers of the category
50 | med_sub = np.nanmedian(dh_sub)
51 | nmad_sub = xdem.spatialstats.nmad(dh_sub)
52 | # Remove outliers
53 | pleia_ddem.data[np.logical_and(subset, np.abs(pleia_ddem.data-med_sub) > 5 * nmad_sub)] = np.nan
54 |
55 |
# Stable-terrain subsets (outside glacier outlines), used to fit the error model
56 | pleia_ddem_sta = pleia_ddem.data[~mask_glacier]
57 | slope_sta = slope.data[~mask_glacier]
58 | maxabsc_sta = maxabsc[~mask_glacier]
59 |
# Bin dh dispersion (NMAD) by slope and curvature, then interpolate into a continuous error function
60 | df_sub = xdem.spatialstats.nd_binning(pleia_ddem_sta, list_var=[slope_sta, maxabsc_sta], list_var_names=['slope', 'maxc'], list_var_bins=(bins_slope, bins_curv))
61 |
62 | fn = xdem.spatialstats.interp_nd_binning(df_sub, list_var_names=['slope', 'maxc'], statistic='nmad', min_count=30)
63 |
# Clamp curvature to the last bin edge to avoid extrapolating the interpolant
64 | maxabsc[maxabsc>50] = 50
65 | dh_err = fn((slope.data, maxabsc))
66 |
67 | pleia_ddem_gla = pleia_ddem.data[mask_glacier]
68 |
# Standardize dh by the modelled per-pixel error, then 5-NMAD filter the standardized scores
# NOTE(review): .data.data accesses the raw (unmasked) array here, unlike .data elsewhere — confirm intended
69 | std_dh = pleia_ddem.data.data/dh_err
70 | std_dh[np.abs(std_dh- np.nanmedian(std_dh))>5*xdem.spatialstats.nmad(std_dh)] = np.nan
71 |
72 | std_dh_sta = std_dh[~mask_glacier]
73 | std_dh_gla = std_dh[mask_glacier]
74 |
# Fixed random_state makes the 500k-point subsamples reproducible across runs
75 | subsample_dh_sta = gu.spatial_tools.subsample_raster(pleia_ddem_sta, subsample=500000, random_state=42)
76 | subsample_dh_gla = gu.spatial_tools.subsample_raster(pleia_ddem_gla, subsample=500000, random_state=42)
77 |
78 | subsample_std_sta = gu.spatial_tools.subsample_raster(std_dh_sta, subsample=500000, random_state=42)
79 | subsample_std_gla = gu.spatial_tools.subsample_raster(std_dh_gla, subsample=500000, random_state=42)
80 |
81 |
82 | # 1/ First, we plot the Q-Q plot for elevation differences
83 | fig = plt.figure(figsize=(7, 7))
84 |
85 | grid = plt.GridSpec(22, 23, wspace=0.1, hspace=0.1)
86 |
87 | ax = fig.add_subplot(grid[0:10, 0:10])
88 |
# probplot with fit=False draws sample quantiles against theoretical normal quantiles on the current axes
89 | probplot(x=subsample_dh_sta, plot=plt, fit=False)
90 |
91 | probplot(x=subsample_dh_gla, plot=plt, fit=False)
92 |
# Recolor the two probplot line artists: line 0 = stable terrain, line 1 = glacier (order of the calls above)
93 | ax.get_lines()[0].set_markerfacecolor('tab:brown')
94 | ax.get_lines()[0].set_markeredgecolor('tab:brown')
95 | ax.get_lines()[0].set_markersize(4.0)
96 |
97 | # ax.get_lines()[1].set_color('black')
98 |
99 | ax.get_lines()[1].set_markerfacecolor('tab:cyan')
100 | ax.get_lines()[1].set_markeredgecolor('tab:cyan')
101 | ax.get_lines()[1].set_markersize(4.0)
102 |
# 1:1 reference line
103 | ax.plot([-5, 5], [-5, 5], color='black', zorder=1)
104 |
105 | ax.set_title('')
106 | ax.set_xlabel('Theoretical normal quantiles')
107 | ax.set_ylabel('Quantiles of ordered\nelevation differences')
108 |
# Empty plots used only to create legend handles with the desired colors
109 | ax.plot([], [], color='tab:brown', label='Stable terrain', linewidth=5)
110 | ax.plot([], [], color='tab:cyan', label='Moving terrain', linewidth=5)
111 | ax.plot([], [], color='black', label='1:1 line')
112 |
113 | ax.legend(loc='lower right')
114 |
115 | ax.text(0.05, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
116 | zorder=20)
117 |
118 | # 2/ Then with the standardized elevation differences
119 | ax = fig.add_subplot(grid[12:, 0:10])
120 |
121 | probplot(x=subsample_std_sta, plot=plt, fit=False)
122 |
123 | probplot(x=subsample_std_gla, plot=plt, fit=False)
124 |
125 | ax.get_lines()[0].set_markerfacecolor('tab:brown')
126 | ax.get_lines()[0].set_markeredgecolor('tab:brown')
127 | ax.get_lines()[0].set_markersize(4.0)
128 |
129 | # ax.get_lines()[1].set_color('black')
130 |
131 | ax.get_lines()[1].set_markerfacecolor('tab:cyan')
132 | ax.get_lines()[1].set_markeredgecolor('tab:cyan')
133 | ax.get_lines()[1].set_markersize(4.0)
134 | ax.set_xlabel('Theoretical normal quantiles')
135 | ax.set_ylabel('Quantiles of ordered\nstandard score of\nelevation differences')
136 |
137 | ax.plot([-5, 5], [-5, 5], color='black', zorder=1)
138 |
139 | ax.set_title('')
140 | ax.text(0.05, 0.95, 'c', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
141 | zorder=20)
143 | # 3/ Histogram comparison with a normal fit for the elevation differences
144 | ax = fig.add_subplot(grid[0:10, 13:18])
145 |
# `bins` (the bin edges) is reused below to evaluate the fitted normal PDF
146 | n, bins, patches = ax.hist(subsample_dh_sta, color='tab:brown', alpha=0.6, bins=50, density=True, range=(-20, 20))
147 | ax.hist(subsample_dh_gla, color='tab:cyan', alpha=0.6, bins=50, density=True, range=(-20, 20))
148 |
# Maximum-likelihood normal fit on stable terrain only
149 | mu_dh, sigma_dh = norm.fit(subsample_dh_sta)
150 |
151 | y = norm.pdf(bins, mu_dh, sigma_dh)
152 | ax.plot(bins, y, 'black', linewidth=1, linestyle='dashed', label='Normal\nfit')
153 | ax.text(0.1, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
154 | zorder=20)
# Left half of a split histogram panel: linear y scale for the negative side
155 | ax.set_xlim((-20, 0))
156 | ax.set_ylabel('Probability density')
157 | ax.set_xlabel('Elevation differences (m)', x=1, ha='center')
158 | ax.legend(loc='center left')
159 |
# Right half of the panel: positive side on a log y scale
160 | ax = fig.add_subplot(grid[0:10, 18:])
161 |
162 | ax.hist(subsample_dh_sta, color='tab:brown', alpha=0.6, bins=50, density=True, range=(-20, 20))
163 | ax.hist(subsample_dh_gla, color='tab:cyan', alpha=0.6, bins=50, density=True, range=(-20, 20))
164 | ax.plot(bins, y, 'black', linewidth=1, linestyle='dashed')
165 |
166 | ax.set_xlim((0.01, 20))
167 | ax.set_yscale('log')
168 | ax.yaxis.tick_right()
169 | ax.yaxis.set_label_position("right")
170 |
171 | # 4/ Same for the standardized differences
172 | ax = fig.add_subplot(grid[12:, 13:18])
173 |
174 | n, bins, patches = ax.hist(subsample_std_sta, color='tab:brown', alpha=0.6, bins=50, density=True)
175 | ax.hist(subsample_std_gla, color='tab:cyan', alpha=0.6, bins=50, density=True)
176 |
177 | mu_std, sigma_std = norm.fit(subsample_std_sta)
178 |
179 | y = norm.pdf(bins, mu_std, sigma_std)
180 | ax.plot(bins, y, 'black', linewidth=1, linestyle='dashed')
181 | ax.text(0.1, 0.95, 'd', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
182 | zorder=20)
183 | # ax.set_yscale('log')
184 | ax.set_xlim((-5, 0))
185 | ax.set_ylabel('Probability density')
186 | ax.set_xlabel('Standardized elevation differences', x=1, ha='center')
187 |
188 | ax = fig.add_subplot(grid[12:, 18:])
189 |
# NOTE(review): range=(-20, 20) copied from the dh panels; standardized scores were clipped to ~±5
# above, so this mostly widens the bins — confirm intended
190 | ax.hist(subsample_std_sta, color='tab:brown', alpha=0.6, bins=50, density=True, range=(-20, 20))
191 | ax.hist(subsample_std_gla, color='tab:cyan', alpha=0.6, bins=50, density=True, range=(-20, 20))
192 | ax.plot(bins, y, 'black', linewidth=1, linestyle='dashed')
193 |
194 | ax.set_xlim((0.01, 5))
195 | ax.set_yscale('log')
196 | ax.yaxis.tick_right()
197 | ax.yaxis.set_label_position("right")
198 |
199 | # Save to file
200 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S10_final.png', dpi=400)
201 |
202 |
--------------------------------------------------------------------------------
/figures/fig_s11_vario_estimator_robustness_montblanc.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S11: effect of variogram estimator robustness for the Mont-Blanc case study"""
2 | import numpy as np
3 | import pandas as pd
4 | import xdem
5 | import geoutils as gu
6 | import matplotlib.pyplot as plt
7 | import skgstat as skg
8 |
9 | fn_ddem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
10 | fn_pleiades = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
11 | fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
12 | fn_forest = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'
13 |
14 | pleia_ddem = gu.Raster(fn_ddem)
15 | ref_dem = gu.Raster(fn_pleiades)
16 | glaciers_outlines = gu.Vector(fn_shp)
17 | forest_outlines = gu.Vector(fn_forest)
18 | mask_glacier = glaciers_outlines.create_mask(pleia_ddem)
19 | mask_forest = forest_outlines.create_mask(pleia_ddem)
20 |
21 | # Remove forest, very large outliers
22 | pleia_ddem.data[mask_forest] = np.nan
23 | pleia_ddem.data[np.abs(pleia_ddem.data)>500] = np.nan
24 |
25 | pleia_ddem.data[mask_glacier] = np.nan
26 |
27 | slope, planc, profc = xdem.terrain.get_terrain_attribute(ref_dem, attribute=['slope', 'planform_curvature',
28 | 'profile_curvature'])
29 | maxabsc = np.maximum(np.abs(planc), np.abs(profc))
30 |
31 | # First, without outlier filtering
32 |
33 | fn_dowd_nofilt = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_sta_dowd_nofilt.csv'
34 | fn_matheron_nofilt = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_sta_matheron_nofilt.csv'
35 |
36 | # df_vgm_sta_dowd_nofilt = xdem.spatialstats.sample_empirical_variogram(pleia_ddem.data.data, pleia_ddem.res[0], subsample=50, n_variograms=10, runs=20, estimator='dowd', n_jobs=3)
37 | # df_vgm_sta_matheron_nofilt = xdem.spatialstats.sample_empirical_variogram(pleia_ddem.data.data, pleia_ddem.res[0], subsample=50, n_variograms=10, runs=20, estimator='matheron', n_jobs=3)
38 | # df_vgm_sta_dowd_nofilt.to_csv(fn_dowd_nofilt, index=False)
39 | # df_vgm_sta_matheron_nofilt.to_csv(fn_matheron_nofilt, index=False)
40 |
41 | df_vgm_sta_dowd_nofilt = pd.read_csv(fn_dowd_nofilt)
42 | # Normalizing to semi-variance
43 | df_vgm_sta_dowd_nofilt.exp /= 2
44 | df_vgm_sta_dowd_nofilt.err_exp /= 2
45 | df_vgm_sta_matheron_nofilt = pd.read_csv(fn_matheron_nofilt)
46 |
# # Filter large outliers per category
bins_slope = [0, 2.5, 5, 10, 15, 20, 30, 40, 50, 70, 90]
# # bins_curv = [-10, -8, -6, -4, -2, -1, -0.5, 0, 0.5, 1, 2, 4, 6, 8, 10]
bins_curv = [0, 0.2, 0.5, 1, 2, 3, 4, 6, 10, 20, 50]
for i in range(len(bins_slope) - 1):
    # Subset by slope category
    subset = np.logical_and(slope.data >= bins_slope[i], slope.data < bins_slope[i + 1])
    dh_sub = pleia_ddem.data[subset]
    # Remove very large outliers of the category
    med_sub = np.nanmedian(dh_sub)
    nmad_sub = xdem.spatialstats.nmad(dh_sub)
    # Remove values beyond 7 NMAD of the category median
    pleia_ddem.data[np.logical_and(subset, np.abs(pleia_ddem.data-med_sub) > 7 * nmad_sub)] = np.nan
for i in range(len(bins_curv) - 1):
    # Subset by maximum absolute curvature category
    subset = np.logical_and(maxabsc >= bins_curv[i], maxabsc < bins_curv[i + 1])
    dh_sub = pleia_ddem.data[subset]
    # Remove very large outliers of the category
    med_sub = np.nanmedian(dh_sub)
    nmad_sub = xdem.spatialstats.nmad(dh_sub)
    # Remove values beyond 7 NMAD of the category median
    pleia_ddem.data[np.logical_and(subset, np.abs(pleia_ddem.data-med_sub) > 7 * nmad_sub)] = np.nan
69 |
70 |
71 | # Then, with outlier filtering
72 |
73 | fn_dowd_filt = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_sta_dowd_filt.csv'
74 | fn_matheron_filt = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_sta_matheron_filt.csv'
75 |
76 | # df_vgm_sta_dowd = xdem.spatialstats.sample_empirical_variogram(pleia_ddem.data.data, pleia_ddem.res[0], subsample=50, n_variograms=10, runs=20, estimator='dowd', n_jobs=3)
77 | # df_vgm_sta_matheron = xdem.spatialstats.sample_empirical_variogram(pleia_ddem.data.data, pleia_ddem.res[0], subsample=50, n_variograms=10, runs=20, estimator='matheron', n_jobs=3)
78 | # df_vgm_sta_dowd.to_csv(fn_dowd_filt, index=False)
79 | # df_vgm_sta_matheron.to_csv(fn_matheron_filt, index=False)
80 |
81 | df_vgm_sta_dowd = pd.read_csv(fn_dowd_filt)
82 | # Normalizing to semi-variance
83 | df_vgm_sta_dowd.exp /= 2
84 | df_vgm_sta_dowd.err_exp /= 2
85 | df_vgm_sta_matheron = pd.read_csv(fn_matheron_filt)
86 |
87 |
88 | # FIGURE
89 | fig = plt.figure(figsize=(12,5.5))
90 |
91 | ylabel = "Elevation variance (m²)"
92 | xlabel = 'Spatial lag (m)'
93 | xscale_range_split = [120, 640, 3600]
94 | xscale = 'linear'
95 | list_fit_fun = None
96 | list_fit_fun_label = None
97 | xlim = None
98 | ylim = (0, 55)
99 | list_df = [df_vgm_sta_matheron_nofilt, df_vgm_sta_dowd_nofilt]
100 | col_df = ['tab:orange', 'tab:blue']
101 | label_df = ['Matheron estimator', 'Dowd estimator']
102 | df0 = list_df[0]
103 |
104 | init_gridsize = [10, 20]
105 | # Create parameters to split x axis into different linear scales
106 | # If there is no split, get parameters for a single subplot
107 | if xscale_range_split is None:
108 | nb_subpanels = 1
109 | if xscale == 'log':
110 | xmin = [np.min(df0.bins) / 2]
111 | else:
112 | xmin = [0]
113 | xmax = [np.max(df0.bins)]
114 | xgridmin = [0]
115 | xgridmax = [init_gridsize[0]]
116 | gridsize = init_gridsize
117 | # Otherwise, derive a list for each subplot
118 | else:
119 | # Add initial zero if not in input
120 | if xscale_range_split[0] != 0:
121 | if xscale == 'log':
122 | first_xmin = np.min(df0.bins) / 2
123 | else:
124 | first_xmin = 0
125 | xscale_range_split = [first_xmin] + xscale_range_split
126 | # Add maximum distance if not in input
127 | if xscale_range_split[-1] != np.max(df0.bins):
128 | xscale_range_split.append(15000)
129 |
130 | # Scale grid size by the number of subpanels
131 | nb_subpanels = len(xscale_range_split) - 1
132 | gridsize = init_gridsize.copy()
133 | gridsize[1] = 18
134 | gridsize[0] *= nb_subpanels
135 | # Create list of parameters to pass to ax/grid objects of subpanels
136 | xmin, xmax, xgridmin, xgridmax = ([] for i in range(4))
137 | for i in range(nb_subpanels):
138 | xmin.append(xscale_range_split[i])
139 | xmax.append(xscale_range_split[i + 1])
140 | xgridmin.append(init_gridsize[0] * i)
141 | xgridmax.append(init_gridsize[0] * (i + 1))
142 |
143 | # Need a grid plot to show the sample count and the statistic
144 | grid = plt.GridSpec(gridsize[1], gridsize[0], wspace=0.1, hspace=0.1)
145 |
# Loop over each subpanel (order chosen so that panel 0 is drawn after 1; legend drawn last)
for k in [1,0,2,3]:
    # First, an axis to plot the sample histogram
    ax0 = fig.add_subplot(grid[:3, xgridmin[k]:xgridmax[k]])
    ax0.set_xscale(xscale)
    ax0.set_xticks([])

    # Plot the histogram manually with fill_between: side-by-side bars for the
    # two estimators within each lag bin
    interval_var = [0] + list(df0.bins)
    for i in range(len(df0)):
        width = interval_var[i+1] - interval_var[i]
        mid = interval_var[i] + width/2
        count = list_df[0]['count'].values[i]
        count_gla = list_df[1]['count'].values[i]
        ax0.fill_between([mid-width/3, mid], [0] * 2, [count] * 2,
                         facecolor='tab:orange', alpha=1,
                         edgecolor='black', linewidth=0.5)
        ax0.fill_between([mid, mid+width/3], [0] * 2, [count_gla] * 2,
                         facecolor='tab:blue', alpha=1,
                         edgecolor='black', linewidth=0.5)
        ax0.vlines(mid-width/2, ymin=[0], ymax=1.2*max(list_df[0]['count'].values), colors='tab:gray', linestyles='dashed', linewidths=0.5)
    if k == 0:
        ax0.set_ylabel('Pairwise\nsample\ncount')
        # Scientific format to avoid undesired additional space on the label side
        ax0.ticklabel_format(axis='y', style='sci', scilimits=(0, 0))
    else:
        ax0.set_yticks([])
    # Ignore warnings for log scales
    ax0.set_xlim((xmin[k], xmax[k]))
    ax0.set_ylim((0, 1.2*max(list_df[0]['count'].values)))

    # Now, plot the statistic of the data
    ax = fig.add_subplot(grid[3:10, slice(xgridmin[k],xgridmax[k])])

    # Get the bins center and plot the empirical semi-variance with its error bar
    for i, df in enumerate(list_df):
        bins_center = np.subtract(df.bins, np.diff([0] + df.bins.tolist()) / 2)
        ax.errorbar(bins_center, df.exp, yerr=df.err_exp, fmt='x', color=col_df[i])

    # Dashed vertical separators at the bin edges
    for i in range(len(df0)):
        width = interval_var[i + 1] - interval_var[i]
        mid = interval_var[i] + width / 2
        ax.vlines(mid - width / 2, ymin=[0], ymax=2*max(df0.exp), colors='tab:gray', linestyles='dashed', linewidths=0.5)

    # ax.hlines(1, xmin=xmin[k], xmax=xmax[k], colors='black', linestyles='dotted')
    # If a list of functions is passed, plot the modelled variograms
    # (list_fit_fun is None in this figure, so this branch is skipped here)
    if list_fit_fun is not None:
        for i, fit_fun in enumerate(list_fit_fun):
            x = np.linspace(xmin[k], xmax[k], 1000)

            def vgm_short(h):
                fn = skg.models.spherical(h, fit_fun[0], fit_fun[1])
                return fn

            def vgm_long(h):
                fn = skg.models.spherical(h, fit_fun[2], fit_fun[3])
                return fn

            def vgm_sum(h):
                fn = skg.models.spherical(h, fit_fun[0], fit_fun[1]) + skg.models.spherical(h, fit_fun[2], fit_fun[3])
                return fn

            colors_terrain = ['tab:brown', 'tab:cyan']

            ax.plot(x, vgm_short(x), linestyle='dashdot', color=colors_terrain[i], zorder=30, linewidth=1)
            ax.plot(x, vgm_sum(x), linestyle='dashed', color=colors_terrain[i], zorder=30, linewidth=1.5)
            # ax.plot(x, vgm_long(x), linestyle='dashdot', color=colors[i], label = 'Long-range model', zorder=30, linewidth=1)
            if i == 0:
                # Empty artists create the legend entries once
                ax.errorbar([], [], [], color='black', label='Empirical variogram', fmt='x')
                ax.plot([], [], linestyle='dashdot', color='black', label='Modelled variogram: short-range')
                ax.plot([], [], linestyle='dashed', color='black', label='Modelled variogram: short- and long-range',
                        linewidth=1.5)
                ax.plot([], [], color='tab:brown', label='Stable terrain')
                ax.plot([], [], color='tab:cyan', label='Unstable terrain')


    ax.set_xscale(xscale)
    ax.set_xticks([])

    if xlim is None:
        ax.set_xlim((xmin[k], xmax[k]))
    else:
        ax.set_xlim(xlim)

    if ylim is not None:
        ax.set_ylim(ylim)
    else:
        ax.set_ylim((0, np.nanmax(df_vgm_sta_matheron_nofilt.exp) + np.nanmean(df_vgm_sta_matheron_nofilt.err_exp)))

    # Legend on the last subpanel only
    if k == nb_subpanels - 1:
        ax.errorbar([], [], [], color='tab:orange', label="Matheron's estimator with raw data", fmt='o')
        ax.errorbar([], [], [], color='tab:blue',
                    label="Dowd's estimator with raw data", fmt='o')
        # handles, labels = plt.gca().get_legend_handles_labels()
        # order = [4, 0, 1, 2, 3]
        # ax.legend([handles[idx] for idx in order], [labels[idx] for idx in order], loc='lower right', ncol=2)
        ax.legend(loc='center right')
    if k == 0:
        ax.set_ylabel(ylabel)
        ax.text(0.1, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

    else:
        ax.set_yticks([])
249 |
250 |
# Second row of panels: same layout, but with the outlier-filtered variograms
list_df = [df_vgm_sta_matheron, df_vgm_sta_dowd]
list_fit_fun = None
col_df = ['tab:orange', 'tab:blue']
label_df = ['Matheron estimator', 'Dowd estimator']
ylim = (0, 11)


for k in [2,3,0,1]:

    # Now, plot the statistic of the data
    ax = fig.add_subplot(grid[10:, slice(xgridmin[k],xgridmax[k])])

    # Get the bins center; the second series is shifted right by 1/20 of the
    # panel width so the two estimators' markers do not overlap
    list_fmt = ['o', 'o']
    for i, df in enumerate(list_df):
        if i == 0:
            bins_center = np.subtract(df.bins, np.diff([0] + df.bins.tolist()) / 2)
        else:
            bins_center = np.subtract(df.bins, np.diff([0] + df.bins.tolist()) / 2) + (xmax[k]-xmin[k])/20
        ax.errorbar(bins_center, df.exp, yerr=df.err_exp, fmt=list_fmt[i], color=col_df[i])

    # Dashed vertical separators at the bin edges (interval_var comes from the panel-a loop)
    for i in range(len(df0)):
        width = interval_var[i + 1] - interval_var[i]
        mid = interval_var[i] + width / 2
        ax.vlines(mid - width / 2, ymin=[0], ymax=2*max(df0.exp), colors='tab:gray', linestyles='dashed', linewidths=0.5)

    # ax.hlines(1, xmin=xmin[k], xmax=xmax[k], colors='black', linestyles='dotted')
    # If a list of functions is passed, plot the modelled variograms
    # (list_fit_fun is None in this figure, so this branch is skipped here)
    list_linestyle = ['dashed', 'dashdot']
    if list_fit_fun is not None:
        for i, fit_fun in enumerate(list_fit_fun):
            x = np.linspace(xmin[k], xmax[k], 1000)

            def vgm_short(h):
                fn = skg.models.spherical(h, fit_fun[0], fit_fun[1])
                return fn

            def vgm_long(h):
                fn = skg.models.spherical(h, fit_fun[2], fit_fun[3])
                return fn

            def vgm_sum(h):
                fn = skg.models.spherical(h, fit_fun[0], fit_fun[1]) + skg.models.spherical(h, fit_fun[2], fit_fun[3])
                return fn

            colors_terrain = ['tab:brown', 'tab:brown']

            # ax.plot(x, vgm_short(x), linestyle='dashdot', color=colors_terrain[i], zorder=30, linewidth=1)
            ax.plot(x, vgm_sum(x), linestyle=list_linestyle[i], color=colors_terrain[i], zorder=30, linewidth=1.5)
            # ax.plot(x, vgm_long(x), linestyle='dashdot', color=colors[i], label = 'Long-range model', zorder=30, linewidth=1)
            if i == 0:
                ax.plot([], [], linestyle='dashed', color='black', label='Modelled variogram sampled\nfrom standardized $dh$',
                        linewidth=1.5)
                ax.plot([], [], linestyle='dashdot', color='black', label='Modelled variogram divided by\naverage variance sampled from $dh$',
                        linewidth=1.5)


    ax.set_xscale(xscale)
    # x tick labels only on this bottom row; last subpanel keeps its right-edge tick
    if nb_subpanels > 1 and k == (nb_subpanels - 1):
        ax.xaxis.set_ticks(np.linspace(xmin[k], xmax[k], 3))
    elif nb_subpanels > 1:
        ax.xaxis.set_ticks(np.linspace(xmin[k], xmax[k], 3)[:-1])

    if xlim is None:
        ax.set_xlim((xmin[k], xmax[k]))
    else:
        ax.set_xlim(xlim)

    if ylim is not None:
        ax.set_ylim(ylim)
    else:
        ax.set_ylim((0, np.nanmax(df_vgm_sta_matheron.exp) + np.nanmean(df_vgm_sta_matheron.err_exp)))

    if k == nb_subpanels - 1:
        pass
        # ax.annotate(text='', xy=(12500, 0.93), xytext=(7000, 0.82), arrowprops=dict(arrowstyle='-|>', connectionstyle=None,
        #                                               shrinkA=0, shrinkB=0,
        #                                               patchA=None, patchB=None, linewidth=1, facecolor='black'), zorder=30)
        # ax.text(7000, 0.8, '30-50% smaller error\nof variogram estimation\nusing standardized $dh$,\nmostly at long ranges', ha='center', va='top',
        #         bbox= dict(facecolor='white', boxstyle='round', alpha=0.8, linewidth=0.5), zorder=31)
    if k == 3:
        ax.errorbar([], [], [], color='tab:orange', label="Matheron's estimator with filtered data", fmt='o')
        ax.errorbar([], [], [], color='tab:blue',
                    label="Dowd's estimator with filtered data", fmt='o')

        # handles, labels = plt.gca().get_legend_handles_labels()
        # order = [0, 1, 2, 3]
        # ax.legend([handles[idx] for idx in order], [labels[idx] for idx in order], loc='lower left', bbox_to_anchor=(-0.75, 0.05), ncol=2)
        ax.legend(loc='center right')
    if k == 1:
        ax.set_xlabel('Spatial lag (m)', x=1, ha='center')
    if k == 0:
        ax.set_ylabel(ylabel)
        ax.text(0.1, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
    else:
        ax.set_yticks([])

plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S11_final.png', dpi=400)
349 |
350 |
--------------------------------------------------------------------------------
/figures/fig_s13_pairwise_sampling_random_vs_ring.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S13: improve pairwise sampling for variogram estimation on grid data"""
2 | import numpy as np
3 | import skgstat as skg
4 | import matplotlib.pyplot as plt
5 | import matplotlib.patches as mpatches
6 |
7 | # Define random state
8 | rnd = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(42)))
9 |
10 | shape = (500, 500)
11 | x = np.arange(0, shape[0])
12 | y = np.arange(0, shape[1])
13 | xx, yy = np.meshgrid(x, y)
14 | vals = np.random.normal(0, 1, size=shape)
15 |
16 | # Flatten everything because we don't care about the 2D at this point
17 | coords = np.dstack((xx.flatten(), yy.flatten())).squeeze()
18 |
19 | # Completely random subsetting
20 | rnd = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(42)))
21 | subset = rnd.choice(len(coords), 200, replace=False)
22 | coords_sub = coords[subset, :]
23 |
24 | extent=(x[0], x[-1], y[0], y[-1])
25 | ratio_subsample = 0.2
26 | samples = int(200 / 3.72)
27 | res = np.mean([(extent[1] - extent[0])/(shape[0]-1),(extent[3] - extent[2])/(shape[1]-1)])
28 | V_r = skg.Variogram(coordinates=coords_sub, values=vals.flatten()[subset], normalize=False)
29 |
30 | # Disk equidistant subsetting
31 | rnd = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(42)))
32 | rems_mp = skg.RasterEquidistantMetricSpace(coords, shape=shape, extent=extent, samples=samples, rnd=rnd, runs=1, verbose=True)
33 | V = skg.Variogram(rems_mp, values=vals.flatten(), normalize=False)
34 | coords_eq = coords[rems_mp.eqidx[0]]
35 | coords_center = coords[rems_mp.cidx[0]]
36 | rnd = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(42)))
37 | idx_center = rnd.choice(len(coords), size=1, replace=False)
38 | center = rems_mp._centers[0]
39 | radius = np.sqrt(1. / ratio_subsample * samples / np.pi) * res
40 |
41 | equidistant_radii = [0.]
42 | increasing_rad = radius
43 | max_dist = np.sqrt((extent[1] - extent[0])**2 + (extent[3] - extent[2])**2)
44 | while increasing_rad < max_dist:
45 | equidistant_radii.append(increasing_rad)
46 | increasing_rad *= np.sqrt(2)
47 | equidistant_radii.append(max_dist)
48 |
49 | V_r2 = skg.Variogram(coordinates=coords_sub, values=vals.flatten()[subset], normalize=False, bin_func=equidistant_radii[1:])
50 | rnd = np.random.RandomState(np.random.MT19937(np.random.SeedSequence(42)))
51 | rems_mp = skg.RasterEquidistantMetricSpace(coords, shape=shape, extent=extent, samples=samples, rnd=rnd, runs=1, verbose=True)
52 | V2 = skg.Variogram(rems_mp, values=vals.flatten(), normalize=False, bin_func=equidistant_radii[1:])
53 | V2_bins = [np.sum(np.logical_and(V2.distance>=equidistant_radii[i], V2.distance1000]) - df_std_sta.exp.values[df_std_sta.bins.values>1000])**2) for f in list_f]
248 | list_rss_short = [sum((f(df_std_sta.bins.values[df_std_sta.bins.values<=1000]) - df_std_sta.exp.values[df_std_sta.bins.values<1000])**2) for f in list_f]
--------------------------------------------------------------------------------
/figures/fig_s15_table_s3_s4_sensitivity_form_vgm.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S15 and Table S3-4: sensitivity with the form of variogram models for the Mont-Blanc case study"""
2 | import numpy as np
3 | import pandas as pd
4 | import xdem
5 | import matplotlib.pyplot as plt
6 |
7 | # Open empirical variograms
8 | df_std_sta = pd.read_csv('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_std_sta.csv')
9 |
10 | df_all_patches = pd.read_csv('/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_patches_sta_gla.csv')
11 |
12 | df_std_sta = df_std_sta[df_std_sta.bins<30000]
13 | df_std_sta.err_exp /= np.sqrt(100)/2
14 |
15 | std_fac_sta = np.nanmean(df_std_sta.exp.values[-3:])
16 | df_std_sta.exp /= std_fac_sta
17 | df_std_sta.err_exp /= std_fac_sta
18 |
19 | name_types = ['spherical', 'exponential', 'gaussian']
20 | list_types = ['Sph', 'Exp', 'Gau']
21 | list_fun, list_params, list_labels = ([] for i in range(3))
22 | for i in range(len(list_types)):
23 | for j in range(len(list_types)):
24 |
25 | fun, params = xdem.spatialstats.fit_sum_model_variogram(list_model=[list_types[i], list_types[j]], empirical_variogram=df_std_sta,
26 | bounds=[(0, 1000), (0, 1), (1000, 20000), (0, 1)],
27 | p0=[500, 0.5, 15000, 0.5])
28 |
29 | list_fun.append(fun)
30 | list_params.append(params)
31 | list_labels.append('Short: '+name_types[i]+'\nLong: '+name_types[j])
32 |
33 |
34 | # Initiate figure
35 | fig = plt.figure(figsize=(12,5.5))
36 |
37 | ylabel = "Standardized\nelevation variance"
38 | xlabel = 'Spatial lag (m)'
39 | xscale_range_split = [120, 640, 3600]
40 | xscale = 'linear'
41 | list_fit_fun = list_fun
42 | list_fit_fun_label = list_labels
43 | xlim = None
44 | ylim = (0, 1.15)
45 | list_df = [df_std_sta]
46 | col_df = ['tab:orange', 'tab:red', 'tab:brown', 'tab:blue', 'tab:green', 'tab:olive', 'lightgrey', 'darkgrey', 'black']
47 | label_df = ['Stable', 'Unstable']
48 | df0 = list_df[0]
49 |
50 |
51 | init_gridsize = [10, 20]
52 | # Create parameters to split x axis into different linear scales
53 | # If there is no split, get parameters for a single subplot
54 | if xscale_range_split is None:
55 | nb_subpanels = 1
56 | if xscale == 'log':
57 | xmin = [np.min(df0.bins) / 2]
58 | else:
59 | xmin = [0]
60 | xmax = [np.max(df0.bins)]
61 | xgridmin = [0]
62 | xgridmax = [init_gridsize[0]]
63 | gridsize = init_gridsize
64 | # Otherwise, derive a list for each subplot
65 | else:
66 | # Add initial zero if not in input
67 | if xscale_range_split[0] != 0:
68 | if xscale == 'log':
69 | first_xmin = np.min(df0.bins) / 2
70 | else:
71 | first_xmin = 0
72 | xscale_range_split = [first_xmin] + xscale_range_split
73 | # Add maximum distance if not in input
74 | if xscale_range_split[-1] != np.max(df0.bins):
75 | xscale_range_split.append(15000)
76 |
77 | # Scale grid size by the number of subpanels
78 | nb_subpanels = len(xscale_range_split) - 1
79 | gridsize = init_gridsize.copy()
80 | gridsize[1] = 18
81 | gridsize[0] *= nb_subpanels
82 | # Create list of parameters to pass to ax/grid objects of subpanels
83 | xmin, xmax, xgridmin, xgridmax = ([] for i in range(4))
84 | for i in range(nb_subpanels):
85 | xmin.append(xscale_range_split[i])
86 | xmax.append(xscale_range_split[i + 1])
87 | xgridmin.append(init_gridsize[0] * i)
88 | xgridmax.append(init_gridsize[0] * (i + 1))
89 |
90 | # Need a grid plot to show the sample count and the statistic
91 | grid = plt.GridSpec(gridsize[1], gridsize[0], wspace=0.1, hspace=0.1)
92 |
# Loop over each subpanel (panel a: empirical variogram + all 9 fitted models)
for k in [1,0,2,3]:
    # First, an axis to plot the sample histogram
    ax0 = fig.add_subplot(grid[:3, xgridmin[k]:xgridmax[k]])
    ax0.set_xscale(xscale)
    ax0.set_xticks([])

    # Plot the histogram manually with fill_between
    interval_var = [0] + list(df0.bins)
    for i in range(len(df0)):
        width = interval_var[i+1] - interval_var[i]
        mid = interval_var[i] + width/2
        count = list_df[0]['count'].values[i]
        ax0.fill_between([mid-width/3, mid+width/3], [0] * 2, [count] * 2,
                         facecolor='black', alpha=1,
                         edgecolor='black', linewidth=0.5)
        ax0.vlines(mid-width/2, ymin=[0], ymax=1.2*max(list_df[0]['count'].values), colors='tab:gray', linestyles='dashed', linewidths=0.5)
    if k == 0:
        ax0.set_ylabel('Pairwise\nsample\ncount')
        # Scientific format to avoid undesired additional space on the label side
        ax0.ticklabel_format(axis='y', style='sci', scilimits=(0, 0))
    else:
        ax0.set_yticks([])
    # Ignore warnings for log scales
    ax0.set_xlim((xmin[k], xmax[k]))
    ax0.set_ylim((0, 1.2*max(list_df[0]['count'].values)))

    # Now, plot the statistic of the data
    ax = fig.add_subplot(grid[3:10, slice(xgridmin[k],xgridmax[k])])

    # Get the bins center and plot the empirical variogram
    for i, df in enumerate(list_df):
        bins_center = np.subtract(df.bins, np.diff([0] + df.bins.tolist()) / 2)
        ax.errorbar(bins_center, df.exp, yerr=df.err_exp, fmt='x', color='black')

    # Dashed vertical separators at the bin edges
    for i in range(len(df0)):
        width = interval_var[i + 1] - interval_var[i]
        mid = interval_var[i] + width / 2
        ax.vlines(mid - width / 2, ymin=[0], ymax=2*max(df0.exp), colors='tab:gray', linestyles='dashed', linewidths=0.5)

    # ax.hlines(1, xmin=xmin[k], xmax=xmax[k], colors='black', linestyles='dotted')
    # If a list of functions is passed, plot the modelled variograms
    if list_fit_fun is not None:
        for i, fit_fun in enumerate(list_fit_fun):
            x = np.linspace(xmin[k], xmax[k], 1000)

            ax.plot(x, fit_fun(x), linestyle='dashdot', color=col_df[i], zorder=30, linewidth=1.5, label=list_fit_fun_label[i])

    ax.set_xscale(xscale)
    ax.set_xticks([])

    if xlim is None:
        ax.set_xlim((xmin[k], xmax[k]))
    else:
        ax.set_xlim(xlim)

    if ylim is not None:
        ax.set_ylim(ylim)
    else:
        ax.set_ylim((0, np.nanmax(df_std_sta.exp) + np.nanmean(df_std_sta.err_exp)))

    # Legend with the 9 model-combination labels on the last subpanel only
    if k == nb_subpanels - 1:
        handles, labels = plt.gca().get_legend_handles_labels()
        order = [0, 1, 2, 3, 4, 5, 6, 7, 8]
        ax.legend([handles[idx] for idx in order], [labels[idx] for idx in order], loc='lower right', ncol=3)
    if k == 0:
        ax.set_ylabel(ylabel)
        ax.text(0.1, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

    else:
        ax.set_yticks([])
164 |
165 |
166 | ylim = (0.9, 1.025)
167 | # Loop over each subpanel
168 | for k in [1,0,2,3]:
169 | # Now, plot the statistic of the data
170 | ax = fig.add_subplot(grid[11:, slice(xgridmin[k],xgridmax[k])])
171 |
172 | # Get the bins center
173 | for i, df in enumerate(list_df):
174 | bins_center = np.subtract(df.bins, np.diff([0] + df.bins.tolist()) / 2)
175 | ax.scatter(bins_center, df.exp, marker='x', color='black')
176 |
177 | for i in range(len(df0)):
178 | width = interval_var[i + 1] - interval_var[i]
179 | mid = interval_var[i] + width / 2
180 | ax.vlines(mid - width / 2, ymin=[0], ymax=2*max(df0.exp), colors='tab:gray', linestyles='dashed', linewidths=0.5)
181 |
182 | # ax.hlines(1, xmin=xmin[k], xmax=xmax[k], colors='black', linestyles='dotted')
183 | # If a list of functions is passed, plot the modelled variograms
184 | if list_fit_fun is not None:
185 | for i, fit_fun in enumerate(list_fit_fun):
186 | x = np.linspace(xmin[k], xmax[k], 1000)
187 |
188 | ax.plot(x, fit_fun(x), linestyle='dashdot', color=col_df[i], zorder=30, linewidth=1.5, label=list_fit_fun_label[i])
189 |
190 | ax.set_xscale(xscale)
191 | if k == 1:
192 | ax.set_xlabel(xlabel, x=1, ha='center')
193 |
194 | # ax.set_xticks([])
195 |
196 | if xlim is None:
197 | ax.set_xlim((xmin[k], xmax[k]))
198 | else:
199 | ax.set_xlim(xlim)
200 |
201 | if ylim is not None:
202 | ax.set_ylim(ylim)
203 | else:
204 | ax.set_ylim((0, np.nanmax(df_std_sta.exp) + np.nanmean(df_std_sta.err_exp)))
205 |
206 | if k == nb_subpanels - 1:
207 | ax.scatter([], [], color='black', label='Empirical variogram', marker='x')
208 | handles, labels = plt.gca().get_legend_handles_labels()
209 | order = [9]
210 | ax.legend([handles[idx] for idx in order], [labels[idx] for idx in order], loc='lower right', ncol=1)
211 | if k == 0:
212 | ax.set_ylabel(ylabel)
213 | ax.text(0.1, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
214 |
215 | else:
216 | ax.set_yticks([])
217 |
218 |
219 | # Save to file
220 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S15_final.png', dpi=400)
221 |
# Tables S3 and S4: residual sum of squares of each fitted model against the empirical
# variogram, over all lags, long lags only (>1 km) and short lags only (<=1 km)
list_rss = [sum((f(df_std_sta.bins.values) - df_std_sta.exp.values)**2) for f in list_fun]
list_rss_long = [sum((f(df_std_sta.bins.values[df_std_sta.bins.values>1000]) - df_std_sta.exp.values[df_std_sta.bins.values>1000])**2) for f in list_fun]
# Fix: the exp.values mask previously used "<1000" while the bins.values mask used "<=1000";
# a bin at exactly 1000 m would make the two arrays different lengths and break (or misalign)
# the subtraction. Both masks now use "<=1000", complementary to the ">1000" of the long lags.
list_rss_short = [sum((f(df_std_sta.bins.values[df_std_sta.bins.values<=1000]) - df_std_sta.exp.values[df_std_sta.bins.values<=1000])**2) for f in list_fun]
--------------------------------------------------------------------------------
/figures/fig_s16_spatial_derivative_approx.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S16: theoretical approximation for variogram integration"""
2 | from typing import Callable
3 | import matplotlib.pyplot as plt
4 | import numpy as np
5 | from geoutils import Raster, Vector
6 | import xdem
7 | from scipy.spatial.distance import pdist
8 | import skgstat
9 | import pandas as pd
10 | import cartopy.crs as ccrs
11 | from cartopy.io.shapereader import Reader
12 | from cartopy.feature import ShapelyFeature
13 |
14 | # Open data
15 | fn_dem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
16 | r = Raster(fn_dem).reproject(dst_res=200)
17 |
18 | # Shapes with area equal to that of the Mer de Glace
19 | fn_shp_disk = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/disk_mdg_area.shp'
20 | fn_shp_rectangle = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/rectangle_mdg_area.shp'
21 | fn_shp_mdg = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/mdg.shp'
22 |
23 | fn_shp_rgi_reg11 = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_renamed/11_rgi60_CentralEurope/region_11_rgi60_CentralEurope.shp'
24 | mdg_id = 'RGI60-11.03643'
25 |
26 | disk = Vector(fn_shp_disk)
27 | rectangle = Vector(fn_shp_rectangle)
28 | rgi_reg11 = Vector(fn_shp_rgi_reg11)
29 | mdg = Vector(rgi_reg11.ds[rgi_reg11.ds['RGIId'].isin([mdg_id])])
30 |
31 | area_mdg = 24.179
32 |
33 | mask_disk = disk.create_mask(r)
34 | mask_rectangle = rectangle.create_mask(r)
35 | mask_mdg = mdg.create_mask(r)
36 |
37 | coords_r = np.array(r.coords())
38 | disk_coords = coords_r[:, mask_disk.squeeze()]
39 | rectangle_coords = coords_r[:, mask_rectangle.squeeze()]
40 | mdg_coords = coords_r[:, mask_mdg.squeeze()]
41 | # # radius of circle of this area
42 | # r = np.sqrt(area/np.pi)
43 | # # side of square of this area
44 | # l = np.sqrt(area)
45 | # # sides of 1x9 rectangle of this area
46 | # short_l = l/3
47 | # long_l = l*3
48 |
49 | list_shape = ['Disk', 'Mer de Glace', 'Rectangle']
50 | list_coords = [disk_coords, mdg_coords, rectangle_coords]
51 |
52 | # Exact solution
53 | def double_sum_covar(coords: np.ndarray, areas: np.ndarray, errors: list[np.ndarray],
54 | vgm_funcs: list[Callable]):
55 | """
56 | Double sum of covariance for euclidean coordinates
57 | :param coords: Spatial support (typically, pixel) coordinates
58 | :param areas: Area of supports
59 | :param errors: Standard errors of supports
60 | :param vgm_funcs: Variogram function
61 | :return:
62 | """
63 |
64 | n = len(coords)
65 | pds = pdist(coords)
66 | var = 0
67 | for i in range(n):
68 | for j in range(n):
69 |
70 | # For index calculation of the pairwise distance, see https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.pdist.html
71 | if i == j:
72 | d = 0
73 | elif i < j:
74 | ind = n * i + j - ((i + 2) * (i + 1)) // 2
75 | d = pds[ind]
76 | else:
77 | ind = n * j + i - ((j + 2) * (j + 1)) // 2
78 | d = pds[ind]
79 |
80 | for k in range(len(vgm_funcs)):
81 | var += errors[k][i] * errors[k][j] * (1 - vgm_funcs[k](d)) * areas[i] * areas[j]
82 |
83 | total_area = sum(areas)
84 | se_dsc = np.sqrt(var / total_area ** 2)
85 |
86 | return se_dsc
87 |
88 |
# Approximate solution
def double_sum_covar_quick(coords: np.ndarray, errors: list[np.ndarray],
                           vgm_funcs: list[Callable], nb_subsample=100):
    """
    Approximate double sum of covariance for euclidean coordinates, integrating over a
    random subset of points paired with all points instead of over all pairs.

    :param coords: Spatial support (typically, pixel) coordinates
    :param errors: Standard errors of supports (one array per variogram model)
    :param vgm_funcs: Variogram function (one per error source)
    :param nb_subsample: Number of points used to subset the integration
    :return: Standard error of the spatial average
    """

    n = len(coords)

    # Draw without replacement; never request more points than exist
    rand_points = np.random.choice(n, size=min(nb_subsample, n), replace=False)
    pds = pdist(coords)

    var = 0
    # Fix: iterate the points actually drawn. The previous version looped over
    # range(nb_subsample) and indexed rand_points, raising an IndexError whenever
    # nb_subsample > n; for nb_subsample <= n the result is unchanged.
    for i in rand_points:
        for j in range(n):

            # For index calculation of the pairwise distance, see https://docs.scipy.org/doc/scipy/reference/generated/scipy.spatial.distance.pdist.html
            if i == j:
                d = 0
            elif i < j:
                ind = n * i + j - ((i + 2) * (i + 1)) // 2
                d = pds[ind]
            else:
                ind = n * j + i - ((j + 2) * (j + 1)) // 2
                d = pds[ind]

            for k in range(len(vgm_funcs)):
                var += errors[k][i] * errors[k][j] * (1 - vgm_funcs[k](d))

    # Normalize by the number of pair terms actually summed
    total_area = n * len(rand_points)
    se_dsc = np.sqrt(var / total_area)

    return se_dsc
128 |
# Correlation ranges to test: 400 m growing geometrically by 20% per step (30 values)
list_ranges = [400*1.2**i for i in np.arange(30)]

# For each range, compare the Rolstad circular approximation, the exact double sum
# and the subsampled double sum on the three equal-area shapes
list_df = []
for i in range(len(list_ranges)):

    corr_range = list_ranges[i]

    print('Working on correlation range: '+str(corr_range))

    # Unit-sill spherical variogram with the current correlation range
    def vgm_func_short(h):
        return skgstat.models.spherical(h, corr_range, 1)

    # For MDG, rectangle and disk
    # Rolstad et al. (2009) approximation: area in m² (km² * 1e6)
    neff_rolstad = xdem.spatialstats.neff_circ(area=area_mdg * 1000000, list_vgm=[(corr_range, 'Sph', 1)])
    err_rolstad = 1/np.sqrt(neff_rolstad)

    list_err_true, list_err_approx = ([] for i in range(2))

    for c in list_coords:

        print('Working on full double sum...')
        err_true = double_sum_covar(coords=c.T, areas=np.ones(len(c.T)), errors=[np.ones(len(c.T))], vgm_funcs=[vgm_func_short])

        print('Working on approximate double sum...')
        err_approx = double_sum_covar_quick(coords=c.T, errors=[np.ones(len(c.T))], vgm_funcs=[vgm_func_short], nb_subsample=100)

        list_err_true.append(err_true)
        list_err_approx.append(err_approx)

    # One-row dataframe per range; list_coords order is disk, mdg, rectangle
    df_tmp = pd.DataFrame()
    df_tmp = df_tmp.assign(range=[corr_range], err_rolstad=[err_rolstad], err_true_disk=[list_err_true[0]], err_true_mdg=[list_err_true[1]],
                           err_true_rect=[list_err_true[2]], err_approx_disk=[list_err_approx[0]], err_approx_mdg=[list_err_approx[1]],
                           err_approx_rect=[list_err_approx[2]])
    list_df.append(df_tmp)

df = pd.concat(list_df)
165 |
166 |
# Initiate figure: standardized uncertainty in the spatial average vs
# correlation range, for three equal-area shapes and three integration methods
fig = plt.figure(figsize=(10, 8))

# Main scatter axis
# NOTE(review): a previous comment mentioned "sample histograms" but only
# scatter series are drawn on this axis
ax = plt.gca()

# One series per (method, shape): colors encode the shape, markers the method
ax.scatter(df['range'], df.err_rolstad, marker='x', color='black')
ax.scatter(df['range'], df.err_true_disk, marker='o', color='tab:orange')
ax.scatter(df['range'], df.err_true_rect, marker='o', color='tab:blue')
ax.scatter(df['range'], df.err_true_mdg, marker='o', color='tab:olive')
ax.scatter(df['range'], df.err_approx_disk, marker='<', color='tab:orange')
ax.scatter(df['range'], df.err_approx_rect, marker='<', color='tab:blue')
ax.scatter(df['range'], df.err_approx_mdg, marker='<', color='tab:olive')

# Empty artists used solely to build legend entries (markers = methods)
ax.scatter([], [], marker='x', color='black', label='Approx. of Rolstad et al. (2009)')
ax.scatter([], [], marker='o', color='black', label='Exact integration')
ax.scatter([], [], marker='<', color='black', label='Approx. of this study')

# Empty line artists for the color legend (colors = shapes)
p0 =ax.plot([], [], color='tab:orange', label='Disk shape')
p1 =ax.plot([], [], color='tab:blue', label='Rectangular shape')
p2 = ax.plot([], [], color='tab:olive', label='Mer de Glace shape')
ax.legend(loc='lower right', ncol=2)

# Log-scaled x axis: correlation ranges span several orders of magnitude
# ax.set_yscale('logit')
ax.set_xscale('log')
ax.set_xlabel('Correlation range of variogram (m)')
ax.set_ylabel('Standardized uncertainty\nin the spatial average')

ax.text(0.5, 0.95, 'Shapes with the same area (24 km²)', transform=ax.transAxes, ha='center', va='top', fontweight='bold', fontsize=12)

# Inset map: disk outline, drawn on an invisible georeferenced axis
ax = fig.add_axes([0, 0.4, 0.6, 0.6], projection=ccrs.UTM(32))

ax.set_extent((r.bounds.left, r.bounds.right, r.bounds.bottom, r.bounds.top), crs=ccrs.UTM(32))
ax.spines['geo'].set_visible(False)
ax.patch.set_visible(False)

shape_feature = ShapelyFeature(Reader(fn_shp_disk).geometries(), ccrs.UTM(32), edgecolor='tab:orange', alpha=1,
                               facecolor='None', linewidth=2, zorder=30)
ax.add_feature(shape_feature)

# Inset map: rectangle outline
ax = fig.add_axes([0.4, 0.15, 0.6, 0.6], projection=ccrs.UTM(32))

ax.set_extent((r.bounds.left, r.bounds.right, r.bounds.bottom, r.bounds.top), crs=ccrs.UTM(32))
ax.spines['geo'].set_visible(False)
ax.patch.set_visible(False)

shape_feature = ShapelyFeature(Reader(fn_shp_rectangle).geometries(), ccrs.UTM(32), edgecolor='tab:blue', alpha=1,
                               facecolor='None', linewidth=2, zorder=30)
ax.add_feature(shape_feature)

# Inset map: Mer de Glace outline
# NOTE(review): unlike the two shapefiles above, this one is declared in
# PlateCarree — presumably stored in lon/lat; confirm against the file's CRS
ax = fig.add_axes([0.4, 0.3, 0.6, 0.6], projection=ccrs.UTM(32))

ax.set_extent((r.bounds.left, r.bounds.right, r.bounds.bottom, r.bounds.top), crs=ccrs.UTM(32))
ax.spines['geo'].set_visible(False)
ax.patch.set_visible(False)
shape_feature = ShapelyFeature(Reader(fn_shp_mdg).geometries(), ccrs.PlateCarree(), edgecolor='tab:olive', alpha=1,
                               facecolor='None', linewidth=2, zorder=30)
ax.add_feature(shape_feature)

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S16_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_s17_sim_correlated_error_field_montblanc.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S17: simulated correlated error fields around the Mont-Blanc summit"""
2 | import gstools as gs
3 | import matplotlib.pyplot as plt
4 | import numpy as np
5 | from geoutils import Raster
6 | import xdem
7 | import time
8 | import pandas as pd
9 | import matplotlib.colors as colors
10 | import cartopy.crs as ccrs
11 | import matplotlib.patches as mpatches
12 |
# Open data: Pleiades 5 m DEM and its hillshade around the Mont-Blanc summit
fn_dem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
# Single simulated error field is generated in this script
n_sim = 1
fn_hs = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Mont-Blanc_2017-10-25_DEM_5m_hillshade.tif'
r = Raster(fn_dem)

# Around Mont-Blanc: crop extent as [xmin, ymin, xmax, ymax]
crop_ext = [333500, 5076000, 335500, 5078000]
r.crop(crop_ext)
hs = Raster(fn_hs)
hs.crop(crop_ext)

# Pre-computed heteroscedasticity (binned error vs slope/curvature) and
# standardized empirical variogram for the Mont-Blanc case study
fn_hetsce = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_heteroscedas_slope_curv.csv'
fn_vgm = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_std_sta.csv'

df_h = pd.read_csv(fn_hetsce)
df_v = pd.read_csv(fn_vgm)
# Keep only spatial lags below 30 km
df_v = df_v[df_v.bins<30000]
# Rescale the variogram error estimate
# NOTE(review): the sqrt(100)/2 factor presumably relates to the number of
# variogram estimation runs — confirm against the estimation script
df_v.err_exp /= np.sqrt(100)/2
# Standardize the variogram so its long-range level (mean of last 3 bins) is ~1
std_fac = np.nanmean(df_v.exp.values[-3:])
df_v.exp /= std_fac
df_v.err_exp /= std_fac
# Elevation array with nodata mapped to NaN
dem = np.copy(r.data.data).squeeze()
dem[dem==r.nodata] = np.nan

# Terrain attributes used as heteroscedasticity predictors
slope, aspect, planc, profc = xdem.terrain.get_terrain_attribute(dem, resolution=r.res[0], attribute=['slope', 'aspect', 'planform_curvature',
                                                                                                      'profile_curvature'])
# Maximum absolute curvature between planform and profile curvature
maxabsc = np.maximum(np.abs(planc), np.abs(profc))

shape = np.shape(dem)
# Grid/Raster of 1000 x 1000 pixels: coordinates in meters from the DEM resolution
x = np.arange(0, shape[0]) * r.res[0]
y = np.arange(0, shape[1]) * r.res[0]

# Fit a sum of three Gaussian models to the standardized empirical variogram;
# p0 and bounds are (range, sill) pairs for each component
_, params = xdem.spatialstats.fit_sum_model_variogram(list_model=['Gau', 'Gau', 'Gau'], empirical_variogram=df_v,
                                                      bounds=[(0, 200), (0, 9), (500, 5000), (0, 9), (2000, 15000), (0,9)],
                                                      p0=[100, 1.5, 2000,1.5, 5000,1.5])

# 2D interpolant of the binned error in (slope, max. absolute curvature) space
fn = xdem.spatialstats.interp_nd_binning(df_h, list_var_names=['slope_mid', 'maxc_mid'], statistic='nmad', min_count=30)
# Clamp curvature to the binned domain before interpolating
maxabsc[maxabsc>50] = 50
dh_err = fn((slope.data, maxabsc))

# Stable-terrain NMAD used for the homoscedastic scenarios
nmad_stable = 1.60
# nmad_stable = np.nanmedian(dh_err)

# Need to specify the rescale factor to match skgstat and gstools
model_s_alone = gs.Gaussian(dim=2, var=1, len_scale=params[0], rescale=2)

# Gaussian components with fitted (range, sill) pairs: short, mid and long range
model_s = gs.Gaussian(dim=2, var=params[1], len_scale=params[0], rescale=2)
model_l = gs.Gaussian(dim=2, var=params[3], len_scale=params[2], rescale=2)
model_l2 = gs.Gaussian(dim=2, var=params[5], len_scale=params[4], rescale=2)
64 |
# Pre-allocate stacks for the 6 noise scenarios
# NOTE: kept for symmetry with the looped version in fig_s18; not filled here
sim_slope_dems, sim_aspect_dems = (np.empty((6, n_sim,) + shape, dtype=np.float32) for i in range(2))

# Single simulation (n_sim = 1)
# Fix: index was left at 1 from a removed loop, making the progress message
# read "simulation 2" although only one simulation is run
i = 0
print('Working on simulation '+str(i+1))

print('Generating random field...')

t0 = time.time()

# Using GSTools, let's generate a correlated signal at two different length: 5 and 100 (spherical)
srf_s_alone = gs.SRF(model_s_alone, mode_no=100)
srf_s = gs.SRF(model_s, mode_no=100)
srf_l = gs.SRF(model_l, mode_no=100)
srf_l2 = gs.SRF(model_l2, mode_no=100)

# We combine the two random correlated fields (e.g, short-range could represent resolution, and long-range the noise)
field_s_alone = srf_s_alone.structured([x, y])

field_s = srf_s((x, y), mesh_type='structured')
field_l = srf_l((x, y), mesh_type='structured')
field_l2 = srf_l2((x, y), mesh_type='structured')

# Stationary variance with purely random noise
pixel_noise = np.random.normal(0, 1, size=np.shape(dem))
noisy_stationary_dem = dem + pixel_noise * nmad_stable

# Heteroscedasticity with purely random noise
noisy_hetsce_dem = dem + pixel_noise * dh_err

# Stationary variance with correlated noise (short, and short+long range)
noisy_stationary_sr_dem = dem + nmad_stable * field_s_alone
# Fix: removed a stray duplicated "+" (harmless unary plus) so the expression
# matches the equivalent line in fig_s18
noisy_stationary_lr_dem = dem + nmad_stable * field_s + nmad_stable * (field_l + field_l2)

# Heteroscedasticity with correlated noise
# !! Careful !! The long-range noise is scaled to the average variance, as it is not linked to heteroscedasticity
noisy_hetsce_sr_dem = dem + dh_err * field_s_alone
noisy_hetsce_lr_dem = dem + dh_err * field_s + nmad_stable * (field_l + field_l2)
102 |
# Function to plot a submap
def add_submap(fig, slices_grid, array, cmap, col_bounds, label, pos_colorbar=None, add_colorbar=True, label_colorbar=None, add_panel_letter=None, add_scale=False):
    """Plot one simulated error field as a georeferenced subpanel of the figure.

    Relies on the module-level names ``grid`` (GridSpec), ``r`` (cropped raster,
    used as a georeferencing template) and ``crop_ext`` (map extent).

    :param fig: Figure to add the subpanel to.
    :param slices_grid: Pair of slices indexing the module-level GridSpec.
    :param array: 2D array to display (same shape as raster ``r``).
    :param cmap: Base colormap, resampled at the ``col_bounds`` breakpoints.
    :param col_bounds: Color-scale breakpoints; min/max define the display range.
    :param label: Title drawn above the subpanel.
    :param pos_colorbar: Inset-axes rectangle for the colorbar (default: right side).
    :param add_colorbar: Whether to draw a colorbar.
    :param label_colorbar: Colorbar label text.
    :param add_panel_letter: Optional panel letter ('a', 'b', ...).
    :param add_scale: Whether to draw a 0–1 km distance scale bar.
    """

    ax0 = fig.add_subplot(grid[slices_grid[0], slices_grid[1]], projection=ccrs.UTM(32), label=label)

    # Copy the raster to reuse its georeferencing for the displayed array
    tmp_disp = r.copy()
    ext = [tmp_disp.bounds[0], tmp_disp.bounds[2], tmp_disp.bounds[1], tmp_disp.bounds[3]]

    tmp_disp.data[0, :, :] = array

    # Build a colormap with breakpoints at col_bounds (non-linear color scale)
    cb = []
    cb_val = np.linspace(0, 1, len(col_bounds))
    for j in range(len(cb_val)):
        cb.append(cmap(cb_val[j]))
    cmap_cus2 = colors.LinearSegmentedColormap.from_list('my_cb', list(
        zip((col_bounds - min(col_bounds)) / (max(col_bounds - min(col_bounds))), cb)), N=1000)
    cmap_cus2.set_bad(color='None')
    ax0.imshow(tmp_disp.data[0, :, :], extent=ext, transform=ccrs.UTM(32), vmin=min(col_bounds), vmax=max(col_bounds), cmap=cmap_cus2,
               interpolation=None, zorder=3, alpha=0.85)
    # ax0.gridlines(draw_labels=False, dms=True, x_inline=False, y_inline=False)
    # Panel title above the axes
    ax0.text(0.5, 1.025, label, transform=ax0.transAxes, ha='center', va='bottom', fontweight='bold', fontsize=9, zorder=20)

    if add_panel_letter is not None:
        ax0.text(0.05, 0.95, add_panel_letter, transform=ax0.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
                 zorder=20)

    # Default colorbar position: vertical bar on the right of the panel
    if pos_colorbar is None:
        # pos = [0.2, -0.15, 0.6, 0.05]
        pos = [1.05, 0.2, 0.05, 0.6]
    else:
        pos = pos_colorbar

    if add_scale:
        # Two 500 m segments (black then white) with 0/0.5/1 km tick labels,
        # positioned relative to the map extent
        y_extent = tmp_disp.bounds.top - tmp_disp.bounds.bottom
        x_extent = tmp_disp.bounds.right - tmp_disp.bounds.left
        ax0.add_patch(mpatches.Rectangle((crop_ext[2] - x_extent / 20 - 1000, crop_ext[1] + y_extent / 5), 500, 75,
                                         edgecolor='black', facecolor='black', transform=ccrs.UTM(32), zorder=10,
                                         linewidth=0.5))
        ax0.add_patch(mpatches.Rectangle((crop_ext[2] - x_extent / 20 - 500, crop_ext[1] + y_extent / 5), 500, 75,
                                         edgecolor='black', facecolor='white', transform=ccrs.UTM(32), zorder=10,
                                         linewidth=0.5))
        ax0.text(crop_ext[2] - x_extent / 20 - 1000, crop_ext[1] + y_extent / 5 - 20, '0', ha='center', va='top',
                 transform=ccrs.UTM(32), zorder=10)
        ax0.text(crop_ext[2] - x_extent / 20 - 500, crop_ext[1] + y_extent / 5 - 20, '0.5', ha='center', va='top',
                 transform=ccrs.UTM(32), zorder=10)
        ax0.text(crop_ext[2] - x_extent / 20 - 500, crop_ext[1] + y_extent / 5 - 150, 'km', ha='center', va='top',
                 transform=ccrs.UTM(32), zorder=10)
        ax0.text(crop_ext[2] - x_extent / 20 - 0, crop_ext[1] + y_extent / 5 - 20, '1', ha='center', va='top',
                 transform=ccrs.UTM(32), zorder=10)

    if add_colorbar:
        cbaxes = ax0.inset_axes(pos, zorder=10)

        # The colorbar uses the base (linear) colormap, not the breakpoint one
        norm = colors.Normalize(vmin=min(col_bounds), vmax=max(col_bounds))
        sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
        sm.set_array([])
        cb = plt.colorbar(sm, cax=cbaxes, ticks=[-2, -1, 0, 1, 2], orientation='vertical', extend='both', shrink=0.2)
        cb.set_label(label_colorbar)
161 |
162 |
# Initiate figure and plot submaps
fig = plt.figure(figsize=(6, 12))
grid = plt.GridSpec(40, 20, wspace=0.1, hspace=0.1)

# Shared color scale (meters of simulated error) across all panels
col_bounds = np.array([-2., -0.5, 0., 0.5, 2.])
cmap = plt.get_cmap('RdYlBu')
# Left column: homoscedastic scenarios (panels a, c, e, g);
# right column: heteroscedastic ones (panels b, d, f, h)
add_submap(fig, slices_grid=(slice(1, 9), slice(0, 10)), array=pixel_noise * nmad_stable, cmap=cmap, col_bounds=col_bounds,
           label='Homosc.,\nno corr.', add_colorbar=False, add_panel_letter='a', add_scale=True)
add_submap(fig, slices_grid=(slice(11, 19), slice(0, 10)), array=nmad_stable * field_s_alone, cmap=cmap, col_bounds=col_bounds,
           label='Homosc.,\nshort-range', add_colorbar=False, add_panel_letter='c')

add_submap(fig, slices_grid=(slice(31, 39), slice(0, 10)), array=nmad_stable * field_s + nmad_stable * field_l, cmap=cmap, col_bounds=col_bounds,
           label='Homosc.,\nlong-range', add_colorbar=False, add_panel_letter='g')

add_submap(fig, slices_grid=(slice(1, 9), slice(10, 20)), array=pixel_noise * dh_err, cmap=cmap, col_bounds=col_bounds,
           label='Heterosc.,\nno corr.', add_colorbar=False, add_panel_letter='b')
# Only panel d draws the (shared) colorbar, spanning the right margin
add_submap(fig, slices_grid=(slice(11, 19), slice(10, 20)), array=dh_err * field_s_alone, cmap=cmap, col_bounds=col_bounds,
           label='Heterosc.,\nshort-range', label_colorbar='Simulated random elevation error (m)', pos_colorbar=[1.05, -0.7, 0.075, 1.4], add_panel_letter='d')
add_submap(fig, slices_grid=(slice(31, 39), slice(10, 20)), array=dh_err * field_s + dh_err * field_l, cmap=cmap, col_bounds=col_bounds,
           label='Heterosc.,\nlong-range', add_colorbar=False, add_panel_letter='h')

# Middle row (panels e, f): long-range component alone
add_submap(fig, slices_grid=(slice(21, 29), slice(0, 10)), array=nmad_stable * field_l, cmap=cmap, col_bounds=col_bounds,
           label='Homosc.,\nonly long-range', add_colorbar=False, add_panel_letter='e')
add_submap(fig, slices_grid=(slice(21, 29), slice(10, 20)), array=dh_err * field_l, cmap=cmap, col_bounds=col_bounds,
           label='Heterosc.,\nonly long-range', add_colorbar=False, add_panel_letter='f')

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S17_final.png', dpi=300)
--------------------------------------------------------------------------------
/figures/fig_s18_sim_assymetry_slope_aspect_errors.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S18: assymetry of slope and aspect errors for the Mont-Blanc summit"""
2 | import gstools as gs
3 | import matplotlib.pyplot as plt
4 | import numpy as np
5 | from geoutils import Raster
6 | import xdem
7 | import time
8 | import pandas as pd
9 |
# Open data: Pleiades 5 m DEM around the Mont-Blanc summit
fn_dem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
# Number of simulated error fields
n_sim = 200
r = Raster(fn_dem)

# Crop around Mont-Blanc: [xmin, ymin, xmax, ymax]
crop_ext = [334500, 5077000, 335000, 5077500]
r.crop(crop_ext)

# Pre-computed heteroscedasticity and standardized variogram estimates
fn_hetsce = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_heteroscedas_slope_curv.csv'
fn_vgm = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_std_sta.csv'

df_h = pd.read_csv(fn_hetsce)
df_v = pd.read_csv(fn_vgm)
# Keep only spatial lags below 30 km
df_v = df_v[df_v.bins<30000]
# Rescale the variogram error estimate
# NOTE(review): the sqrt(100)/2 factor presumably relates to the number of
# variogram estimation runs — confirm against the estimation script
df_v.err_exp /= np.sqrt(100)/2
# Standardize the variogram so its long-range level (mean of last 3 bins) is ~1
std_fac = np.nanmean(df_v.exp.values[-3:])
df_v.exp /= std_fac
df_v.err_exp /= std_fac
# Elevation array with nodata mapped to NaN
dem = np.copy(r.data.data).squeeze()
dem[dem==r.nodata] = np.nan

# Terrain attributes used as heteroscedasticity predictors
slope, aspect, planc, profc = xdem.terrain.get_terrain_attribute(dem, resolution=r.res[0], attribute=['slope', 'aspect', 'planform_curvature',
                                                                                                      'profile_curvature'])
# Maximum absolute curvature between planform and profile curvature
maxabsc = np.maximum(np.abs(planc), np.abs(profc))

shape = np.shape(dem)
# Grid/Raster of 1000 x 1000 pixels: coordinates in meters from the DEM resolution
x = np.arange(0, shape[0]) * r.res[0]
y = np.arange(0, shape[1]) * r.res[0]

# Fit a sum of three Gaussian models to the standardized empirical variogram;
# p0 and bounds are (range, sill) pairs for each component
_, params = xdem.spatialstats.fit_sum_model_variogram(list_model=['Gau', 'Gau', 'Gau'], empirical_variogram=df_v,
                                                      bounds=[(0, 200), (0, 9), (500, 5000), (0, 9), (2000, 15000), (0,9)],
                                                      p0=[100, 1.5, 2000,1.5, 5000,1.5])

# 2D interpolant of the binned error in (slope, max. absolute curvature) space
fn = xdem.spatialstats.interp_nd_binning(df_h, list_var_names=['slope_mid', 'maxc_mid'], statistic='nmad', min_count=30)
# Clamp curvature to the binned domain before interpolating
maxabsc[maxabsc>50] = 50
dh_err = fn((slope.data, maxabsc))

# Stable-terrain NMAD used for the homoscedastic scenarios
nmad_stable = 1.60
# nmad_stable = np.nanmedian(dh_err)

# Need to specify the rescale factor to match skgstat and gstools
model_s_alone = gs.Gaussian(dim=2, var=1, len_scale=params[0], rescale=2)

# Gaussian components with fitted (range, sill) pairs: short, mid and long range
model_s = gs.Gaussian(dim=2, var=params[1], len_scale=params[0], rescale=2)
model_l = gs.Gaussian(dim=2, var=params[3], len_scale=params[2], rescale=2)
model_l2 = gs.Gaussian(dim=2, var=params[5], len_scale=params[4], rescale=2)

# Output stacks: 6 noise scenarios x n_sim simulations x DEM shape
sim_slope_dems, sim_aspect_dems = (np.empty((6, n_sim,) + shape, dtype=np.float32) for i in range(2))
60 |
# Monte Carlo loop: for each simulation, generate the random fields, build the
# six noisy DEM scenarios, derive slope/aspect and store them in the stacks
for i in range(n_sim):

    print('Working on simulation '+str(i+1))

    print('Generating random field...')

    t0 = time.time()

    # Using GSTools, let's generate a correlated signal at two different length: 5 and 100 (spherical)
    srf_s_alone = gs.SRF(model_s_alone, mode_no=100)
    srf_s = gs.SRF(model_s, mode_no=100)
    srf_l = gs.SRF(model_l, mode_no=100)
    srf_l2 = gs.SRF(model_l2, mode_no=100)

    # We combine the two random correlated fields (e.g, short-range could represent resolution, and long-range the noise)
    field_s_alone = srf_s_alone.structured([x, y])

    field_s = srf_s((x, y), mesh_type='structured')
    field_l = srf_l((x, y), mesh_type='structured')
    field_l2 = srf_l2((x, y), mesh_type='structured')

    # Stationary variance with purely random noise
    pixel_noise = np.random.normal(0, 1, size=np.shape(dem))
    noisy_stationary_dem = dem + pixel_noise * nmad_stable

    # Heteroscedasticity with purely random noise
    noisy_hetsce_dem = dem + pixel_noise * dh_err

    # Stationary variance with correlated noise (short, and short+long range)
    noisy_stationary_sr_dem = dem + nmad_stable * field_s_alone
    noisy_stationary_lr_dem = dem + nmad_stable * field_s + nmad_stable * (field_l + field_l2)

    # Heteroscedasticity with correlated noise
    # !! Careful !! The long-range noise is scaled to the average variance, as it is not linked to heteroscedasticity
    noisy_hetsce_sr_dem = dem + dh_err * field_s_alone
    noisy_hetsce_lr_dem = dem + dh_err * field_s + nmad_stable * (field_l + field_l2)

    t1 = time.time()

    print('Elapsed: {:.1f} seconds'.format(t1-t0))

    print('Deriving slopes...')

    # Slope/aspect of each noisy DEM scenario
    slope_stationary, aspect_stationary = xdem.terrain.get_terrain_attribute(noisy_stationary_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_hetsce, aspect_hetsce = xdem.terrain.get_terrain_attribute(noisy_hetsce_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_stationary_sr, aspect_stationary_sr = xdem.terrain.get_terrain_attribute(noisy_stationary_sr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_stationary_lr, aspect_stationary_lr = xdem.terrain.get_terrain_attribute(noisy_stationary_lr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_hetsce_sr, aspect_hetsce_sr = xdem.terrain.get_terrain_attribute(noisy_hetsce_sr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_hetsce_lr, aspect_hetsce_lr = xdem.terrain.get_terrain_attribute(noisy_hetsce_lr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])

    t2 = time.time()
    print('Elapsed: {:.1f} seconds'.format(t2-t1))

    # Store results: first axis is the scenario index (0=homosc. no corr.,
    # 1=heterosc. no corr., 2/3=homosc. short/long range, 4/5=heterosc. short/long range)
    sim_slope_dems[0, i, :, :] = slope_stationary
    sim_slope_dems[1, i, :, :] = slope_hetsce
    sim_slope_dems[2, i, :, :] = slope_stationary_sr
    sim_slope_dems[3, i, :, :] = slope_stationary_lr
    sim_slope_dems[4, i, :, :] = slope_hetsce_sr
    sim_slope_dems[5, i, :, :] = slope_hetsce_lr

    sim_aspect_dems[0, i, :, :] = aspect_stationary
    sim_aspect_dems[1, i, :, :] = aspect_hetsce
    sim_aspect_dems[2, i, :, :] = aspect_stationary_sr
    sim_aspect_dems[3, i, :, :] = aspect_stationary_lr
    sim_aspect_dems[4, i, :, :] = aspect_hetsce_sr
    sim_aspect_dems[5, i, :, :] = aspect_hetsce_lr
127 |
128 |
129 |
# Convert simulated aspects (degrees) to northness/eastness components
# NOTE(review): eastness is computed but never used below
northness = np.cos(sim_aspect_dems * np.pi / 180)
eastness = np.sin(sim_aspect_dems * np.pi / 180)

## Initiate figure: 2x2 histogram panels comparing the "homoscedastic, no
## correlation" scenario (index 0) with "heteroscedastic, long-range" (index 5)
fig = plt.figure(figsize=(6.5, 6.5))

grid = plt.GridSpec(22, 23, wspace=0.1, hspace=0.1)

ax = fig.add_subplot(grid[0:10, 0:10])

# Slope error distribution on low slopes
ind = np.logical_and(slope>0, slope<20)

# Gather the selected pixels across all simulations
statio = sim_slope_dems[0, :, ind].flatten()
hetsce_lr = sim_slope_dems[5, :, ind].flatten()
ax.hist(statio, alpha=0.5, facecolor='white', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True, label='Homosc.,\nno corr.')
ax.hist(hetsce_lr, alpha=0.5, facecolor='tab:green', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True, label='Heterosc.,\nlong-range')
ax.text(0.5, 0.8, '0° < Initial slope < 20°', transform=ax.transAxes, ha='center', va='top')
ax.text(0.05, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
        zorder=20)
ax.set_ylabel('Density')
ax.set_xlabel('Simulated slope (degrees)')
ax.legend(loc='center right')

# Slope error distribution on high slopes
ax = fig.add_subplot(grid[0:10, 13:])

ind = np.logical_and(slope>70, slope<90)

statio = sim_slope_dems[0, :, ind].flatten()
hetsce_lr = sim_slope_dems[5, :, ind].flatten()
ax.hist(statio, alpha=0.5, facecolor='white', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True)
ax.hist(hetsce_lr, alpha=0.5, facecolor='tab:green', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True)
ax.text(0.5, 0.8, '70° < Initial slope < 90°', transform=ax.transAxes, ha='center', va='top')
ax.text(0.05, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
        zorder=20)
ax.set_ylabel('Density')
ax.set_ylim((0, 0.16))
ax.set_xlabel('Simulated slope (degrees)')

# Aspect error distribution on low slopes
ax = fig.add_subplot(grid[12:, 0:10])

ind = np.logical_and(slope>0, slope<20)

statio = northness[0, :, ind].flatten()
hetsce_lr = northness[5, :, ind].flatten()
ax.hist(statio, alpha=0.5, facecolor='white', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True)
ax.hist(hetsce_lr, alpha=0.5, facecolor='tab:green', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True)
ax.text(0.5, 0.8, '0° < Initial slope < 20°', transform=ax.transAxes, ha='center', va='top')
ax.text(0.05, 0.95, 'c', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
        zorder=20)
ax.set_ylabel('Density')
ax.set_xlabel('Simulated northness')

# Aspect error distribution on high slopes
ax = fig.add_subplot(grid[12:, 13:])

ind = np.logical_and(slope>70, slope<90)

statio = northness[0, :, ind].flatten()
hetsce_lr = northness[5, :, ind].flatten()
ax.hist(statio, alpha=0.5, facecolor='white', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True)
ax.hist(hetsce_lr, alpha=0.5, facecolor='tab:green', bins=50, edgecolor='tab:gray', linewidth=0.5, density=True)
ax.text(0.5, 0.8, '70° < Initial slope < 90°', transform=ax.transAxes, ha='center', va='top')
ax.text(0.05, 0.95, 'd', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
        zorder=20)
ax.set_ylabel('Density')
ax.set_xlabel('Simulated northness')

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S18_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_s19_sim_3x3kernel_slope_error.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S19: impact of short-range correlation close to a 3x3 kernel size on slope errors"""
2 | import gstools as gs
3 | import matplotlib.pyplot as plt
4 | import numpy as np
5 | from geoutils import Raster
6 | import xdem
7 | import time
8 | import pandas as pd
9 | import seaborn as sns
10 |
# Open data: Pleiades 5 m DEM around the Mont-Blanc summit
fn_dem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
# Number of simulations per correlation length
n_sim = 200
r = Raster(fn_dem)

# Crop around Mont-Blanc: [xmin, ymin, xmax, ymax]
crop_ext = [333500, 5076000, 335500, 5078000]
r.crop(crop_ext)

# Elevation array with nodata mapped to NaN
dem = np.copy(r.data.data).squeeze()
dem[dem==r.nodata] = np.nan

# Terrain attributes of the original (noise-free) DEM
slope, aspect, planc, profc = xdem.terrain.get_terrain_attribute(dem, resolution=r.res[0], attribute=['slope', 'aspect', 'planform_curvature',
                                                                                                      'profile_curvature'])
maxabsc = np.maximum(np.abs(planc), np.abs(profc))

shape = np.shape(dem)
# Grid/Raster of 1000 x 1000 pixels: coordinates in meters from the DEM resolution
x = np.arange(0, shape[0]) * r.res[0]
y = np.arange(0, shape[1]) * r.res[0]


# Stable-terrain NMAD: noise amplitude (m) applied in all scenarios
nmad_stable = 1.60
# nmad_stable = np.nanmedian(dh_err)

# Need to specify the rescale factor to match skgstat and gstools

# Correlation lengths tested (m); at 5 m resolution this spans 0.25 to 20 pixels
list_len_scale=[1.25, 2.5, 5, 10, 15, 25, 50, 100]
list_model_s = []
for len_scale in list_len_scale:
    model_s = gs.Gaussian(dim=2, var=1, len_scale=len_scale, rescale=2)
    list_model_s.append(model_s)

# Output stacks: 8 correlation lengths x n_sim simulations x DEM shape
sim_slope_dems, sim_aspect_dems = (np.empty((8, n_sim,) + shape, dtype=np.float32) for i in range(2))
45 |
# Monte Carlo loop: for each simulation and each correlation length, generate a
# correlated noise field, add it to the DEM and store the resulting slope
for i in range(n_sim):

    print('Working on simulation '+str(i+1))

    print('Generating random field...')

    t0 = time.time()

    for j in range(len(list_len_scale)):
        # Using GSTools, let's generate a correlated signal at two different length: 5 and 100 (spherical)
        srf_s_alone = gs.SRF(list_model_s[j], mode_no=100)

        # We combine the two random correlated fields (e.g, short-range could represent resolution, and long-range the noise)
        field_s_alone = srf_s_alone.structured([x, y])

        # Stationary variance with correlated noise (short, and short+long range)
        noisy_stationary_sr_dem = dem + nmad_stable * field_s_alone

        t1 = time.time()

        # NOTE(review): t0 is set outside the inner loop, so this prints the
        # cumulative time since the start of the simulation, not per length
        print('Elapsed: {:.1f} seconds'.format(t1-t0))

        print('Deriving slopes...')

        slope_stationary_sr, aspect_stationary_sr = xdem.terrain.get_terrain_attribute(noisy_stationary_sr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])

        t2 = time.time()
        print('Elapsed: {:.1f} seconds'.format(t2-t1))

        # Store the slope for this (correlation length, simulation) pair
        sim_slope_dems[j, i, :, :] = slope_stationary_sr
76 |
# Legend labels for each tested correlation length, converted from meters to
# pixels (5 m resolution). Decimal precision shrinks as the range grows:
# 2 decimals for the first entry, 1 for the next two, none afterwards.
run_names = []
for idx, len_scale in enumerate(list_len_scale):
    if idx < 1:
        precision = 2
    elif idx < 3:
        precision = 1
    else:
        precision = 0
    n_pixels = len_scale / 5.
    run_names.append('Short range: {:.{prec}f} pixels'.format(n_pixels, prec=precision))
85 |
# Slope categories (degrees) used to subset pixels: gentle and very steep terrain
list_bin_edges=[(0, 20), (70, 90)]
list_df_bp = []

# For each correlation length, summarize the per-pixel slope spread across
# simulations and bin it by the original terrain slope
for i in range(len(list_len_scale)):


    # Half the 16th-84th percentile spread across simulations, i.e. a 1-sigma
    # equivalent uncertainty in slope per pixel
    slope_1sig = (np.nanpercentile(sim_slope_dems[i, :, :, :], 84, axis=0)\
                  - np.nanpercentile(sim_slope_dems[i, :, :, :], 16, axis=0)) / 2

    for j in range(2):
        # Subset by slope category
        subset = np.logical_and(slope >= list_bin_edges[j][0], slope < list_bin_edges[j][1])
        sub_slope = slope_1sig[subset]
        # Store in dataframe
        df_subset = pd.DataFrame()
        df_subset = df_subset.assign(err_slope=sub_slope, run=[run_names[i]]*len(sub_slope))
        bin_name = str(list_bin_edges[j][0]) + '–' + str(list_bin_edges[j][1])
        df_subset['slope_category'] = bin_name
        list_df_bp.append(df_subset)

df_bp = pd.concat(list_df_bp)


# Initiate figure
fig = plt.figure(figsize=(6, 6))

grid = plt.GridSpec(22, 23, wspace=0.1, hspace=0.1)

ax = fig.add_subplot(grid[:, :])

# Grouped boxplots: one group per slope category, one box per correlation length
sns.boxplot(ax=ax, x="slope_category", y="err_slope", hue="run", hue_order=run_names,
            data=df_bp, #palette={run_names[0]:'white', run_names[1]:'lightgrey', run_names[2]:'darkgrey' ,run_names[3]:'tab:green'},
            fliersize=0, linewidth=1)
# Dashed separator between the two slope categories
ax.vlines(0.5, ymin=-0.5, ymax=12, colors='tab:gray', linestyles='dashed', linewidths=0.75)
ax.set_xlim((-0.5, 1.5))
ax.set_ylim((-0.5, 12))
ax.set_xlabel('Slope categories (degrees)')
ax.set_ylabel('Uncertainty in slope (1$\sigma$, degrees)')
ax.legend(loc='upper right')

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S19_final.png', dpi=400)
128 |
129 |
--------------------------------------------------------------------------------
/figures/fig_s1_dem_noise_examples.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S1: example of correlated noises in DEMs"""
2 | import matplotlib.pyplot as plt
3 | import matplotlib.patches as mpatches
4 | import matplotlib.colors as colors
5 | import cartopy.crs as ccrs
6 | import geoutils as gu
7 | import xdem
8 |
# RGI v6 glacier outlines for Central Europe (context layer for the maps)
fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'

# Showing patterns of noise
# Pre-computed elevation-difference rasters, each illustrating a typical
# DEM noise pattern (one per figure panel below)
fn_dh_spot6_glo_noise = '/home/atom/ongoing/work_stderr_dem/noise_examples/artefact_dh/dhdt_Iceland_2020-08-08_SPOT6_vs_GLO30.tif'
fn_dh_srtm_x = '/home/atom/ongoing/work_stderr_dem/noise_examples/artefact_dh/dh-HGTS-XSAR/example_noise.vrt'
fn_dh_wv_tdx = '/home/atom/ongoing/work_stderr_dem/noise_examples/dh_TDX_WV.tif'
fn_dh_ast_tdx = '/home/atom/ongoing/work_stderr_dem/noise_examples/AST_L1A_00311202000201156/dh_AST_TDX_Nahanni.tif'
16 |
17 | # COMMENTED: how the difference were processed from original segments
18 |
19 | # fn_wv = '/home/atom/ongoing/work_stderr_dem/final/noise_examples/SETSM_WV02_20121120_103001001DAEB200_103001001CC8DE00_seg12_2m_v3.0/SETSM_WV02_20121120_103001001DAEB200_103001001CC8DE00_seg12_2m_v3.0_dem.tif'
20 | # fn_tdx = '/home/atom/ongoing/work_stderr_dem/final/noise_examples/TDX_90m_05hem_N61W128.tif'
21 | # wv = xdem.DEM(fn_wv)
22 | # tdx = xdem.DEM(fn_tdx)
23 | # wv = wv.reproject(tdx, resampling=rio.enums.Resampling.bilinear)
24 | # dh_wv_tdx = wv - tdx
25 | # dh_wv_tdx.save('/home/atom/ongoing/work_stderr_dem/final/noise_examples/dh_TDX_WV.tif')
26 | # dh_wv_tdx.show(vmin=-10, vmax=10, cmap='Spectral')
27 |
28 | # fn_aster = '/home/atom/ongoing/work_stderr_dem/final/noise_examples/AST_L1A_00311202000201156/AST_L1A_00311202000201156_Z.tif'
29 | # fn_tdx = '/home/atom/ongoing/work_stderr_dem/final/noise_examples/TDX_for_ASTER_example/tdx.vrt'
30 | #
31 | # aster = xdem.DEM(fn_aster)
32 | # tdx = xdem.DEM(fn_tdx)
33 | # tdx = tdx.reproject(aster, resampling=rio.enums.Resampling.bilinear)
34 | #
35 | # coreg = xdem.coreg.NuthKaab()
36 | # coreg.fit(reference_dem=tdx, dem_to_be_aligned=aster, verbose=True)
37 | # aster_aligned = coreg.apply(aster)
38 | # dh = aster_aligned - tdx
39 | # corr = gu.Raster('/home/atom/ongoing/work_stderr_dem/final/noise_examples/AST_L1A_00311202000201156/AST_L1A_00311202000201156_CORR.tif')
40 | # dh.data[corr.data<60.]=np.nan
41 | #
42 | # dh.save('/home/atom/ongoing/work_stderr_dem/final/noise_examples/AST_L1A_00311202000201156/dh_AST_TDX_Nahanni.tif')
43 |
44 |
# Function for distance scale bar on maps
def scale_bar(r, distance, ax, distance_labels):
    """Draw a two-segment distance scale bar with labels on a cartopy map.

    :param r: Raster whose CRS and bounds position the bar (bottom-right corner).
    :param distance: Length of one bar segment, in CRS units (meters).
    :param ax: Cartopy GeoAxes to draw on.
    :param distance_labels: Four strings: [start, middle, end, unit].
    """
    # Derive the UTM zone and hemisphere from the raster's EPSG code
    # (EPSG 326xx = northern UTM zones, 327xx = southern).
    # Fix: use the `r` parameter instead of the module-level `dh` raster, so
    # the function works for any raster passed in (every call site passed dh,
    # so behavior at existing call sites is unchanged).
    utm = str(r.crs.to_epsg())[-2:]
    s_n = str(r.crs.to_epsg())[-3]
    crs = ccrs.UTM(utm, southern_hemisphere=s_n == '7')

    # Black and white segments of the bar, each `distance` long
    ax.add_patch(mpatches.Rectangle((r.bounds.right - distance*1.5, r.bounds.bottom + distance*0.8), distance, 0.15*distance,
                                    edgecolor='black',facecolor='black',transform=crs,zorder=10,linewidth=0.5))
    ax.add_patch(mpatches.Rectangle((r.bounds.right - distance*1.5 - distance, r.bounds.bottom + distance*0.8), distance, 0.15*distance,
                                    edgecolor='black',facecolor='white',transform=crs ,zorder=10,linewidth=0.5))
    # Tick labels (start/middle/end) and the unit label below the middle tick
    ax.text(r.bounds.right - distance*1.5 - distance, r.bounds.bottom + distance*0.7,distance_labels[0],ha='center',va='top',transform=crs,zorder=10)
    ax.text(r.bounds.right - distance*1.5, r.bounds.bottom + distance*0.7, distance_labels[1],ha='center',va='top',transform=crs,zorder=10)
    ax.text(r.bounds.right - distance*1.5, r.bounds.bottom + distance*0.4,distance_labels[3],ha='center',va='top',transform=crs,zorder=10)
    ax.text(r.bounds.right - distance*1.5 + distance, r.bounds.bottom + distance*0.7,distance_labels[2],ha='center',va='top',transform=crs,zorder=10)
60 |
# 1/ First, ASTER undulations (panel a, UTM zone 9)
fig = plt.figure(figsize=(12, 12))

ax = fig.add_axes([0.025,0.6,0.45,0.35],
                  projection=ccrs.UTM(9), label='ASTER')


dh = gu.Raster(fn_dh_ast_tdx)
crop_ext = [525000, 6653000, 606000, 6713000]
dh.crop(crop_ext)
# Extract a plain array (invalid values masked) for plotting
hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh)

plt_extent = [crop_ext[0], crop_ext[2], crop_ext[1], crop_ext[3]]

cmap = plt.get_cmap('RdYlBu').copy()
cmap.set_bad(color='None')

ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(9), cmap=cmap,
          interpolation=None, zorder=2, vmin=-15, vmax=15)
ax.text(0.5, 1.02, 'ASTER: cross-track bias and along-track undulations', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
ax.text(-0.05, 1.05, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)

# 10 km scale bar segments, labelled 0/10/20 km
scale_bar(dh, 10000, ax=ax, distance_labels=['0', '10', '20', 'km'])

# Horizontal colorbar below the panel
cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.035], zorder=10)

norm = colors.Normalize(vmin=-15, vmax=15)
sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
sm.set_array([])
cb = plt.colorbar(sm, cax=cbaxes, ticks=[-15, -0, 15], orientation='horizontal', extend='both', shrink=0.2)
cb.set_label('Elevation difference (m)')


# 2/ Second, SRTM-X undulations (panel b, UTM zone 45)
ax = fig.add_axes([0.525,0.6,0.45,0.35],
                  projection=ccrs.UTM(45), label='SRTMX')


dh = gu.Raster(fn_dh_srtm_x)
crop_ext = [680000, 3765000, 1138000, 4105000]
dh.crop(crop_ext)
hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh)

plt_extent = [crop_ext[0], crop_ext[2], crop_ext[1], crop_ext[3]]

cmap = plt.get_cmap('RdYlBu').copy()
cmap.set_bad(color='None')

# Tighter color range (±5 m) than panel a
ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(45), cmap=cmap,
          interpolation=None, zorder=2, vmin=-5, vmax=5)
ax.text(0.5, 1.02, 'SRTM-X: along-track undulations', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
ax.text(-0.05, 1.05, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)

# 50 km scale bar segments, labelled 0/50/100 km
scale_bar(dh, 50000, ax=ax, distance_labels=['0', '50', '100', 'km'])

cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.035], zorder=10)

norm = colors.Normalize(vmin=-5, vmax=5)
sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
sm.set_array([])
cb = plt.colorbar(sm, cax=cbaxes, ticks=[-5, -0, 5], orientation='horizontal', extend='both', shrink=0.2)
cb.set_label('Elevation difference (m)')
127 |
128 | # 3/ Third, Worldview artefacts
129 | ax = fig.add_axes([0.025,0.1,0.45,0.35],
130 | projection=ccrs.UTM(9), label='WV')
131 |
132 |
133 | dh = gu.Raster(fn_dh_wv_tdx)
134 | crop_ext = [580000, 6861500, 583000, 6864000]
135 | dh.crop(crop_ext)
136 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh)
137 |
138 | plt_extent = [crop_ext[0], crop_ext[2], crop_ext[1], crop_ext[3]]
139 |
140 | cmap = plt.get_cmap('RdYlBu').copy()
141 | cmap.set_bad(color='None')
142 |
143 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(9), cmap=cmap,
144 | interpolation=None, zorder=2, vmin=-50, vmax=50)
145 |
146 | ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
147 | ax.text(-0.05, 1.05, 'c', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
148 | ax.text(0.5, 1.02, 'ArcticDEM: processing artefacts', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
149 |
150 | scale_bar(dh, 250, ax=ax, distance_labels=['0', '250', '500', 'm'])
151 |
152 | cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.035], zorder=10)
153 |
154 | norm = colors.Normalize(vmin=-50, vmax=50)
155 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
156 | sm.set_array([])
157 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-50, 0, 50], orientation='horizontal', extend='both', shrink=0.2)
158 | cb.set_label('Elevation difference (m)')
159 |
160 | # 4/ Finally, SPOT6 artefacts
161 | ax = fig.add_axes([0.525,0.1,0.45,0.35],
162 | projection=ccrs.UTM(28), label='SPOT6')
163 |
164 | dh = gu.Raster(fn_dh_spot6_glo_noise)
165 | dh.set_ndv(-9999)
166 | crop_ext = [449000, 7115000, 459000, 7123000]
167 | dh.crop(crop_ext)
168 | hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh)
169 |
170 | plt_extent = [crop_ext[0], crop_ext[2], crop_ext[1], crop_ext[3]]
171 |
172 | cmap = plt.get_cmap('RdYlBu').copy()
173 | cmap.set_bad(color='None')
174 |
175 | ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(28), cmap=cmap,
176 | interpolation=None, zorder=2, vmin=-10, vmax=10)
177 | ax.text(0.5, 1.02, 'SPOT-6: processing artefacts', transform=ax.transAxes, ha='center', va='bottom', fontweight='bold')
178 | ax.text(-0.05, 1.05, 'd', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
179 |
180 | ax.gridlines(draw_labels=False, dms=False, x_inline=False, y_inline=False)
181 | scale_bar(dh, 1000, ax=ax, distance_labels=['0', '1', '2', 'km'])
182 |
183 | cbaxes = ax.inset_axes([0.2, -0.075, 0.6, 0.035], zorder=10)
184 |
185 | norm = colors.Normalize(vmin=-10, vmax=10)
186 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
187 | sm.set_array([])
188 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-10, -0, 10], orientation='horizontal', extend='both', shrink=0.2)
189 | cb.set_label('Elevation difference (m)')
190 |
191 | # Save to file
192 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S1_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_s20_sim_curv_influence_slope_errors.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S20: impact of curvature on slope errors"""
2 | import gstools as gs
3 | import matplotlib.pyplot as plt
4 | import numpy as np
5 | from geoutils import Raster
6 | import xdem
7 | import time
8 | import pandas as pd
9 | import seaborn as sns
10 |
# Open data
fn_dem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
# Number of Monte Carlo simulations of noisy DEMs
n_sim = 200
fn_hs = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Mont-Blanc_2017-10-25_DEM_5m_hillshade.tif'
r = Raster(fn_dem)

# Crop around Mont-Blanc
crop_ext = [333500, 5076000, 335500, 5078000]
r.crop(crop_ext)
hs = Raster(fn_hs)
hs.crop(crop_ext)

# Pre-computed heteroscedasticity (binned NMAD vs slope/curvature) and
# standardized variogram estimates from the Mont-Blanc case study
fn_hetsce = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_heteroscedas_slope_curv.csv'
fn_vgm = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_vgm_std_sta.csv'

df_h = pd.read_csv(fn_hetsce)
df_v = pd.read_csv(fn_vgm)
# Keep lags below 30 km only
df_v = df_v[df_v.bins<30000]
df_v.err_exp /= np.sqrt(100)/2
# Re-standardize the variogram so that its long-range sill (mean of the last
# three estimates) equals 1
std_fac = np.nanmean(df_v.exp.values[-3:])
df_v.exp /= std_fac
df_v.err_exp /= std_fac
# Work on a plain 2D float array; replace nodata by NaN
dem = np.copy(r.data.data).squeeze()
dem[dem==r.nodata] = np.nan

slope, aspect, planc, profc = xdem.terrain.get_terrain_attribute(dem, resolution=r.res[0], attribute=['slope', 'aspect', 'planform_curvature',
                                                                                       'profile_curvature'])
# Maximum absolute curvature: the heteroscedasticity predictor used below
maxabsc = np.maximum(np.abs(planc), np.abs(profc))

shape = np.shape(dem)
# Grid/Raster of 1000 x 1000 pixels
x = np.arange(0, shape[0]) * r.res[0]
y = np.arange(0, shape[1]) * r.res[0]

# Fit a sum of three Gaussian models; params alternates (range, sill) per model
_, params = xdem.spatialstats.fit_sum_model_variogram(list_model=['Gau', 'Gau', 'Gau'], empirical_variogram=df_v,
                                                      bounds=[(0, 200), (0, 9), (500, 5000), (0, 9), (2000, 15000), (0,9)],
                                                      p0=[100, 1.5, 2000,1.5, 5000,1.5])

# 2D interpolant: predicted error (NMAD) as a function of (slope, max. abs. curvature)
fn = xdem.spatialstats.interp_nd_binning(df_h, list_var_names=['slope_mid', 'maxc_mid'], statistic='nmad', min_count=30)
# Clamp curvature to the last bin used during the estimation
maxabsc[maxabsc>50] = 50
dh_err = fn((slope.data, maxabsc))

# Stable-terrain NMAD used for the homoscedastic runs (metres)
nmad_stable = 1.60
# nmad_stable = np.nanmedian(dh_err)

# Need to specify the rescale factor to match skgstat and gstools
model_s_alone = gs.Gaussian(dim=2, var=1, len_scale=params[0], rescale=2)

# Short-range + two long-range components of the fitted variogram
model_s = gs.Gaussian(dim=2, var=params[1], len_scale=params[0], rescale=2)
model_l = gs.Gaussian(dim=2, var=params[3], len_scale=params[2], rescale=2)
model_l2 = gs.Gaussian(dim=2, var=params[5], len_scale=params[4], rescale=2)
62 |
# Output stacks: 6 noise scenarios x n_sim realizations x DEM shape
sim_slope_dems, sim_aspect_dems = (np.empty((6, n_sim,) + shape, dtype=np.float32) for i in range(2))

for i in range(n_sim):

    print('Working on simulation '+str(i+1))

    print('Generating random field...')

    t0 = time.time()

    # Using GSTools, let's generate a correlated signal at two different length: 5 and 100 (spherical)
    srf_s_alone = gs.SRF(model_s_alone, mode_no=100)
    srf_s = gs.SRF(model_s, mode_no=100)
    srf_l = gs.SRF(model_l, mode_no=100)
    srf_l2 = gs.SRF(model_l2, mode_no=100)

    # We combine the two random correlated fields (e.g, short-range could represent resolution, and long-range the noise)
    field_s_alone = srf_s_alone.structured([x, y])

    field_s = srf_s((x, y), mesh_type='structured')
    field_l = srf_l((x, y), mesh_type='structured')
    field_l2 = srf_l2((x, y), mesh_type='structured')

    # Stationary variance with purely random noise
    pixel_noise = np.random.normal(0, 1, size=np.shape(dem))
    noisy_stationary_dem = dem + pixel_noise * nmad_stable

    # Heteroscedasticity with purely random noise
    noisy_hetsce_dem = dem + pixel_noise * dh_err

    # Stationary variance with correlated noise (short, and short+long range)
    noisy_stationary_sr_dem = dem + nmad_stable * field_s_alone
    noisy_stationary_lr_dem = dem + nmad_stable * field_s + nmad_stable * (field_l + field_l2)

    # Heteroscedasticity with correlated noise
    noisy_hetsce_sr_dem = dem + dh_err * field_s_alone
    # NOTE(review): the long-range components are scaled by the stationary
    # nmad_stable even in the heteroscedastic run (only the short-range part
    # uses dh_err) — presumably deliberate (long-range noise treated as
    # stationary); confirm against the methods description.
    noisy_hetsce_lr_dem = dem + dh_err * field_s + nmad_stable * (field_l + field_l2)

    t1 = time.time()

    print('Elapsed: {:.1f} seconds'.format(t1-t0))

    print('Deriving slopes...')

    # Derive slope/aspect from each noisy DEM realization
    slope_stationary, aspect_stationary = xdem.terrain.get_terrain_attribute(noisy_stationary_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_hetsce, aspect_hetsce = xdem.terrain.get_terrain_attribute(noisy_hetsce_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_stationary_sr, aspect_stationary_sr = xdem.terrain.get_terrain_attribute(noisy_stationary_sr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_stationary_lr, aspect_stationary_lr = xdem.terrain.get_terrain_attribute(noisy_stationary_lr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_hetsce_sr, aspect_hetsce_sr = xdem.terrain.get_terrain_attribute(noisy_hetsce_sr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])
    slope_hetsce_lr, aspect_hetsce_lr = xdem.terrain.get_terrain_attribute(noisy_hetsce_lr_dem, resolution=r.res[0], attribute=['slope', 'aspect'])

    t2 = time.time()
    print('Elapsed: {:.1f} seconds'.format(t2-t1))

    # Scenario ordering along axis 0 (must match run_names defined below)
    sim_slope_dems[0, i, :, :] = slope_stationary
    sim_slope_dems[1, i, :, :] = slope_hetsce
    sim_slope_dems[2, i, :, :] = slope_stationary_sr
    sim_slope_dems[3, i, :, :] = slope_stationary_lr
    sim_slope_dems[4, i, :, :] = slope_hetsce_sr
    sim_slope_dems[5, i, :, :] = slope_hetsce_lr

    sim_aspect_dems[0, i, :, :] = aspect_stationary
    sim_aspect_dems[1, i, :, :] = aspect_hetsce
    sim_aspect_dems[2, i, :, :] = aspect_stationary_sr
    sim_aspect_dems[3, i, :, :] = aspect_stationary_lr
    sim_aspect_dems[4, i, :, :] = aspect_hetsce_sr
    sim_aspect_dems[5, i, :, :] = aspect_hetsce_lr
130 |
131 |
# Category edges for binning: slope (degrees) and max. absolute curvature
# NOTE(review): only bins_curv is used for the binning here; bins_slope is
# referenced further down by the plotting code.
bins_slope = [0, 5, 10, 15, 20, 30, 40, 50, 70, 90]
bins_curv = [0, 0.2, 0.5, 1, 2, 3, 4, 6, 10, 20, 50]

# Aspect converted to its two circular components
northness = np.cos(sim_aspect_dems * np.pi / 180)
eastness = np.sin(sim_aspect_dems * np.pi / 180)

list_slope_map, list_maxnortheast_map, list_df_bp, list_df_bp_northeast = ([] for i in range(4))
run_names = ['stationary_random', 'hetsce_random', 'stationary_shortrange', 'stationary_longrange', 'hetsce_shortrange', 'hetsce_longrange']
for i in range(6):


    # Per-pixel 1-sigma spread across simulations, estimated from the
    # 16th-84th percentile half-range (robust to outliers)
    slope_1sig = (np.nanpercentile(sim_slope_dems[i, :, :, :], 84, axis=0)\
                  - np.nanpercentile(sim_slope_dems[i, :, :, :], 16, axis=0)) / 2
    northness_1sig = (np.nanpercentile(northness[i, :, :, :], 84, axis=0)\
                      - np.nanpercentile(northness[i, :, :, :], 16, axis=0)) / 2
    eastness_1sig = (np.nanpercentile(eastness[i, :, :, :], 84, axis=0)\
                     - np.nanpercentile(eastness[i, :, :, :], 16, axis=0)) / 2
    maxnortheast_1sig = np.maximum(northness_1sig, eastness_1sig)

    for j in range(len(bins_curv) - 1):
        # Subset by maximum absolute curvature category
        subset = np.logical_and(maxabsc >= bins_curv[j], maxabsc < bins_curv[j + 1])
        sub_slope = slope_1sig[subset]
        sub_northeast = maxnortheast_1sig[subset]
        # Store in dataframe
        df_subset = pd.DataFrame()
        df_subset = df_subset.assign(err_slope=sub_slope, run=[run_names[i]]*len(sub_slope))
        bin_name = str(bins_curv[j]) + '–' + str(bins_curv[j + 1])
        df_subset['curv_category'] = bin_name
        list_df_bp.append(df_subset)

        df_subset_northeast = pd.DataFrame()
        df_subset_northeast = df_subset_northeast.assign(err_northeast=sub_northeast, run=[run_names[i]] * len(sub_slope))
        bin_name = str(bins_curv[j]) + '–' + str(bins_curv[j + 1])
        df_subset_northeast['curv_category'] = bin_name
        list_df_bp_northeast.append(df_subset_northeast)

    list_slope_map.append(slope_1sig)
    list_maxnortheast_map.append(maxnortheast_1sig)

# Long-format dataframes for the seaborn boxplots below
df_bp = pd.concat(list_df_bp)
df_bp_northeast = pd.concat(list_df_bp_northeast)
174 |
## Main panel: boxplot of uncertainty with curvature categories
# Keep only the four runs shown in the figure
orig_names = ['stationary_random', 'stationary_longrange','hetsce_random', 'hetsce_longrange']
# .copy() so that the relabelling below edits an independent frame rather
# than a view of df_bp (avoids SettingWithCopyWarning and is safe under
# pandas copy-on-write semantics)
df_bp_sub = df_bp[df_bp.run.isin(orig_names)].copy()
df_bp_northeast_sub = df_bp_northeast[df_bp_northeast.run.isin(orig_names)].copy()

# Replace internal run identifiers by the human-readable legend labels
names = ['Homosc., no corr.', 'Homosc., long-range', 'Heterosc., no corr.', 'Heterosc., long-range']
for i, oname in enumerate(orig_names):
    df_bp_sub.loc[df_bp_sub.run == oname, 'run'] = names[i]
    df_bp_northeast_sub.loc[df_bp_northeast_sub.run == oname, 'run'] = names[i]
184 |
185 |
# Initiate figure
# All three panels share the same x axis: the maximum-absolute-curvature
# categories defined by bins_curv. (The original code indexed the histogram
# loop, category separators and x-limits with len(bins_slope) — a leftover
# from the slope-binned sibling figure — which silently dropped the last
# curvature category; fixed to len(bins_curv) throughout.)
fig = plt.figure(figsize=(10, 10))
grid = plt.GridSpec(40, 20, wspace=0.1, hspace=0.1)

# First, an horizontal axis on top to plot the sample histograms

ax = fig.add_subplot(grid[:6, :])

# Pixel count per curvature category, taken from a single run (the binning
# only depends on maxabsc, so counts are identical across runs)
list_nb_pixel = []
for i in range(len(bins_curv)-1):
    ind_pixel = np.logical_and(df_bp.run.values == 'stationary_random', df_bp.curv_category== str(bins_curv[i])+'–'+str(bins_curv[i+1]))
    nb_pixel = np.count_nonzero(ind_pixel)
    list_nb_pixel.append(nb_pixel)
    ax.fill_between([i-0.3, i+0.3], [0]*2, [nb_pixel], facecolor='black')

# Dashed vertical separators between categories
ax.vlines(np.arange(0.5, len(bins_curv)-1), ymin=-5, ymax=np.max(list_nb_pixel)*1.1, colors='tab:gray', linestyles='dashed', linewidths=0.75)

ax.set_xticks([])
ax.set_ylabel('Sample\ncount')
ax.set_ylim((100, np.max(list_nb_pixel)*1.1))
ax.spines['top'].set_visible(False)
ax.spines['right'].set_visible(False)
ax.set_yscale('log')
ax.set_xlim((-0.5, len(bins_curv)-1.5))

# Middle panel: slope uncertainty per curvature category
ax = fig.add_subplot(grid[6:23, :])

sns.boxplot(ax=ax, x="curv_category", y="err_slope", hue="run", hue_order=names,
            data=df_bp_sub, palette={names[0]:'white', names[1]:'darkgrey', names[2]:'lightgreen' ,names[3]:'darkgreen'},
            fliersize=0, linewidth=1)
ax.vlines(np.arange(0.5, len(bins_curv)-1), ymin=-5, ymax=40, colors='tab:gray', linestyles='dashed', linewidths=0.75)

ax.set_ylim((-0.5, 11.5))
# Tick labels are hidden here (x axis shared with the bottom panel); the
# original labelled this axis 'Slope categories (degrees)', a leftover from
# the slope-binned figure — the x variable here is curvature, so no label.
ax.set_xlabel('')
ax.set_ylabel('Uncertainty in slope (1$\sigma$, degrees)')
ax.legend(loc='upper center', ncol=2)
ax.set_xticklabels([])
ax.text(0.025, 0.96, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
        zorder=20)
ax.set_xlim((-0.5, len(bins_curv)-1.5))

# Bottom panel: aspect (max of northness/eastness) uncertainty per category
ax = fig.add_subplot(grid[23:, :])

sns.boxplot(ax=ax, x="curv_category", y="err_northeast", hue="run", hue_order=names,
            data=df_bp_northeast_sub, palette={names[0]:'white', names[1]:'darkgrey', names[2]:'lightgreen' ,names[3]:'darkgreen'},
            fliersize=0, linewidth=1)
ax.vlines(np.arange(0.5, len(bins_curv)-1), ymin=-1, ymax=2, colors='tab:gray', linestyles='dashed', linewidths=0.5)
ax.set_ylim((-0.05, 0.55))
# Single legend for the whole figure: drop the duplicate created by seaborn
l = ax.legend()
l.remove()

ax.set_xlabel('Maximum absolute curvature categories (10$^{2}$ m$^{-1}$)')
ax.set_ylabel('Maximum of uncertainty in\nnorthness or eastness (1$\sigma$)')
ax.set_xlim((-0.5, len(bins_curv)-1.5))
ax.text(0.025, 0.96, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14,
        zorder=20)

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S20_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_s21_vario_artifical_undulations.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S21: artificial undulations to constrain correlated errors in swath direction"""
2 | import xdem
3 | import numpy as np
4 | import pandas as pd
5 | import matplotlib.pyplot as plt
6 | import skgstat as skg
7 | from matplotlib.patches import Patch
8 | from matplotlib.legend_handler import HandlerPatch
9 |
# Simulate undulations scenario 1: sinusoid of amplitude 1 and period 10 px
ampli = 1
res = 1
freq = 10

nx, ny = (100, 100)
shape = (nx, ny)
x = np.arange(nx)
y = np.arange(ny)
xx, yy = np.meshgrid(x, y)

# Along-track direction of the simulated swath (degrees from the y axis)
along_angle = 0

# Along-track coordinate of each pixel; with along_angle=0 this is simply yy
altrack = -xx * np.sin(np.deg2rad(along_angle)) + yy * np.cos(np.deg2rad(along_angle))
dh_simu = ampli * np.sin(altrack * res * 2 * np.pi / freq)

# Flattened (x, y) coordinates and values for the variogram estimation
coords = np.dstack((xx.flatten(), yy.flatten())).squeeze()
vals = dh_simu.flatten()

# Estimate variogram
V = skg.Variogram(coords, vals, n_lags=100)
bins, exps = V.get_empirical()
counts = V.bin_count

# No error estimate available for these empirical values: err_exp left NaN
df = pd.DataFrame()
df = df.assign(bins=bins, exp=exps, counts=counts, err_exp=np.nan*np.ones(len(bins)))
df = df[df.bins<=115]

# Single spherical fit on all values
fun, params = xdem.spatialstats.fit_sum_model_variogram(['Sph'], df)
# Rolling-minimum envelope of the oscillating variogram (NaN-ing out the
# lags right after the first trough), then a double spherical fit on it
dfr = df.copy()
dfr.exp = df.exp.rolling(5, min_periods=1).min().drop_duplicates()
dfr[np.logical_and(dfr.bins>1,dfr.bins<=5)] = np.nan
fun_double, params_double = xdem.spatialstats.fit_sum_model_variogram(['Sph', 'Sph'], dfr)
43 |
# Simulate undulations scenario 2: same setup but with a 30-pixel period
freq = 30

altrack2 = -xx * np.sin(np.deg2rad(along_angle)) + yy * np.cos(np.deg2rad(along_angle))
dh_simu2 = ampli * np.sin(altrack2 * res * 2 * np.pi / freq)
vals2 = dh_simu2.flatten()

# Estimate variogram (same coordinates as scenario 1)
V2 = skg.Variogram(coords, vals2, n_lags=100)
bins2, exps2 = V2.get_empirical()
counts2 = V2.bin_count

# No error estimate available: err_exp left NaN
# (fix: sized with len(bins2) — the original used len(bins) from scenario 1,
# which only worked because both variograms happen to use n_lags=100)
df2 = pd.DataFrame()
df2 = df2.assign(bins=bins2, exp=exps2, counts=counts2, err_exp=np.nan*np.ones(len(bins2)))
df2 = df2[df2.bins<=115]

# Single spherical fit on all values
fun2, params2 = xdem.spatialstats.fit_sum_model_variogram(['Sph'], df2)

# Rolling-minimum envelope (wider window for the longer period), then a
# double spherical fit on it
df2r = df2.copy()
df2r.exp = df2.exp.rolling(15, min_periods=1).min().drop_duplicates()
df2r[np.logical_and(df2r.bins>1,df2r.bins<=30)] = np.nan
fun2_double, params2_double = xdem.spatialstats.fit_sum_model_variogram(['Sph', 'Sph'], df2r)

# Common x range of the variogram panels
xmin = 0
xmax = max(df.bins)
69 |
# Initiate figure
fig = plt.figure(figsize=(12, 12))
grid = plt.GridSpec(22, 22, wspace=0.1, hspace=0.1)

# Plot artificial undulations, first scenario
ax = fig.add_subplot(grid[0:10, 0:10])

ax.imshow(dh_simu, cmap='RdYlBu', vmin=-1, vmax=1)
ax.text(-0.025, 1.05, 'a', transform=ax.transAxes, ha='right', va='bottom', fontweight='bold', fontsize=14)

# Plot artificial undulations, second scenario
ax = fig.add_subplot(grid[12:, 0:10])
ax.text(-0.025, 1.05, 'b', transform=ax.transAxes, ha='right', va='bottom', fontweight='bold', fontsize=14)

ax.imshow(dh_simu2, cmap='RdYlBu')

# Plot variograms
ax0 = fig.add_subplot(grid[:3, 12:])

ax0.set_xticks([])

# Plot the histogram manually with fill_between
interval_var = [0] + list(df.bins)
for i in range(len(df)):
    width = interval_var[i + 1] - interval_var[i]
    mid = interval_var[i] + width / 2
    count = df['counts'].values[i]
    ax0.fill_between([mid - width / 2, mid + width /2], [0] * 2, [count] * 2,
                     facecolor='black', alpha=1,
                     edgecolor='none', linewidth=0.5)
    # ax0.vlines(mid - width / 2, ymin=[0], ymax=1.2 * max(df['counts'].values), colors='tab:gray',
    #            linestyles='dashed', linewidths=0.5)
ax0.set_ylabel('Pairwise\nsample\ncount')
# Scientific format to avoid undesired additional space on the label side
ax0.ticklabel_format(axis='y', style='sci', scilimits=(0, 0))

# Ignore warnings for log scales
ax0.set_xlim((xmin, xmax))
ax0.set_ylim((0, 1.2 * max(df['counts'].values)))
ax0.text(0.05, 0.95, 'c', transform=ax0.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

# Main variogram panel
ax1 = fig.add_subplot(grid[3:, 12:])

# Dense lag grid for plotting the fitted models
bins_model = np.linspace(0, 150, 1000)

# Shift bin edges to bin centers for plotting the empirical values
bins_center = np.subtract(df.bins, np.diff([0] + df.bins.tolist()) / 2)

bins_center_r = np.subtract(dfr.bins, np.diff([0] + dfr.bins.tolist()) / 2)
bins_center_r2 = np.subtract(df2r.bins, np.diff([0] + df2r.bins.tolist()) / 2)

ax1.scatter(bins_center, df.exp, marker='x', color='tab:blue')
ax1.scatter(bins_center_r, dfr.exp, marker='x', s=80, color='tab:blue', linewidths=3)
# NOTE(review): bins_center (from df) is reused for df2 here — presumably
# valid because both variograms share identical lags (same coords, same
# n_lags); confirm if n_lags ever differs between the scenarios.
ax1.scatter(bins_center, df2.exp, marker='x', color='tab:orange')
ax1.scatter(bins_center_r2, df2r.exp, marker='x', s=80, color='tab:orange', linewidths=3)
ax1.plot(bins_model, fun(bins_model), color='tab:blue')
ax1.plot(bins_model, fun_double(bins_model), color='tab:blue', linestyle='dashed')
ax1.plot(bins_model, fun2(bins_model), color='tab:orange')
ax1.plot(bins_model, fun2_double(bins_model), color='tab:orange', linestyle='dashed')
# Variance of a unit-amplitude sinusoid is 0.5: the expected global sill
ax1.hlines(0.5, xmin=xmin, xmax=xmax, linestyles='dotted', colors='black', label='Global variance')
ax1.set_xlim((xmin, xmax))
ax1.set_ylabel('Variance')
ax1.set_xlabel('Spatial lag')

# Empty artists used only to build black (color-agnostic) legend entries
ax1.scatter([], [], marker='x', label='Empirical variogram', color='black')
ax1.scatter([], [], marker='x', s=80, linewidths=3, label='Rolling minimum values', color='black')
ax1.plot([], [], linestyle='solid', label='Double range fit on all values', color='black')
ax1.plot([], [], linestyle='dashed', label='Double range fit on minimum values', color='black')

# Reorder legend entries: line styles first, then markers/variance line
handles, labels = ax1.get_legend_handles_labels()
order = [3, 4, 0, 1, 2]
l0 = ax1.legend([handles[idx] for idx in order], [labels[idx] for idx in order], loc='lower right')
# l0.set_zorder(30)

# Second legend mapping colors to panels a/b
p0 = Patch(facecolor='tab:blue', edgecolor='None', label='Variogram of panel a')
p1 = Patch(facecolor='tab:orange', edgecolor='None', label='Variogram of panel b')

ax1.legend([p0, p1], ['Variogram of panel a', 'Variogram of panel b'],
           handler_map={p0 : HandlerPatch(), p1: HandlerPatch()},
           framealpha=1, loc='upper right')
# l1.set_zorder(30)
# Re-add the first legend (adding a second legend removes the first)
ax1.add_artist(l0)

# ax1.legend(loc='lower right')

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S21_final.png', dpi=300)
--------------------------------------------------------------------------------
/figures/fig_s2_shift_tilt_pleiades_spot6_montblanc.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S2: shift and tilt between Pléiades and SPOT-6 DEMs of the Mont-Blanc case study"""
2 | import matplotlib.pyplot as plt
3 | import matplotlib.colors as colors
4 | import cartopy.crs as ccrs
5 | import geoutils as gu
6 | import xdem
7 |
# Input paths (fn_shp is defined for consistency with the sibling scripts
# but not used in this figure)
fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
fn_dh_nk = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_shift_nk_Pleiades.tif'
fn_dh_deramp = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_shift_deramp_Pleiades.tif'

# First, let's plot the shift
fig = plt.figure(figsize=(6, 4.25))

ax = fig.add_axes([0,0.15,0.5,0.8],
                  projection=ccrs.UTM(32), label='Mont-Blanc')

dh = gu.Raster(fn_dh_nk)
# imshow extent ordering is [left, right, bottom, top]
plt_extent=[dh.bounds.left, dh.bounds.right, dh.bounds.bottom, dh.bounds.top]
# Invalid pixels are returned as NaN in the array
hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh)

# .copy() before set_bad: mutating the globally registered colormap is
# deprecated/raises in recent matplotlib (and matches the sibling figure
# scripts, which already copy)
cmap = plt.get_cmap('RdYlBu').copy()
cmap.set_bad(color='None')

ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
          interpolation=None, zorder=2, vmin=-21, vmax=-1)

ax.text(0.05, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

cbaxes = ax.inset_axes([0.2, -0.05, 0.6, 0.025], zorder=10)

# Colorbar built from a standalone ScalarMappable matching the imshow limits
norm = colors.Normalize(vmin=-21, vmax=-1)
sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
sm.set_array([])
cb = plt.colorbar(sm, cax=cbaxes, ticks=[-21, -16, -11, -6, -1], orientation='horizontal', extend='both', shrink=0.2)
cb.set_label('Elevation difference (m)')
38 |
# Then, plot the tilt
ax = fig.add_axes([0.5,0.15,0.5,0.8],
                  projection=ccrs.UTM(32), label='Mont-Blanc')

dh_deramp = gu.Raster(fn_dh_deramp)
# imshow extent ordering is [left, right, bottom, top]
plt_extent=[dh_deramp.bounds.left, dh_deramp.bounds.right, dh_deramp.bounds.bottom, dh_deramp.bounds.top]

# Invalid pixels are returned as NaN in the array
hs_arr, _ = xdem.spatialstats.get_array_and_mask(dh_deramp)
# .copy() before set_bad: mutating the globally registered colormap is
# deprecated/raises in recent matplotlib (and matches the sibling scripts)
cmap = plt.get_cmap('RdYlBu').copy()
cmap.set_bad(color='None')
ax.imshow(hs_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
          interpolation=None, zorder=2, vmin=-2, vmax=2)

ax.text(0.05, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

cbaxes = ax.inset_axes([0.2, -0.05, 0.6, 0.025], zorder=10)

# Colorbar built from a standalone ScalarMappable matching the imshow limits
norm = colors.Normalize(vmin=-2, vmax=2)
sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
sm.set_array([])
cb = plt.colorbar(sm, cax=cbaxes, ticks=[-2, -1, -0, 1, 2], orientation='horizontal', extend='both', shrink=0.2)
cb.set_label('Elevation difference (m)')

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S2_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_s3_dh_zdh_pleiades_spot6_montblanc.py:
--------------------------------------------------------------------------------
1 | """Plotting of Figure S3: SPOT6-Pléiades elevation difference and standard score for the Mont-Blanc case study"""
2 | import matplotlib.pyplot as plt
3 | import matplotlib.patches as mpatches
4 | import matplotlib.colors as colors
5 | import pandas as pd
6 | import numpy as np
7 | import cartopy.crs as ccrs
8 | import geoutils as gu
9 | from cartopy.io.shapereader import Reader
10 | from cartopy.feature import ShapelyFeature
11 | import xdem
12 |
# Input paths: RGI glacier outlines, binned heteroscedasticity table,
# elevation difference map, simplified forest outlines and reference DEM
fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
fn_df_sub = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_heteroscedas_slope_curv.csv'
fn_dh = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
fn_forest_shp_simplified='/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'
fn_pleiades = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'

# Zoom extent on the Mer de Glace tongue, shared by panels b and d
crop_ext = [339180, 5086760, 340680, 5087460]

# 1/ First, plot the elevation difference with transparent forest/glacier outlines on the full map
fig = plt.figure(figsize=(6, 5))

ax = fig.add_axes([0.025,0.375,0.45,0.6],
                  projection=ccrs.UTM(32), label='Mont-Blanc')

dh = gu.Raster(fn_dh)
# imshow extent ordering is [left, right, bottom, top]
plt_extent=[dh.bounds.left, dh.bounds.right, dh.bounds.bottom, dh.bounds.top]
dh_arr = gu.spatial_tools.get_array_and_mask(dh)[0]

# .copy() before set_bad: mutating the globally registered colormap is
# deprecated/raises in recent matplotlib (and matches the sibling scripts).
# Note: cmap is reused by the following panels of this figure.
cmap = plt.get_cmap('RdYlBu').copy()
cmap.set_bad(color='None')

# Semi-transparent glacier (grey) and forest (green) overlays
shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.2,
                               facecolor='tab:grey', linewidth=1, zorder=4)
ax.add_feature(shape_feature)
shape_feature = ShapelyFeature(Reader(fn_forest_shp_simplified).geometries(), ccrs.UTM(32), edgecolor='None', alpha=0.2,
                               facecolor='tab:green', linewidth=1, zorder=3)
ax.add_feature(shape_feature)

ax.imshow(dh_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
          interpolation=None, zorder=2, vmin=-4, vmax=4)

ax.text(0.025, 0.975, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

# Outline and caption locating the zoom of panels b and d
ax.add_patch(mpatches.Rectangle((crop_ext[0], crop_ext[1]), crop_ext[2] - crop_ext[0], crop_ext[3] - crop_ext[1],
                                edgecolor='black',facecolor='None',transform=ccrs.UTM(32),zorder=10,linewidth=1))
ax.text(crop_ext[0]+0.5*(crop_ext[2]-crop_ext[0]), crop_ext[1]-500, 'panels b & d', fontweight='bold', ha='center', va='top')


# Hand-drawn legend for the two overlays, in a frameless inset axes
legendax = ax.inset_axes([0.9, -0.58, 0.2, 0.12], zorder=5)

legendax.set_xlim((-0.1, 0.6))
legendax.set_ylim((-0.1, 0.6))
legendax.set_xticks([])
legendax.set_yticks([])
legendax.spines['top'].set_visible(False)
legendax.spines['left'].set_visible(False)
legendax.spines['right'].set_visible(False)
legendax.spines['bottom'].set_visible(False)

legendax.add_patch(mpatches.Rectangle((0, 0), 0.2 ,0.2 , edgecolor='black',facecolor='tab:grey', alpha=0.3, zorder=10, linewidth=0.5))
legendax.text(0.25, 0.1, 'Glaciers', va='center', ha='left')
legendax.add_patch(mpatches.Rectangle((0, 0.3), 0.2 ,0.2 , edgecolor='black',facecolor='tab:green', alpha=0.3, zorder=10, linewidth=0.5))
legendax.text(0.25, 0.4, 'Forests', va='center', ha='left')
67 |
# 2/ Then, a subset zoomed on the Mer de Glace tongue
ax = fig.add_axes([0,0.12,0.5,0.225],
                  projection=ccrs.UTM(32), label='Mont-Blanc_sub')

# In-place crop of the raster already opened for panel a
dh.crop(crop_ext)
plt_extent=[dh.bounds.left, dh.bounds.right, dh.bounds.bottom, dh.bounds.top]
dh_arr = gu.spatial_tools.get_array_and_mask(dh)[0]
shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.2,
                               facecolor='tab:grey', linewidth=1, zorder=4)
ax.add_feature(shape_feature)

# cmap is reused from panel a (same color scale as the full map)
ax.imshow(dh_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
          interpolation=None, zorder=2, vmin=-4, vmax=4)
ax.text(0.025, 0.975, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)


cbaxes = ax.inset_axes([0.2, -0.1, 0.6, 0.06], zorder=10)

# Shared colorbar for panels a and b
norm = colors.Normalize(vmin=-4, vmax=4)
sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
sm.set_array([])
cb = plt.colorbar(sm, cax=cbaxes, ticks=[-4, -2, 0, 2, 4], orientation='horizontal', extend='both', shrink=0.2)
cb.set_label('Elevation difference (m)')
91 |
# 3/ We standardize and replot the full map
ax = fig.add_axes([0.525,0.375,0.45,0.6],
                  projection=ccrs.UTM(32), label='Mont-Blanc')

# 2D interpolant: predicted error (NMAD) as a function of (slope, max. abs. curvature)
df_sub = pd.read_csv(fn_df_sub)
fn = xdem.spatialstats.interp_nd_binning(df_sub, list_var_names=['slope_mid', 'maxc_mid'], statistic='nmad', min_count=30)

ref_dem = gu.Raster(fn_pleiades)

slope, planc, profc = xdem.terrain.get_terrain_attribute(ref_dem, attribute=['slope', 'planform_curvature',
                                                                             'profile_curvature'])
# Maximum absolute curvature, clamped to the last bin used in the estimation
maxabsc = np.maximum(np.abs(planc), np.abs(profc))
slope_arr = gu.spatial_tools.get_array_and_mask(slope)[0]

maxabsc[maxabsc>50] = 50
dh_err = fn((slope_arr, maxabsc[0, :, :]))
# Re-open the full (uncropped) dh: the panel-b code cropped the previous one
dh = gu.Raster(fn_dh)
plt_extent=[dh.bounds.left, dh.bounds.right, dh.bounds.bottom, dh.bounds.top]

# Standard score: dh divided by its modelled error, then re-scaled so the
# NMAD of the standardized differences is exactly 1
dh_arr = gu.spatial_tools.get_array_and_mask(dh)[0]
std_dh = dh_arr/dh_err
# std_dh[np.abs(std_dh)>7*xdem.spatialstats.nmad(std_dh)] = np.nan
std_dh /= xdem.spatialstats.nmad(std_dh)
# .copy() before set_bad: mutating the globally registered colormap is
# deprecated/raises in recent matplotlib (and matches the sibling scripts)
cmap = plt.get_cmap('RdYlBu').copy()
cmap.set_bad(color='None')

# Same semi-transparent glacier/forest overlays as panel a
shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.2,
                               facecolor='tab:grey', linewidth=1, zorder=4)
ax.add_feature(shape_feature)
shape_feature = ShapelyFeature(Reader(fn_forest_shp_simplified).geometries(), ccrs.UTM(32), edgecolor='None', alpha=0.2,
                               facecolor='tab:green', linewidth=1, zorder=3)
ax.add_feature(shape_feature)

ax.imshow(std_dh[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
          interpolation=None, zorder=2, vmin=-3, vmax=3)

ax.text(0.025, 0.975, 'c', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
130 |
131 | # 4/ And plot the standardized elevation difference for the subset also
132 | ax = fig.add_axes([0.5,0.12,0.5,0.225],
133 | projection=ccrs.UTM(32), label='Mont-Blanc_sub')
134 |
135 | std_dh_r = dh.copy(new_array=std_dh[None, :, :])
136 | std_dh_r.crop(crop_ext)
137 | plt_extent=[std_dh_r.bounds.left, std_dh_r.bounds.right, std_dh_r.bounds.bottom, std_dh_r.bounds.top]
138 |
139 | std_dh_arr = gu.spatial_tools.get_array_and_mask(std_dh_r)[0]
140 | shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='None', alpha=0.2,
141 | facecolor='tab:grey', linewidth=1, zorder=4)
142 | ax.add_feature(shape_feature)
143 |
144 | ax.imshow(std_dh_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
145 | interpolation=None, zorder=2, vmin=-3, vmax=3)
146 | ax.text(0.025, 0.975, 'd', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)
147 |
148 | cbaxes = ax.inset_axes([0.2, -0.1, 0.6, 0.06], zorder=10)
149 |
150 | norm = colors.Normalize(vmin=-3, vmax=3)
151 | sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
152 | sm.set_array([])
153 | cb = plt.colorbar(sm, cax=cbaxes, ticks=[-3, -1.5, 0, 1.5, 3], orientation='horizontal', extend='both', shrink=0.2)
154 | cb.set_label('Standardized elevation difference')
155 |
156 | # Save to file
157 | plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S3_final.png', dpi=300)
--------------------------------------------------------------------------------
/figures/fig_s4_slope_curv_pleiades_montblanc.py:
--------------------------------------------------------------------------------
"""Plotting of Figure S4: slope and curvature from the Pléiades DEM of the Mont-Blanc case study"""
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import numpy as np
import cartopy.crs as ccrs
import geoutils as gu
from cartopy.io.shapereader import Reader
from cartopy.feature import ShapelyFeature
import xdem

# Input rasters and vector outlines
fn_ddem = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/dh_Pleiades-SPOT6_Mont-Blanc_NK_Deramp.tif'
fn_pleiades = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/Pleiades_Mont-Blanc_2017-10-25_DEM_5m.tif'
fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/11_rgi60_CentralEurope/11_rgi60_CentralEurope.shp'
fn_forest = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/outlines/forest_Mont-Blanc_ESACCI_delainey.shp'

pleia_ddem = gu.Raster(fn_ddem)
ref_dem = gu.Raster(fn_pleiades)
glaciers_outlines = gu.Vector(fn_shp)
forest_outlines = gu.Vector(fn_forest)
# NOTE(review): both masks are computed but never applied below — confirm whether
# forest/glacier pixels were meant to be filtered here as the original comment suggested.
mask_glacier = glaciers_outlines.create_mask(pleia_ddem)
mask_forest = forest_outlines.create_mask(pleia_ddem)

# Remove very large outliers in the elevation differences
pleia_ddem.data[np.abs(pleia_ddem.data) > 500] = np.nan

# Derive slope and the maximum absolute curvature from the reference DEM
slope, planc, profc = xdem.terrain.get_terrain_attribute(ref_dem, attribute=['slope', 'planform_curvature',
                                                                             'profile_curvature'])
maxabsc = np.maximum(np.abs(planc), np.abs(profc))


def _add_glacier_outlines(ax):
    """Overlay the RGI glacier outlines on top of the mapped attribute."""
    shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='black', alpha=0.85,
                                   facecolor='None', linewidth=0.75, zorder=20)
    ax.add_feature(shape_feature)


def _add_horizontal_cbar(ax, cmap, vmin, vmax, ticks, label):
    """Draw a labelled horizontal colorbar in an inset just below the axis."""
    cbaxes = ax.inset_axes([0.2, -0.05, 0.6, 0.025], zorder=10)
    norm = colors.Normalize(vmin=vmin, vmax=vmax)
    sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
    sm.set_array([])
    cbar = plt.colorbar(sm, cax=cbaxes, ticks=ticks, orientation='horizontal', extend='both', shrink=0.2)
    cbar.set_label(label)


# 1/ First, plot slope
fig = plt.figure(figsize=(6, 4.5))

ax = fig.add_axes([0, 0.15, 0.5, 0.8],
                  projection=ccrs.UTM(32), label='Slope')

plt_extent = [slope.bounds.left, slope.bounds.right, slope.bounds.bottom, slope.bounds.top]
slope_arr, _ = xdem.spatialstats.get_array_and_mask(slope)

cmap = plt.get_cmap('Reds')
cmap.set_bad(color='None')

_add_glacier_outlines(ax)
# interpolation='none' (the string) disables resampling; interpolation=None would
# silently fall back to the rcParams default, which interpolates.
ax.imshow(slope_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap,
          interpolation='none', zorder=2, vmin=0, vmax=90)

ax.text(0.05, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

_add_horizontal_cbar(ax, cmap, vmin=0, vmax=90, ticks=[0, 45, 90], label='Slope (degrees)')

# 2/ Then, plot maximum curvature
ax = fig.add_axes([0.5, 0.15, 0.5, 0.8],
                  projection=ccrs.UTM(32), label='Curvature')

maxc_arr, _ = xdem.spatialstats.get_array_and_mask(maxabsc)

# Build a piecewise-stretched colormap so the low-curvature range keeps contrast
col_bounds = np.array([0, 1, 5, 50])
col_list = []
cb_val = np.linspace(0, 1, len(col_bounds))
cmap = plt.get_cmap('Purples')
for j in range(len(cb_val)):
    col_list.append(cmap(cb_val[j]))
cmap_cus2 = colors.LinearSegmentedColormap.from_list('my_cb', list(
    zip((col_bounds - min(col_bounds)) / (max(col_bounds - min(col_bounds))), col_list)), N=1000)
cmap_cus2.set_bad(color='None')

_add_glacier_outlines(ax)
ax.imshow(maxc_arr[:, :], extent=plt_extent, transform=ccrs.UTM(32), cmap=cmap_cus2,
          interpolation='none', zorder=2, vmin=0, vmax=50)

ax.text(0.05, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

_add_horizontal_cbar(ax, cmap_cus2, vmin=0, vmax=50, ticks=[0, 5, 20, 50],
                     label='Maximum curvature (10$^{2}$ m$^{-1}$)')

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S4_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_s5_dh_qcorr_aster_spot5_npi.py:
--------------------------------------------------------------------------------
"""Plotting of Figure S5: ASTER-SPOT elevation difference and quality of stereo-correlation for the NPI case study"""
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import numpy as np
import cartopy.crs as ccrs
import geoutils as gu
from cartopy.io.shapereader import Reader
from cartopy.feature import ShapelyFeature
import xdem

# Input rasters and outlines
fn_ddem = '/home/atom/ongoing/work_stderr_dem/case_study_npi/dh_ASTER-SPOT5_NPI_NK_Deramp.tif'
fn_shp = '/home/atom/data/inventory_products/RGI/00_rgi60_neighb_merged/17_rgi60_SouthernAndes/17_rgi60_SouthernAndes.shp'
fn_corr='/home/atom/ongoing/work_stderr_dem/case_study_npi/AST_L1A_00303182012144228/ASTER_NPI_00303182012144228_CORR.tif'
# NOTE(review): fn_dem_aster is never used below — confirm whether it can be removed.
fn_dem_aster = '/home/atom/ongoing/work_stderr_dem/case_study_npi/AST_L1A_00303182012144228/ASTER_NPI_00303182012144228_Z.tif'

# Both panels share the same southern-hemisphere UTM 18 projection
utm_npi = ccrs.UTM(18, southern_hemisphere=True)


def _add_glacier_outlines(ax):
    """Overlay the RGI glacier outlines on top of the mapped variable."""
    shape_feature = ShapelyFeature(Reader(fn_shp).geometries(), ccrs.PlateCarree(), edgecolor='black', alpha=0.85,
                                   facecolor='None', linewidth=0.75, zorder=20)
    ax.add_feature(shape_feature)


def _add_horizontal_cbar(ax, cmap, vmin, vmax, ticks, label):
    """Draw a labelled horizontal colorbar in an inset just below the axis."""
    cbaxes = ax.inset_axes([0.2, -0.05, 0.6, 0.025], zorder=10)
    norm = colors.Normalize(vmin=vmin, vmax=vmax)
    sm = plt.cm.ScalarMappable(cmap=cmap, norm=norm)
    sm.set_array([])
    cbar = plt.colorbar(sm, cax=cbaxes, ticks=ticks, orientation='horizontal', extend='both', shrink=0.2)
    cbar.set_label(label)


# 1/ First, plot elevation differences
fig = plt.figure(figsize=(6, 5.5))

ax = fig.add_axes([0, 0.15, 0.5, 0.8],
                  projection=utm_npi, label='Elevation_change')

dh = gu.Raster(fn_ddem)
# Filter gross outliers before display
dh.data[np.abs(dh.data) > 100] = np.nan
plt_extent = [dh.bounds.left, dh.bounds.right, dh.bounds.bottom, dh.bounds.top]
dh_arr, _ = xdem.spatialstats.get_array_and_mask(dh)

cmap = plt.get_cmap('RdYlBu')
cmap.set_bad(color='None')

_add_glacier_outlines(ax)
# interpolation='none' (the string) disables resampling; interpolation=None would
# silently fall back to the rcParams default, which interpolates.
ax.imshow(dh_arr[:, :], extent=plt_extent, transform=utm_npi, cmap=cmap,
          interpolation='none', zorder=2, vmin=-20, vmax=20)

ax.text(0.05, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

_add_horizontal_cbar(ax, cmap, vmin=-20, vmax=20, ticks=[-20, 0, 20], label='Elevation difference (m)')

# 2/ Then, plot the quality of stereo-correlation from MicMac
ax = fig.add_axes([0.5, 0.15, 0.5, 0.8],
                  projection=utm_npi, label='Correlation')

corr = gu.Raster(fn_corr)
plt_extent = [corr.bounds.left, corr.bounds.right, corr.bounds.bottom, corr.bounds.top]

corr_arr, _ = xdem.spatialstats.get_array_and_mask(corr)
cmap = plt.get_cmap('Greens')
cmap.set_bad(color='None')

_add_glacier_outlines(ax)
ax.imshow(corr_arr[:, :], extent=plt_extent, transform=utm_npi, cmap=cmap,
          interpolation='none', zorder=2, vmin=0, vmax=100)

ax.text(0.05, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

_add_horizontal_cbar(ax, cmap, vmin=0, vmax=100, ticks=[0, 50, 100], label='Quality of stereo-correlation (%)')

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S5_final.png', dpi=300)
--------------------------------------------------------------------------------
/figures/fig_s8_interp_2d_hetesc_montblanc.py:
--------------------------------------------------------------------------------
"""Plotting of Figure S8: heteroscedasticity interpolated from 2D slope/curvature binning for the Mont-Blanc case study"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.colors as colors
import xdem

# Open file with binned estimates of dispersion
fn_bin = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_heteroscedas_slope_curv.csv'

df_bin = pd.read_csv(fn_bin)

# Interpolate the binned NMAD over the slope/maximum-curvature plane
fn = xdem.spatialstats.interp_nd_binning(df_bin, list_var_names=['slope_mid', 'maxc_mid'], statistic='nmad', min_count=30)

# Custom colormap anchored on the dispersion breakpoints below
cmap = plt.get_cmap('YlOrRd')
col_bounds = np.array([0.8, 1.2, 2, 3.5, 5])
col_list = []
cb_val = np.linspace(0, 1, len(col_bounds))
for j in range(len(cb_val)):
    col_list.append(cmap(cb_val[j]))
cmap_cus = colors.LinearSegmentedColormap.from_list('my_cb', list(
    zip((col_bounds - min(col_bounds)) / (max(col_bounds - min(col_bounds))), col_list)), N=1000)


def _fill_binned_panel(ax, vec_slope, vec_maxc):
    """Color each (slope, curvature) cell with the interpolated dispersion; grey where undefined."""
    for i in range(len(vec_slope) - 1):
        for j in range(len(vec_maxc) - 1):
            # Evaluate the interpolant at the cell center
            stat = fn([0.5 * (vec_slope[i] + vec_slope[i + 1]), 0.5 * (vec_maxc[j] + vec_maxc[j + 1])])
            if np.isfinite(stat):
                # Clamp into (0, 1) so values outside col_bounds still map to the extreme colors
                stat_col = max(0.0001, min(0.9999, (stat - min(col_bounds)) / (max(col_bounds) - min(col_bounds))))
                col = cmap_cus(stat_col)
            else:
                col = 'tab:gray'

            ax.fill_between(x=[vec_slope[i], vec_slope[i + 1]], y1=[vec_maxc[j], vec_maxc[j]],
                            y2=[vec_maxc[j + 1], vec_maxc[j + 1]], facecolor=col)


grid = plt.GridSpec(22, 22, wspace=0.1, hspace=0.1)

# First, plot in linear scale
fig = plt.figure(figsize=(7, 4))

ax = fig.add_subplot(grid[:, 0:9])
_fill_binned_panel(ax, np.linspace(0, 90, 100), np.linspace(0, 15, 100))

ax.set_ylim((0, 15))
ax.set_xlim((0, 90))
ax.set_ylabel('Maximum absolute curvature (10$^{2}$ m$^{-1}$)')
ax.set_xlabel('Slope (degrees)')

# Create an inset axis to manage the scale of the colormap
cbaxes = ax.inset_axes([1.1, 0.2, 0.05, 0.6], label='cmap')

# Create colormap object and plot
norm = colors.Normalize(vmin=min(col_bounds), vmax=max(col_bounds))
sm = plt.cm.ScalarMappable(cmap=cmap_cus, norm=norm)
sm.set_array([])
cbar = plt.colorbar(sm, cax=cbaxes, orientation='vertical', extend='both', shrink=0.8)
cbar.ax.tick_params(width=0.5, length=2)
# Raw string so \sigma reaches mathtext verbatim (avoids an invalid-escape warning)
cbar.set_label(r'Dispersion of elevation differences (1$\sigma$)')

ax.text(-0.025, 1.025, 'a', transform=ax.transAxes, ha='right', va='bottom', fontweight='bold', fontsize=14)

# Then, plot in log scales
ax = fig.add_subplot(grid[:, 13:])
_fill_binned_panel(ax,
                   np.exp(np.linspace(np.log(0.01), np.log(90), 100)),
                   np.exp(np.linspace(np.log(0.001), np.log(15), 100)))

ax.set_ylim((0.001, 15))
ax.set_xlim((0.1, 90))
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_ylabel('Maximum absolute curvature (10$^{2}$ m$^{-1}$)')
ax.set_xlabel('Slope (degrees)')
ax.yaxis.tick_right()
ax.yaxis.set_label_position("right")

ax.text(-0.025, 1.025, 'b', transform=ax.transAxes, ha='right', va='bottom', fontweight='bold', fontsize=14)

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S8_final.png', dpi=400)
--------------------------------------------------------------------------------
/figures/fig_s9_fit_heterosc_slope_curv.py:
--------------------------------------------------------------------------------
"""Plotting of Figure S9: 1D fit for heteroscedasticity with slope or curvature for the Mont-Blanc case study"""
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from scipy.optimize import curve_fit

# Open binned estimates of dispersion
fn_bin = '/home/atom/ongoing/work_stderr_dem/case_study_montblanc/df_heteroscedas_slope_curv.csv'

df_bin = pd.read_csv(fn_bin)

fig = plt.figure(figsize=(6, 7))

# First, fit an exponential decay of dispersion with slope
grid = plt.GridSpec(22, 23, wspace=0.1, hspace=0.1)

ax = fig.add_subplot(grid[:10, :])

# Keep only the 1D bins along slope
df_slp = df_bin[np.logical_and(df_bin.nd == 1, np.isfinite(df_bin.slope_mid))]


def slope_f(x, a, b):
    """Exponential decay towards the minimum observed dispersion."""
    return min(df_slp.nmad) + a * np.exp(-b * x / np.pi)


# NOTE(review): the last bin is excluded from the fit — presumably a poorly
# sampled/outlier bin; confirm against the binning step.
cof, _ = curve_fit(slope_f, df_slp.slope_mid.values[:-1], df_slp.nmad.values[:-1], method='trf')

x = np.linspace(0, 90)

ax.scatter(df_slp.slope_mid, df_slp.nmad, marker='x')
ax.plot(x, slope_f(x, *cof), linestyle='dashed', color='black')
ax.set_xlabel('Slope (degrees)')
# \\sigma is escaped explicitly because the real \n newline forbids a raw string here
ax.set_ylabel('Dispersion (1$\\sigma$) of\nelevation differences (m)')

ax.text(0.5, 0.9, r'$f(x) = a + b \cdot e^{-cx}$' + '\nwith a = {:.1f}, b = {:.1f} and c = {:.1f}'.format(
    min(df_slp.nmad), cof[0], cof[1] * np.pi), transform=ax.transAxes, ha='center', va='top')
ax.text(0.025, 0.95, 'a', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

# Second, fit a linear relation of dispersion with maximum curvature
ax = fig.add_subplot(grid[13:, :])

df_maxc = df_bin[np.logical_and(df_bin.nd == 1, np.isfinite(df_bin.maxc_mid))]

p = np.polyfit(df_maxc.maxc_mid.values, df_maxc.nmad.values, 1)
x = np.linspace(0, 1.2 * max(df_maxc.maxc_mid))

ax.scatter(df_maxc.maxc_mid, df_maxc.nmad, marker='x')
ax.plot(x, np.polyval(p, x), linestyle='dashed', color='black')
ax.set_xlabel('Maximum curvature categories (10$^{2}$ m$^{-1}$)')
ax.set_ylabel('Dispersion (1$\\sigma$) of\nelevation differences (m)')
ax.text(0.5, 0.9, r'$f(x) = a \cdot x + b$' + '\nwith a = {:.1f}, b = {:.1f}'.format(p[0], p[1]),
        transform=ax.transAxes, ha='center', va='top')
ax.text(0.025, 0.95, 'b', transform=ax.transAxes, ha='left', va='top', fontweight='bold', fontsize=14)

# Save to file
plt.savefig('/home/atom/ongoing/work_stderr_dem/figures/final/Figure_S9_final.png', dpi=400)
--------------------------------------------------------------------------------