├── .gitattributes ├── .github └── workflows │ └── build-pages.yml ├── .gitignore ├── LICENSE.txt ├── README.md ├── _config.yml ├── _static └── custom.css ├── _toc.yml ├── binder └── environment.yml ├── book ├── 10minutes.md ├── calving.md ├── construction.md ├── dynamics.md ├── hydro.md ├── massbalance.md ├── shop.md ├── thickness.md ├── visualisation.md └── workflow.md ├── build.sh ├── img ├── logo.png ├── mittelbergferner.mp4 └── show_viewer.gif ├── linkcheck.sh ├── notebooks ├── 10minutes │ ├── dynamical_spinup.ipynb │ ├── machine_learning.ipynb │ ├── preprocessed_directories.ipynb │ └── run_with_gcm.ipynb ├── construction │ ├── area_length_filter.ipynb │ └── inversion_with_frontal_ablation.ipynb ├── tutorials │ ├── building_the_prepro_gdirs.ipynb │ ├── centerlines_to_shape.ipynb │ ├── deal_with_errors.ipynb │ ├── dem_sources.ipynb │ ├── distribute_flowline.ipynb │ ├── dynamical_spinup.ipynb │ ├── elevation_bands_vs_centerlines.ipynb │ ├── full_prepro_workflow.ipynb │ ├── holoviz_intro.ipynb │ ├── hydrological_output.ipynb │ ├── ingest_gridded_data_on_flowlines.ipynb │ ├── inversion.ipynb │ ├── ioggm.ipynb │ ├── kcalving_parameterization.ipynb │ ├── massbalance_calibration.ipynb │ ├── massbalance_global_params.ipynb │ ├── massbalance_perturbation.ipynb │ ├── merge_gcm_runs_and_visualize.ipynb │ ├── numeric_solvers.ipynb │ ├── observed_thickness_with_dynamic_spinup.ipynb │ ├── oggm_shop.ipynb │ ├── plot_mass_balance.ipynb │ ├── preprocessing_errors.ipynb │ ├── rgitopo_rgi6.ipynb │ ├── rgitopo_rgi7.ipynb │ ├── run_with_a_spinup_and_gcm_data.ipynb │ ├── store_and_compress_glacierdirs.ipynb │ ├── use_your_own_inventory.ipynb │ ├── where_are_the_flowlines.ipynb │ └── working_with_rgi.ipynb └── welcome.ipynb ├── push.sh └── requirements.txt /.gitattributes: -------------------------------------------------------------------------------- 1 | *.ipynb filter=nbstripout 2 | *.ipynb diff=ipynb 3 | 
-------------------------------------------------------------------------------- /.github/workflows/build-pages.yml: -------------------------------------------------------------------------------- 1 | name: Build Pages 2 | 3 | on: 4 | push: 5 | branches: 6 | - master 7 | - stable 8 | - v1.5.3 9 | pull_request: 10 | branches: 11 | - master 12 | - stable 13 | - v1.5.3 14 | 15 | jobs: 16 | build_pages: 17 | name: Build Pages 18 | runs-on: ubuntu-latest 19 | container: ghcr.io/oggm/oggm:latest 20 | steps: 21 | - name: Checkout 22 | uses: actions/checkout@v4 23 | - name: Fix git permission check 24 | run: git config --global --add safe.directory "$GITHUB_WORKSPACE" 25 | - name: Cache 26 | uses: TimoRoth/cache@re-eval-key 27 | with: 28 | path: /github/home/OGGM/download_cache 29 | key: oggm-download-cache-${{ hashFiles('dl_file_list.txt') }} 30 | restore-keys: | 31 | oggm-download-cache- 32 | reeval-key: true 33 | - name: Install Dependencies 34 | run: | 35 | ${PIP} install --upgrade pip setuptools wheel 36 | ${PIP} install -r requirements.txt 37 | ${PIP} uninstall -y progressbar2 38 | - name: Build Book 39 | env: 40 | STATIC_MAP_API_KEY: ${{ secrets.STATIC_MAP_API_KEY }} 41 | run: | 42 | jupyter-book build . 
43 | - name: Upload Build Artifacts 44 | uses: actions/upload-artifact@v4 45 | with: 46 | name: build-output 47 | path: _build/ 48 | - name: Push Book 49 | if: github.event_name != 'pull_request' 50 | run: | 51 | ${PIP} install ghp-import 52 | git config user.name "${GITHUB_ACTOR}" 53 | git config user.email "${GITHUB_ACTOR}@bots.github.com" 54 | git fetch origin gh-pages 55 | ghp-import -n -p -f -b gh-pages -x "${GITHUB_REF##*/}" -m "Update ${GITHUB_REF##*/} docs from ${GITHUB_SHA}" _build/html 56 | - name: Shrink download cache 57 | shell: bash 58 | run: | 59 | set +o pipefail 60 | while [[ "$(du -sm /github/home/OGGM/download_cache | cut -f1)" -gt "9500" ]]; do 61 | FF="$(find /github/home/OGGM/download_cache -type f -exec du -a {} + | sort -nr | head -n1 | cut -f2)" 62 | echo "Deleting $FF" 63 | rm "$FF" 64 | done 65 | find /github/home/OGGM/download_cache -type f > "$GITHUB_WORKSPACE"/dl_file_list.txt 66 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Mine 2 | html/index.html 3 | .idea 4 | _build/ 5 | .virtual_documents/ 6 | out.* 7 | RGI60-* 8 | sandbox/ 9 | outputs 10 | ignore/ 11 | 12 | # Mac 13 | .DS_Store 14 | 15 | # Byte-compiled / optimized / DLL files 16 | __pycache__/ 17 | *.py[cod] 18 | *$py.class 19 | 20 | # C extensions 21 | *.so 22 | 23 | # Distribution / packaging 24 | .Python 25 | env/cd 26 | build/ 27 | develop-eggs/ 28 | dist/ 29 | downloads/ 30 | eggs/ 31 | .eggs/ 32 | lib/ 33 | lib64/ 34 | parts/ 35 | sdist/ 36 | var/ 37 | wheels/ 38 | *.egg-info/enodo 39 | .installed.cfg 40 | *.egg 41 | 42 | # PyInstaller 43 | # Usually these files are written by a python script from a template 44 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 
45 | *.manifest 46 | *.spec 47 | 48 | # Installer logs 49 | pip-log.txt 50 | pip-delete-this-directory.txt 51 | 52 | # Unit test / coverage reports 53 | htmlcov/ 54 | .tox/ 55 | .coverage 56 | .coverage.* 57 | .cache 58 | nosetests.xml 59 | coverage.xml 60 | *.cover 61 | .hypothesis/ 62 | 63 | # Translations 64 | *.mo 65 | *.pot 66 | 67 | # Django stuff: 68 | *.log 69 | local_settings.py 70 | 71 | # Flask stuff: 72 | instance/ 73 | .webassets-cache 74 | 75 | # Scrapy stuff: 76 | .scrapy 77 | 78 | # Sphinx documentation 79 | docs/_build/ 80 | 81 | # PyBuilder 82 | target/ 83 | 84 | # Jupyter Notebook 85 | .ipynb_checkpoints 86 | 87 | # pyenv 88 | .python-version 89 | 90 | # celery beat schedule file 91 | celerybeat-schedule 92 | 93 | # SageMath parsed files 94 | *.sage.py 95 | 96 | # dotenv 97 | .env 98 | 99 | # virtualenv 100 | .venv 101 | venv/ 102 | ENV/ 103 | 104 | # Spyder project settings 105 | .spyderproject 106 | .spyproject 107 | 108 | # Rope project settings 109 | .ropeproject 110 | 111 | # mkdocs documentation 112 | /site 113 | 114 | # mypy 115 | .mypy_cache/ 116 | 117 | -------------------------------------------------------------------------------- /LICENSE.txt: -------------------------------------------------------------------------------- 1 | Copyright (c) 2014-2020, OGGM e.V. and OGGM Contributors 2 | All rights reserved. 3 | 4 | Redistribution and use in source and binary forms, with or without 5 | modification, are permitted provided that the following conditions are met: 6 | 7 | 1. Redistributions of source code must retain the above copyright notice, 8 | this list of conditions and the following disclaimer. 9 | 10 | 2. Redistributions in binary form must reproduce the above copyright notice, 11 | this list of conditions and the following disclaimer in the documentation 12 | and/or other materials provided with the distribution. 13 | 14 | 3. 
Neither the name of the copyright holder nor the names of its contributors 15 | may be used to endorse or promote products derived from this software 16 | without specific prior written permission. 17 | 18 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND 19 | ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED 20 | WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE 21 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE 22 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL 23 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR 24 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER 25 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, 26 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE 27 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 28 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # OGGM tutorials 2 | 3 | Tutorial notebooks for the [OGGM](https://oggm.org) model, powered by [hub.oggm.org](https://hub.oggm.org). 
4 | 5 | Web: https://tutorials.oggm.org 6 | 7 | License: [BSD-3-Clause](https://github.com/OGGM/tutorials/blob/master/LICENSE.txt) 8 | 9 | ![img](https://docs.oggm.org/en/stable/_static/logo.png) 10 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | ####################################################################################### 2 | # A default configuration that will be loaded for all jupyter books 3 | # See the documentation for help and more options: 4 | # https://jupyterbook.org/customize/config.html 5 | 6 | ####################################################################################### 7 | # Book settings 8 | title: OGGM tutorials # The title of the book. Will be placed in the left navbar. 9 | author: OGGM e.V. and OGGM Contributors # The author of the book 10 | copyright: "2014-2025" # Copyright year to be placed in the footer 11 | logo: img/logo.png # A path to the book logo 12 | repository: 13 | url: https://github.com/OGGM/tutorials 14 | path_to_book: . 15 | branch: master 16 | launch_buttons: 17 | notebook_interface: "jupyterlab" 18 | binderhub_url: "https://mybinder.org" 19 | jupyterhub_url: "https://hub.oggm.org" 20 | html: 21 | extra_css: 22 | - _static/custom.css 23 | use_repository_button: true 24 | use_issues_button: true 25 | use_edit_page_button: true 26 | announcement: | 27 |

28 | 🚧 Scheduled maintenance: the OGGM cluster will be offline April 27 (evening CEST) – April 30 (morning CEST) 2025. 29 | Learn more. 30 |

31 | extra_footer: | 32 |

33 | These notebooks are licensed under a BSD-3-Clause license. 34 |
35 | © Copyright 2014-2025. 36 |

37 | sphinx: 38 | config: 39 | html_show_copyright: false 40 | html_last_updated_fmt: '%b %d, %Y' 41 | nb_merge_streams: true 42 | html_js_files: 43 | - ['https://plausible.oggm.org/js/script.js', {'defer': 'defer', 'data-domain': 'tutorials.oggm.org'}] 44 | execute: 45 | execute_notebooks: auto # off (for tests) 46 | timeout: -1 47 | allow_errors: true 48 | exclude_patterns: [.virtual_documents/*,README.md,sandbox/*,ignore/*,notebooks/tutorials/ioggm*, ] 49 | -------------------------------------------------------------------------------- /_static/custom.css: -------------------------------------------------------------------------------- 1 | html { 2 | --pst-font-size-h1: 2.2em; /* Default: 2.625 */ 3 | --pst-font-size-h2: 1.7em; /* Default: 2.125 */ 4 | --pst-font-size-h3: 1.3em; /* Default: 1.75 */ 5 | --pst-font-size-h4: 1.0em; /* Default: ? */ 6 | } 7 | 8 | .announcement { 9 | color: white !important; 10 | padding: 0.5em 1em; 11 | margin: 0; 12 | font-weight: 500; 13 | } 14 | 15 | .announcement a { 16 | color: white !important; 17 | text-decoration: underline; 18 | } 19 | 20 | .announcement strong { 21 | font-weight: 700; 22 | } 23 | -------------------------------------------------------------------------------- /_toc.yml: -------------------------------------------------------------------------------- 1 | # Table of contents 2 | # Learn more at https://jupyterbook.org/customize/toc.html 3 | format: jb-book 4 | root: notebooks/welcome 5 | chapters: 6 | 7 | - file: book/10minutes 8 | sections: 9 | - file: notebooks/10minutes/preprocessed_directories 10 | - file: notebooks/10minutes/run_with_gcm 11 | - file: notebooks/10minutes/machine_learning 12 | - file: notebooks/10minutes/dynamical_spinup 13 | 14 | - file: book/workflow 15 | sections: 16 | - file: notebooks/tutorials/working_with_rgi 17 | - file: notebooks/tutorials/store_and_compress_glacierdirs 18 | - file: notebooks/tutorials/deal_with_errors 19 | - file: 
notebooks/tutorials/elevation_bands_vs_centerlines 20 | - file: notebooks/tutorials/building_the_prepro_gdirs 21 | - file: notebooks/tutorials/full_prepro_workflow 22 | 23 | - file: book/massbalance 24 | sections: 25 | - file: notebooks/tutorials/massbalance_calibration 26 | - file: notebooks/tutorials/plot_mass_balance 27 | - file: notebooks/tutorials/massbalance_global_params 28 | - file: notebooks/tutorials/massbalance_perturbation 29 | 30 | - file: book/hydro 31 | sections: 32 | - file: notebooks/tutorials/hydrological_output 33 | - url: https://oggm.org/oggm-edu-notebooks/oggm-edu/glacier_water_resources.html 34 | title: Glaciers as water resources (OGGM-Edu part 1 - idealized) 35 | - url: https://oggm.org/oggm-edu-notebooks/oggm-edu/glacier_water_resources_projections.html 36 | title: Glaciers as water resources (OGGM-Edu part 2 - projections) 37 | 38 | - file: book/dynamics 39 | sections: 40 | - file: notebooks/tutorials/run_with_a_spinup_and_gcm_data 41 | - file: notebooks/tutorials/dynamical_spinup 42 | - file: notebooks/tutorials/numeric_solvers 43 | - file: notebooks/tutorials/ioggm 44 | 45 | - file: book/thickness 46 | sections: 47 | - file: notebooks/tutorials/inversion 48 | - file: notebooks/tutorials/observed_thickness_with_dynamic_spinup 49 | 50 | - file: book/calving 51 | sections: 52 | - file: notebooks/tutorials/kcalving_parameterization 53 | 54 | - file: book/shop 55 | sections: 56 | - file: notebooks/tutorials/oggm_shop 57 | - file: notebooks/tutorials/use_your_own_inventory 58 | - file: notebooks/tutorials/ingest_gridded_data_on_flowlines 59 | - file: notebooks/tutorials/dem_sources 60 | - file: notebooks/tutorials/rgitopo_rgi6 61 | - file: notebooks/tutorials/rgitopo_rgi7 62 | 63 | - file: book/visualisation 64 | sections: 65 | - file: notebooks/tutorials/distribute_flowline 66 | - file: notebooks/tutorials/where_are_the_flowlines 67 | - file: notebooks/tutorials/centerlines_to_shape 68 | - file: notebooks/tutorials/preprocessing_errors 69 | 
- file: notebooks/tutorials/merge_gcm_runs_and_visualize 70 | - file: notebooks/tutorials/holoviz_intro 71 | 72 | - file: book/construction 73 | sections: 74 | - file: notebooks/construction/inversion_with_frontal_ablation 75 | - file: notebooks/construction/area_length_filter 76 | 77 | -------------------------------------------------------------------------------- /binder/environment.yml: -------------------------------------------------------------------------------- 1 | name: oggm_env 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - jupyter 6 | - jupyterlab 7 | - numpy 8 | - scipy 9 | - pandas 10 | - shapely 11 | - matplotlib 12 | - Pillow 13 | - netcdf4 14 | - scikit-image 15 | - scikit-learn 16 | - configobj 17 | - xarray 18 | - pytest 19 | - dask 20 | - bottleneck 21 | - pyproj 22 | - geos 23 | - shapely 24 | - pyshp 25 | - geopandas 26 | - rioxarray 27 | - seaborn 28 | - pytables 29 | - salem 30 | - motionless 31 | - seaborn 32 | - holoviews 33 | - geoviews 34 | - hvplot 35 | - pip 36 | - pip: 37 | - joblib 38 | - progressbar2 39 | - git+https://github.com/OGGM/oggm-edu 40 | - oggm 41 | - cartopy 42 | -------------------------------------------------------------------------------- /book/10minutes.md: -------------------------------------------------------------------------------- 1 | # 10 minutes tutorials 2 | 3 | These new tutorials are designed to illustrate one single OGGM concept at a time. They are a good way to get started with OGGM, or for returning users to learn about new features! 4 | 5 | OK, perhaps you'll need a bit more than 10 minutes. But not *much* more! 
6 | 7 | - [](../notebooks/10minutes/preprocessed_directories.ipynb) (**start with this tutorial if you are new to OGGM**) 8 | - [](../notebooks/10minutes/run_with_gcm.ipynb) 9 | - [](../notebooks/10minutes/machine_learning.ipynb) 10 | - [](../notebooks/10minutes/dynamical_spinup.ipynb) 11 | -------------------------------------------------------------------------------- /book/calving.md: -------------------------------------------------------------------------------- 1 | # Calving 2 | 3 | - [](../notebooks/tutorials/kcalving_parameterization.ipynb) 4 | -------------------------------------------------------------------------------- /book/construction.md: -------------------------------------------------------------------------------- 1 | # Tutorials in (re-)construction 2 | 3 | - [](../notebooks/construction/inversion_with_frontal_ablation.ipynb) 4 | - [](../notebooks/construction/area_length_filter.ipynb) 5 | -------------------------------------------------------------------------------- /book/dynamics.md: -------------------------------------------------------------------------------- 1 | # Dynamical runs 2 | 3 | - [](../notebooks/tutorials/run_with_a_spinup_and_gcm_data.ipynb) 4 | - [](../notebooks/tutorials/dynamical_spinup.ipynb) 5 | - [](../notebooks/tutorials/numeric_solvers.ipynb) 6 | - [](../notebooks/tutorials/ioggm.ipynb) 7 | -------------------------------------------------------------------------------- /book/hydro.md: -------------------------------------------------------------------------------- 1 | # Hydrological output 2 | 3 | - [](../notebooks/tutorials/hydrological_output.ipynb) 4 | 5 | You might find the following notebooks in OGGM-Edu interesting as well! 
6 | - [Glaciers as water resources: part 1 (idealized climate)](https://oggm.org/oggm-edu-notebooks/oggm-edu/glacier_water_resources.html) 7 | - [Glaciers as water resources: part 2 (projections)](https://oggm.org/oggm-edu-notebooks/oggm-edu/glacier_water_resources_projections.html) 8 | -------------------------------------------------------------------------------- /book/massbalance.md: -------------------------------------------------------------------------------- 1 | # Mass balance 2 | 3 | - [](../notebooks/tutorials/plot_mass_balance.ipynb) 4 | - [](../notebooks/tutorials/massbalance_calibration.ipynb) 5 | - [](../notebooks/tutorials/massbalance_global_params.ipynb) 6 | - [](../notebooks/tutorials/massbalance_perturbation.ipynb) 7 | -------------------------------------------------------------------------------- /book/shop.md: -------------------------------------------------------------------------------- 1 | # OGGM shop and additional data 2 | 3 | - [](../notebooks/tutorials/oggm_shop.ipynb) 4 | - [](../notebooks/tutorials/use_your_own_inventory.ipynb) 5 | - [](../notebooks/tutorials/ingest_gridded_data_on_flowlines.ipynb) 6 | - [](../notebooks/tutorials/dem_sources.ipynb) 7 | - [](../notebooks/tutorials/rgitopo_rgi6.ipynb) 8 | - [](../notebooks/tutorials/rgitopo_rgi7.ipynb) 9 | -------------------------------------------------------------------------------- /book/thickness.md: -------------------------------------------------------------------------------- 1 | # Ice thickness 2 | 3 | - [](../notebooks/tutorials/inversion.ipynb) 4 | - [](../notebooks/tutorials/observed_thickness_with_dynamic_spinup.ipynb) 5 | -------------------------------------------------------------------------------- /book/visualisation.md: -------------------------------------------------------------------------------- 1 | # Visualisation and post-processing 2 | 3 | - [](../notebooks/tutorials/distribute_flowline.ipynb) 4 | - [](../notebooks/tutorials/where_are_the_flowlines.ipynb) 5 | 
- [](../notebooks/tutorials/centerlines_to_shape.ipynb) 6 | - [](../notebooks/tutorials/preprocessing_errors.ipynb) 7 | - [](../notebooks/tutorials/merge_gcm_runs_and_visualize.ipynb) 8 | - [](../notebooks/tutorials/holoviz_intro.ipynb) 9 | -------------------------------------------------------------------------------- /book/workflow.md: -------------------------------------------------------------------------------- 1 | # OGGM workflow 2 | 3 | - [](../notebooks/tutorials/working_with_rgi.ipynb) 4 | - [](../notebooks/tutorials/store_and_compress_glacierdirs.ipynb) 5 | - [](../notebooks/tutorials/deal_with_errors.ipynb) 6 | - [](../notebooks/tutorials/elevation_bands_vs_centerlines.ipynb) 7 | - [](../notebooks/tutorials/building_the_prepro_gdirs.ipynb) 8 | - [](../notebooks/tutorials/full_prepro_workflow.ipynb) 9 | -------------------------------------------------------------------------------- /build.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | jupyter-book clean --html . 4 | jupyter-book build . 
5 | -------------------------------------------------------------------------------- /img/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OGGM/tutorials/d077d8402a69d1861252f8be027cf082f1edfd56/img/logo.png -------------------------------------------------------------------------------- /img/mittelbergferner.mp4: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OGGM/tutorials/d077d8402a69d1861252f8be027cf082f1edfd56/img/mittelbergferner.mp4 -------------------------------------------------------------------------------- /img/show_viewer.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/OGGM/tutorials/d077d8402a69d1861252f8be027cf082f1edfd56/img/show_viewer.gif -------------------------------------------------------------------------------- /linkcheck.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | jupyter-book build . --builder linkcheck 4 | 5 | -------------------------------------------------------------------------------- /notebooks/10minutes/dynamical_spinup.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# 10 minutes to... understand the new dynamical spinup in OGGM v1.6" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "In this example, we showcase a recent addition to OGGM: the dynamical spinup during the historical period. We explain why this was added, and how you can use the dynamical spinup during your simulations." 
15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "**Tags:** beginner, workflow, spinup " 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "metadata": { 28 | "tags": [] 29 | }, 30 | "outputs": [], 31 | "source": [ 32 | "# Libs\n", 33 | "import xarray as xr\n", 34 | "import matplotlib.pyplot as plt\n", 35 | "\n", 36 | "# Locals\n", 37 | "import oggm.cfg as cfg\n", 38 | "from oggm import utils, workflow, tasks, DEFAULT_BASE_URL\n", 39 | "from oggm.shop import gcm_climate" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": { 45 | "tags": [] 46 | }, 47 | "source": [ 48 | "## Accessing the pre-processed directories including spinup runs" 49 | ] 50 | }, 51 | { 52 | "cell_type": "markdown", 53 | "metadata": {}, 54 | "source": [ 55 | "Let's focus on our usual glacier: Hintereisferner." 56 | ] 57 | }, 58 | { 59 | "cell_type": "code", 60 | "execution_count": null, 61 | "metadata": { 62 | "tags": [] 63 | }, 64 | "outputs": [], 65 | "source": [ 66 | "# Initialize OGGM and set up the default run parameters\n", 67 | "cfg.initialize(logging_level='WARNING')\n", 68 | "\n", 69 | "# Local working directory (where OGGM will write its output)\n", 70 | "cfg.PATHS['working_dir'] = utils.gettempdir('OGGM_gcm_run', reset=True)\n", 71 | "\n", 72 | "# RGI glacier \n", 73 | "rgi_ids = 'RGI60-11.00897'" 74 | ] 75 | }, 76 | { 77 | "cell_type": "markdown", 78 | "metadata": {}, 79 | "source": [ 80 | "To fetch the preprocessed directories including spinup, we have to tell OGGM where to find them. 
The default URL contains the runs with spinup:" 81 | ] 82 | }, 83 | { 84 | "cell_type": "code", 85 | "execution_count": null, 86 | "metadata": { 87 | "tags": [] 88 | }, 89 | "outputs": [], 90 | "source": [ 91 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=5, prepro_base_url=DEFAULT_BASE_URL)" 92 | ] 93 | }, 94 | { 95 | "cell_type": "markdown", 96 | "metadata": {}, 97 | "source": [ 98 | "## A new workflow including a recalibration" 99 | ] 100 | }, 101 | { 102 | "cell_type": "markdown", 103 | "metadata": { 104 | "tags": [] 105 | }, 106 | "source": [ 107 | "These directories are very similar to the \"old\" ones (same input data, same baseline climate...). But in addition, they include a new historical simulation run with a dynamic spinup. Let's open it and compare it to the old historical run without a spinup:" 108 | ] 109 | }, 110 | { 111 | "cell_type": "code", 112 | "execution_count": null, 113 | "metadata": { 114 | "tags": [] 115 | }, 116 | "outputs": [], 117 | "source": [ 118 | "# open the new historical run including a dynamic spinup\n", 119 | "ds_spinup = utils.compile_run_output(gdirs, input_filesuffix='_spinup_historical')\n", 120 | "\n", 121 | "# open the old historical run without a spinup\n", 122 | "ds_historical = utils.compile_run_output(gdirs, input_filesuffix='_historical')\n", 123 | "\n", 124 | "# compare area and volume evolution\n", 125 | "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(14, 4))\n", 126 | "\n", 127 | "# Area\n", 128 | "ds_spinup.area.plot(ax=ax1, label='dynamic spinup')\n", 129 | "ds_historical.area.plot(ax=ax1, label='no spinup')\n", 130 | "ax1.set_title('Area [m2]')\n", 131 | "\n", 132 | "# Volume\n", 133 | "ds_spinup.volume.plot(ax=ax2, label='dynamic spinup')\n", 134 | "ds_historical.volume.plot(ax=ax2, label='no spinup')\n", 135 | "ax2.set_title('Volume [m3]')\n", 136 | "\n", 137 | "plt.legend();" 138 | ] 139 | }, 140 | { 141 | "cell_type": "markdown", 142 | "metadata": {}, 143 | "source": [ 144 | "Let's have 
a look at what happens here." 145 | ] 146 | }, 147 | { 148 | "cell_type": "markdown", 149 | "metadata": {}, 150 | "source": [ 151 | "### Dynamic spinup run extends further back in time" 152 | ] 153 | }, 154 | { 155 | "cell_type": "markdown", 156 | "metadata": {}, 157 | "source": [ 158 | "The first thing to notice is that the new dynamic spinup run extends further back in time, starting in 1979 compared to 2003 (the RGI date for this glacier)." 159 | ] 160 | }, 161 | { 162 | "cell_type": "code", 163 | "execution_count": null, 164 | "metadata": { 165 | "tags": [] 166 | }, 167 | "outputs": [], 168 | "source": [ 169 | "gdirs[0].rgi_date" 170 | ] 171 | }, 172 | { 173 | "cell_type": "markdown", 174 | "metadata": {}, 175 | "source": [ 176 | "We achieve this by searching for a glacier state in 1979 which evolves to match the area at the RGI date. Therefore, you can see that the areas around the RGI date (2003) are very close.\n", 177 | "\n", 178 | "However, the volumes show some difference around the RGI date, as we did not attempt to match the volume. The current workflow can match area OR volume and, by default, we decided to match area as it is a direct observation (from the RGI outlines), in contrast to a model guess for the volume (e.g. [Farinotti et al. 2019](https://www.nature.com/articles/s41561-019-0300-3))." 179 | ] 180 | }, 181 | { 182 | "cell_type": "markdown", 183 | "metadata": {}, 184 | "source": [ 185 | "### Dynamical spinup also uses a dynamically recalibrated melt factor *melt_f*" 186 | ] 187 | }, 188 | { 189 | "cell_type": "markdown", 190 | "metadata": {}, 191 | "source": [ 192 | "The second big difference is not directly visible, but during the dynamic spinup, we check that the dynamically modelled geodetic mass balance fits the given observations from [Hugonnet et al. (2021)](https://www.nature.com/articles/s41586-021-03436-z). 
To achieve this, we use the *melt_f* of the mass balance as a tuning variable.\n", 193 | "\n", 194 | "We need this step because the initial mass balance model calibration (see this [tutorial](../tutorials/massbalance_calibration.ipynb)) assumes constant glacier surface geometry, as defined by the RGI outline. However, the observed geodetic mass balance also contains surface geometry changes, which we only can consider during a dynamic model run.\n", 195 | "\n", 196 | "Let's check that the dynamically calibrated geodetic mass balance fits the given observations:" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": { 203 | "tags": [] 204 | }, 205 | "outputs": [], 206 | "source": [ 207 | "gdir = gdirs[0]\n", 208 | "\n", 209 | "# period of geodetic mass balance\n", 210 | "ref_period = cfg.PARAMS['geodetic_mb_period']\n", 211 | "\n", 212 | "# open the observation with uncertainty\n", 213 | "df_ref_dmdtda = utils.get_geodetic_mb_dataframe().loc[gdir.rgi_id] # get the data from Hugonnet et al., 2021\n", 214 | "df_ref_dmdtda = df_ref_dmdtda.loc[df_ref_dmdtda['period'] == ref_period] # only select the desired period\n", 215 | "dmdtda_reference = df_ref_dmdtda['dmdtda'].values[0] * 1000 # get the reference dmdtda and convert into kg m-2 yr-1\n", 216 | "dmdtda_reference_error = df_ref_dmdtda['err_dmdtda'].values[0] * 1000 # corresponding uncertainty\n", 217 | "\n", 218 | "# calculate dynamic geodetic mass balance\n", 219 | "def get_dmdtda(ds):\n", 220 | " yr0_ref_mb, yr1_ref_mb = ref_period.split('_')\n", 221 | " yr0_ref_mb = int(yr0_ref_mb.split('-')[0])\n", 222 | " yr1_ref_mb = int(yr1_ref_mb.split('-')[0])\n", 223 | "\n", 224 | " return ((ds.volume.loc[yr1_ref_mb].values[0] -\n", 225 | " ds.volume.loc[yr0_ref_mb].values[0]) /\n", 226 | " gdir.rgi_area_m2 /\n", 227 | " (yr1_ref_mb - yr0_ref_mb) *\n", 228 | " cfg.PARAMS['ice_density'])\n", 229 | "\n", 230 | "print(f'Reference dmdtda 2000 to 2020 (Hugonnet 2021): 
{dmdtda_reference:.2f} +/- {dmdtda_reference_error:6.2f} kg m-2 yr-1')\n", 231 | "print(f'Dynamic spinup dmdtda 2000 to 2020: {float(get_dmdtda(ds_spinup)):.2f} kg m-2 yr-1')\n", 232 | "print(f\"Dynamically calibrated melt_f: {gdir.read_json('mb_calib')['melt_f']:.1f} kg m-2 day-1 °C-1\")" 233 | ] 234 | }, 235 | { 236 | "cell_type": "markdown", 237 | "metadata": {}, 238 | "source": [ 239 | "This fits quite well! The default in OGGM is to try to match the observations within 20% of the reported error by [Hugonnet et al. (2021)](https://www.nature.com/articles/s41586-021-03436-z). This is a model option, and can be changed at will." 240 | ] 241 | }, 242 | { 243 | "cell_type": "markdown", 244 | "metadata": {}, 245 | "source": [ 246 | "### Dynamical spinup addresses \"initial shock\" problems" 247 | ] 248 | }, 249 | { 250 | "cell_type": "markdown", 251 | "metadata": {}, 252 | "source": [ 253 | "This is not really visible in the plots above, but the \"old\" method of initialisation in OGGM had another issue. It assumed dynamical steady state at the beginning of the simulation (the RGI date), which was required by the bed inversion process. This could lead to artifacts (mainly in the glacier length and area, as well as velocities) during the first few years of the simulation. The dynamical spinup addresses this issue by starting the simulation in 1980. \n", 254 | "\n", 255 | "One of the ways to see the importance of the spinup is to have a look at glacier velocities. 
Let's plot glacier velocities along the flowline in the year 2005 (the first year we have velocities from both the dynamical spinup, and without the spinup (\"cold start\" from an equilibrium)):" 256 | ] 257 | }, 258 | { 259 | "cell_type": "code", 260 | "execution_count": null, 261 | "metadata": {}, 262 | "outputs": [], 263 | "source": [ 264 | "f = gdir.get_filepath('fl_diagnostics', filesuffix='_historical')\n", 265 | "with xr.open_dataset(f, group=f'fl_0') as dg:\n", 266 | " dgno = dg.load()\n", 267 | "f = gdir.get_filepath('fl_diagnostics', filesuffix='_spinup_historical')\n", 268 | "with xr.open_dataset(f, group=f'fl_0') as dg:\n", 269 | " dgspin = dg.load()\n", 270 | "\n", 271 | "year = 2005\n", 272 | "dgno.ice_velocity_myr.sel(time=year).plot(label='No spinup');\n", 273 | "dgspin.ice_velocity_myr.sel(time=year).plot(label='With spinup');\n", 274 | "plt.title(f'Velocity along the flowline at year {year}'); plt.legend();" 275 | ] 276 | }, 277 | { 278 | "cell_type": "markdown", 279 | "metadata": {}, 280 | "source": [ 281 | "Ice velocities in the spinup case are considerably lower because they take into account the current retreat and past history of the glacier, while the blue line is the velocity of a glacier just getting out of steady state." 282 | ] 283 | }, 284 | { 285 | "cell_type": "markdown", 286 | "metadata": {}, 287 | "source": [ 288 | "## Using the dynamic spinup in your workflow" 289 | ] 290 | }, 291 | { 292 | "cell_type": "markdown", 293 | "metadata": {}, 294 | "source": [ 295 | "We recommend that you use the provided preprocessed directories for your analysis. However, if you want to learn more about how the dynamic spinup works in detail or if you plan to use it in your workflow, maybe with different data, you should check out the more comprehensive tutorial: [Dynamic spinup and dynamic melt_f calibration for past simulations](../tutorials/dynamical_spinup.ipynb). 
And do not hesitate to [reach out](https://docs.oggm.org/en/stable/#get-in-touch) if you have any questions!" 296 | ] 297 | }, 298 | { 299 | "cell_type": "markdown", 300 | "metadata": {}, 301 | "source": [ 302 | "## What's next?\n", 303 | "\n", 304 | "- Look at the more comprehensive tutorial [Dynamic spinup and dynamic melt_f calibration for past simulations](../tutorials/dynamical_spinup.ipynb)\n", 305 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 306 | "- back to the [table of contents](../welcome.ipynb)" 307 | ] 308 | } 309 | ], 310 | "metadata": { 311 | "hide_input": false, 312 | "kernelspec": { 313 | "display_name": "Python 3 (ipykernel)", 314 | "language": "python", 315 | "name": "python3" 316 | }, 317 | "language_info": { 318 | "codemirror_mode": { 319 | "name": "ipython", 320 | "version": 3 321 | }, 322 | "file_extension": ".py", 323 | "mimetype": "text/x-python", 324 | "name": "python", 325 | "nbconvert_exporter": "python", 326 | "pygments_lexer": "ipython3", 327 | "version": "3.12.4" 328 | }, 329 | "latex_envs": { 330 | "LaTeX_envs_menu_present": true, 331 | "autoclose": false, 332 | "autocomplete": true, 333 | "bibliofile": "biblio.bib", 334 | "cite_by": "apalike", 335 | "current_citInitial": 1, 336 | "eqLabelWithNumbers": true, 337 | "eqNumInitial": 1, 338 | "hotkeys": { 339 | "equation": "Ctrl-E", 340 | "itemize": "Ctrl-I" 341 | }, 342 | "labels_anchors": false, 343 | "latex_user_defs": false, 344 | "report_style_numbering": false, 345 | "user_envs_cfg": false 346 | }, 347 | "nbTranslate": { 348 | "displayLangs": [ 349 | "*" 350 | ], 351 | "hotkey": "alt-t", 352 | "langInMainMenu": true, 353 | "sourceLang": "en", 354 | "targetLang": "fr", 355 | "useGoogleTranslate": true 356 | }, 357 | "toc": { 358 | "base_numbering": 1, 359 | "nav_menu": {}, 360 | "number_sections": false, 361 | "sideBar": true, 362 | "skip_h1_title": true, 363 | "title_cell": "Table of Contents", 364 | "title_sidebar": "Contents", 365 | "toc_cell": false, 366 | 
"toc_position": {}, 367 | "toc_section_display": true, 368 | "toc_window_display": false 369 | } 370 | }, 371 | "nbformat": 4, 372 | "nbformat_minor": 4 373 | } 374 | -------------------------------------------------------------------------------- /notebooks/construction/area_length_filter.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Filter the glacier length and area time series" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "In this short tutorial, we show how to deal with unwanted \"spikes\" in the length and area time series of individual glaciers. These happen because OGGM currently doesn't differentiate between snow and ice, i.e. occasional years with large snowfall can artificially increase the glacier area.\n", 15 | "\n", 16 | "While the best solution would be to deal with this in OGGM, this is currently not possible because we do not have a generally applicable solution to this problem. In the meantime, we recommend a simple workaround." 
17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## Set-up " 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": {}, 30 | "outputs": [], 31 | "source": [ 32 | "import matplotlib.pyplot as plt\n", 33 | "import xarray as xr\n", 34 | "import os" 35 | ] 36 | }, 37 | { 38 | "cell_type": "code", 39 | "execution_count": null, 40 | "metadata": {}, 41 | "outputs": [], 42 | "source": [ 43 | "from oggm import cfg, utils, workflow, tasks\n", 44 | "cfg.initialize()" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": null, 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "cfg.PATHS['working_dir'] = utils.gettempdir(dirname='OGGM-Filter')" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "### Define the glaciers for the run " 61 | ] 62 | }, 63 | { 64 | "cell_type": "markdown", 65 | "metadata": {}, 66 | "source": [ 67 | "We take the Kesselwandferner in the Austrian Alps:" 68 | ] 69 | }, 70 | { 71 | "cell_type": "code", 72 | "execution_count": null, 73 | "metadata": {}, 74 | "outputs": [], 75 | "source": [ 76 | "rgi_ids = ['RGI60-11.00787']" 77 | ] 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "metadata": {}, 82 | "source": [ 83 | "### Glacier directories " 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": null, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "# in OGGM v1.6 you have to explicitly indicate the url from where you want to start from\n", 93 | "# we will use here the elevation band flowlines which are much simpler than the centerlines\n", 94 | "base_url = ('https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/'\n", 95 | " 'L3-L5_files/2023.3/elev_bands/W5E5/')\n", 96 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=5, prepro_border=80,\n", 97 | " prepro_base_url=base_url)" 98 | ] 99 | }, 100 | { 101 | "cell_type": "markdown", 102 | 
"metadata": {}, 103 | "source": [ 104 | "## Run" 105 | ] 106 | }, 107 | { 108 | "cell_type": "markdown", 109 | "metadata": {}, 110 | "source": [ 111 | "We can step directly to a new experiment! This runs under a random climate representative for the recent climate (1985-2015) and a warm temperature bias:" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | "metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "workflow.execute_entity_task(tasks.run_random_climate, gdirs,\n", 121 | " nyears=200, y0=2000, seed=5,\n", 122 | " output_filesuffix='_commitment');" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "## The problem " 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "ds = utils.compile_run_output(gdirs, input_filesuffix='_commitment')\n", 139 | "ds = ds.isel(rgi_id=0) # take just the one glacier" 140 | ] 141 | }, 142 | { 143 | "cell_type": "code", 144 | "execution_count": null, 145 | "metadata": {}, 146 | "outputs": [], 147 | "source": [ 148 | "ds.area.plot();" 149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": null, 154 | "metadata": {}, 155 | "outputs": [], 156 | "source": [ 157 | "ds.length.plot();" 158 | ] 159 | }, 160 | { 161 | "cell_type": "markdown", 162 | "metadata": {}, 163 | "source": [ 164 | "For small areas, the glacier has the unrealistic \"spikes\" described above." 
165 | ] 166 | }, 167 | { 168 | "cell_type": "markdown", 169 | "metadata": {}, 170 | "source": [ 171 | "## Workaround " 172 | ] 173 | }, 174 | { 175 | "cell_type": "markdown", 176 | "metadata": {}, 177 | "source": [ 178 | "A good way to deal with the issue is to run a moving filter which keeps the smallest area or length in a given window size:" 179 | ] 180 | }, 181 | { 182 | "cell_type": "code", 183 | "execution_count": null, 184 | "metadata": {}, 185 | "outputs": [], 186 | "source": [ 187 | "roll_yrs = 5" 188 | ] 189 | }, 190 | { 191 | "cell_type": "code", 192 | "execution_count": null, 193 | "metadata": {}, 194 | "outputs": [], 195 | "source": [ 196 | "# Take the minimum out of 5 years\n", 197 | "ts = ds.area.to_series()\n", 198 | "ts = ts.rolling(roll_yrs).min()\n", 199 | "ts.iloc[0:roll_yrs] = ts.iloc[roll_yrs]" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": null, 205 | "metadata": {}, 206 | "outputs": [], 207 | "source": [ 208 | "# Plot\n", 209 | "ds.area.plot(label='Original');\n", 210 | "ts.plot(label='Filtered');\n", 211 | "plt.legend();" 212 | ] 213 | }, 214 | { 215 | "cell_type": "markdown", 216 | "metadata": {}, 217 | "source": [ 218 | "It works the same with length:" 219 | ] 220 | }, 221 | { 222 | "cell_type": "code", 223 | "execution_count": null, 224 | "metadata": {}, 225 | "outputs": [], 226 | "source": [ 227 | "# Take the minimum out of 5 years\n", 228 | "ts = ds.length.to_series()\n", 229 | "ts = ts.rolling(roll_yrs).min()\n", 230 | "ts.iloc[0:roll_yrs] = ts.iloc[roll_yrs]\n", 231 | "# Plot\n", 232 | "ds.length.plot(label='Original');\n", 233 | "ts.plot(label='Filtered');\n", 234 | "plt.legend();" 235 | ] 236 | }, 237 | { 238 | "cell_type": "markdown", 239 | "metadata": {}, 240 | "source": [ 241 | "## What's next?\n", 242 | "\n", 243 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 244 | "- back to the [table of contents](../welcome.ipynb)" 245 | ] 246 | } 247 | ], 248 | "metadata": { 249 | 
"hide_input": false, 250 | "kernelspec": { 251 | "display_name": "Python 3 (ipykernel)", 252 | "language": "python", 253 | "name": "python3" 254 | }, 255 | "language_info": { 256 | "codemirror_mode": { 257 | "name": "ipython", 258 | "version": 3 259 | }, 260 | "file_extension": ".py", 261 | "mimetype": "text/x-python", 262 | "name": "python", 263 | "nbconvert_exporter": "python", 264 | "pygments_lexer": "ipython3", 265 | "version": "3.11.4" 266 | }, 267 | "toc": { 268 | "base_numbering": 1, 269 | "nav_menu": {}, 270 | "number_sections": false, 271 | "sideBar": true, 272 | "skip_h1_title": true, 273 | "title_cell": "Table of Contents", 274 | "title_sidebar": "Contents", 275 | "toc_cell": false, 276 | "toc_position": {}, 277 | "toc_section_display": true, 278 | "toc_window_display": false 279 | } 280 | }, 281 | "nbformat": 4, 282 | "nbformat_minor": 4 283 | } 284 | -------------------------------------------------------------------------------- /notebooks/construction/inversion_with_frontal_ablation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Ice thickness inversion with frontal ablation" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "This notebook has been temporarily removed from the tutorials.\n", 15 | "\n", 16 | "The notebook worked with OGGM v1.5.3: " 17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## What's next?\n", 24 | "\n", 25 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 26 | "- back to the [table of contents](../welcome.ipynb)" 27 | ] 28 | } 29 | ], 30 | "metadata": { 31 | "celltoolbar": "Raw Cell Format", 32 | "hide_input": false, 33 | "kernelspec": { 34 | "display_name": "Python 3 (ipykernel)", 35 | "language": "python", 36 | "name": "python3" 37 | }, 38 | "language_info": { 39 | "codemirror_mode": 
{ 40 | "name": "ipython", 41 | "version": 3 42 | }, 43 | "file_extension": ".py", 44 | "mimetype": "text/x-python", 45 | "name": "python", 46 | "nbconvert_exporter": "python", 47 | "pygments_lexer": "ipython3", 48 | "version": "3.11.4" 49 | }, 50 | "latex_envs": { 51 | "LaTeX_envs_menu_present": true, 52 | "autoclose": false, 53 | "autocomplete": true, 54 | "bibliofile": "biblio.bib", 55 | "cite_by": "apalike", 56 | "current_citInitial": 1, 57 | "eqLabelWithNumbers": true, 58 | "eqNumInitial": 1, 59 | "hotkeys": { 60 | "equation": "Ctrl-E", 61 | "itemize": "Ctrl-I" 62 | }, 63 | "labels_anchors": false, 64 | "latex_user_defs": false, 65 | "report_style_numbering": false, 66 | "user_envs_cfg": false 67 | }, 68 | "nbTranslate": { 69 | "displayLangs": [ 70 | "*" 71 | ], 72 | "hotkey": "alt-t", 73 | "langInMainMenu": true, 74 | "sourceLang": "en", 75 | "targetLang": "fr", 76 | "useGoogleTranslate": true 77 | }, 78 | "toc": { 79 | "base_numbering": 1, 80 | "nav_menu": {}, 81 | "number_sections": false, 82 | "sideBar": true, 83 | "skip_h1_title": true, 84 | "title_cell": "Table of Contents", 85 | "title_sidebar": "Contents", 86 | "toc_cell": false, 87 | "toc_position": {}, 88 | "toc_section_display": true, 89 | "toc_window_display": false 90 | } 91 | }, 92 | "nbformat": 4, 93 | "nbformat_minor": 4 94 | } 95 | -------------------------------------------------------------------------------- /notebooks/tutorials/centerlines_to_shape.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Compute smoother centerlines for shapefile output" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "This notebook describes how to compute centerlines with OGGM and write them to disk. 
It is meant for users who are mostly interested in the centerlines, not so much the rest of the OGGM model.\n", 15 | "\n", 16 | "We use an example of a user-provided glacier inventory and DEM (thanks to [Liss Andreassen](https://www.nve.no/hydrology/our-researchers/liss-marie-andreassen/) for providing the data)." 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "metadata": {}, 23 | "outputs": [], 24 | "source": [ 25 | "import rioxarray as rioxr\n", 26 | "import geopandas as gpd\n", 27 | "import matplotlib.pyplot as plt\n", 28 | "\n", 29 | "from oggm import cfg, utils, workflow, tasks" 30 | ] 31 | }, 32 | { 33 | "cell_type": "markdown", 34 | "metadata": {}, 35 | "source": [ 36 | "## Data preparation" 37 | ] 38 | }, 39 | { 40 | "cell_type": "markdown", 41 | "metadata": {}, 42 | "source": [ 43 | "Download the demo data. This is a subset of a regional glacier inventory and DEM in Norway:" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": null, 49 | "metadata": {}, 50 | "outputs": [], 51 | "source": [ 52 | "fpath_inventory = utils.file_downloader('https://cluster.klima.uni-bremen.de/~oggm/tutorials/Norway_Inventory_sel.zip')\n", 53 | "fpath_dem = utils.file_downloader('https://cluster.klima.uni-bremen.de/~oggm/tutorials/Norway_DEM_sel.tif')" 54 | ] 55 | }, 56 | { 57 | "cell_type": "markdown", 58 | "metadata": {}, 59 | "source": [ 60 | "Read the data and plot it:" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": null, 66 | "metadata": {}, 67 | "outputs": [], 68 | "source": [ 69 | "inventory = gpd.read_file(fpath_inventory)\n", 70 | "dem = rioxr.open_rasterio(fpath_dem)\n", 71 | "\n", 72 | "f, ax = plt.subplots(figsize=(9, 9))\n", 73 | "dem.plot(ax=ax, cmap='terrain', vmin=0);\n", 74 | "inventory.plot(ax=ax, edgecolor='k', facecolor='C1');" 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "The resolution of the DEM is 10m:" 82 | ] 83 | }, 84 | { 85 
| "cell_type": "code", 86 | "execution_count": null, 87 | "metadata": {}, 88 | "outputs": [], 89 | "source": [ 90 | "print((dem.x[1] - dem.x[0]).item())" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "In this inventory, one geometry has a topological error (the figure of eight where the outlines touch):" 98 | ] 99 | }, 100 | { 101 | "cell_type": "code", 102 | "execution_count": null, 103 | "metadata": {}, 104 | "outputs": [], 105 | "source": [ 106 | "inventory.loc[~inventory.is_valid].plot();" 107 | ] 108 | }, 109 | { 110 | "cell_type": "markdown", 111 | "metadata": {}, 112 | "source": [ 113 | "Let's correct it:" 114 | ] 115 | }, 116 | { 117 | "cell_type": "code", 118 | "execution_count": null, 119 | "metadata": {}, 120 | "outputs": [], 121 | "source": [ 122 | "inventory.loc[~inventory.is_valid, 'geometry'] = inventory.loc[~inventory.is_valid].buffer(0)" 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "A final preparation step is to convert the format of the inventory to a file which resembles the RGI (see [use_your_own_inventory.ipynb](use_your_own_inventory.ipynb)):" 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "metadata": {}, 136 | "outputs": [], 137 | "source": [ 138 | "# We keep the original ID for later reference\n", 139 | "gdf = utils.cook_rgidf(inventory, o1_region='08', assign_column_values={'breID':'breID'})" 140 | ] 141 | }, 142 | { 143 | "cell_type": "markdown", 144 | "metadata": {}, 145 | "source": [ 146 | "## Compute the centerlines" 147 | ] 148 | }, 149 | { 150 | "cell_type": "markdown", 151 | "metadata": {}, 152 | "source": [ 153 | "We use the standard OGGM procedure for this:" 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": null, 159 | "metadata": {}, 160 | "outputs": [], 161 | "source": [ 162 | "cfg.initialize(logging_level='WARNING')\n", 163 | "\n", 164 | "# Parameters\n", 
165 | "cfg.PARAMS['use_multiprocessing'] = True # this is often a good idea\n", 166 | "cfg.PARAMS['use_rgi_area'] = False # this is required for user-defined inventories\n", 167 | "cfg.PARAMS['use_intersects'] = False # we don't care about intersects for centerlines\n", 168 | "cfg.PARAMS['border'] = 10 # no need to make a large map\n", 169 | "\n", 170 | "# Optional: change the grid resolution\n", 171 | "# E.g. fixed grid spacing\n", 172 | "# cfg.PARAMS['grid_dx_method'] = 'fixed'\n", 173 | "# cfg.PARAMS['fixed_dx'] = 10\n", 174 | "# Or variable but twice higher than default \n", 175 | "cfg.PARAMS['grid_dx_method'] = 'square'\n", 176 | "cfg.PARAMS['d1'] = 7 # (default is 14)\n", 177 | "cfg.PARAMS['d2'] = 5 # (default is 10)\n", 178 | "cfg.PARAMS['dmax'] = 200 # (default is 100)\n", 179 | "\n", 180 | "# Tell OGGM to use our user DEM (important!)\n", 181 | "cfg.PATHS['dem_file'] = fpath_dem\n", 182 | "\n", 183 | "# Where to work\n", 184 | "cfg.PATHS['working_dir'] = utils.gettempdir(dirname='NORWAY_CENTERLINES', reset=True)" 185 | ] 186 | }, 187 | { 188 | "cell_type": "markdown", 189 | "metadata": {}, 190 | "source": [ 191 | "Now the workflow: " 192 | ] 193 | }, 194 | { 195 | "cell_type": "code", 196 | "execution_count": null, 197 | "metadata": {}, 198 | "outputs": [], 199 | "source": [ 200 | "gdirs = workflow.init_glacier_directories(gdf)\n", 201 | "\n", 202 | "workflow.execute_entity_task(tasks.define_glacier_region, gdirs, source='USER'); # Use the user DEM\n", 203 | "\n", 204 | "workflow.execute_entity_task(tasks.glacier_masks, gdirs);\n", 205 | "workflow.execute_entity_task(tasks.compute_centerlines, gdirs);" 206 | ] 207 | }, 208 | { 209 | "cell_type": "markdown", 210 | "metadata": {}, 211 | "source": [ 212 | "**Note: the default in OGGM is to use a grid size of varying resolution for each glacier. I think it makes sense in many cases, but you may prefer to use the native resolution of your DEM. 
You can do so by commenting / un-commenting the options above.**" 213 | ] 214 | }, 215 | { 216 | "cell_type": "markdown", 217 | "metadata": {}, 218 | "source": [ 219 | "## Write the data to a shapefile with optional smoothing" 220 | ] 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "metadata": {}, 225 | "source": [ 226 | "The relevant task is \"write_centerlines_to_shape\", which writes everything to a shapefile:" 227 | ] 228 | }, 229 | { 230 | "cell_type": "code", 231 | "execution_count": null, 232 | "metadata": {}, 233 | "outputs": [], 234 | "source": [ 235 | "from oggm.utils import write_centerlines_to_shape, mkdir\n", 236 | "\n", 237 | "# We want to write in here\n", 238 | "mkdir('outputs')\n", 239 | "\n", 240 | "write_centerlines_to_shape(gdirs, # The glaciers to process\n", 241 | " path='outputs/Norway_Centerlines.shp', # The output file\n", 242 | " to_tar=False, # set to True to put everything into one single tar file\n", 243 | " to_crs=inventory.crs, # Write into the projection of the original inventory\n", 244 | " keep_main_only=True, # Write only the main flowline and discard the tributaries\n", 245 | " )" 246 | ] 247 | }, 248 | { 249 | "cell_type": "markdown", 250 | "metadata": {}, 251 | "source": [ 252 | "Let's have a look at the output:" 253 | ] 254 | }, 255 | { 256 | "cell_type": "code", 257 | "execution_count": null, 258 | "metadata": {}, 259 | "outputs": [], 260 | "source": [ 261 | "cls_default = gpd.read_file('outputs/Norway_Centerlines.shp')\n", 262 | "cls_default['breID'] = gdf['breID'] # This only works this way because we have one centerline per glacier!\n", 263 | "\n", 264 | "cls_default.head()" 265 | ] 266 | }, 267 | { 268 | "cell_type": "markdown", 269 | "metadata": {}, 270 | "source": [ 271 | "`LE_SEGMENT` is the length of the centerline in meters. The RGI \"IDs\" are fake (OGGM needs them) but the breID are real. 
Lets use them as index for the file:" 272 | ] 273 | }, 274 | { 275 | "cell_type": "code", 276 | "execution_count": null, 277 | "metadata": {}, 278 | "outputs": [], 279 | "source": [ 280 | "cls_default = cls_default.set_index('breID')\n", 281 | "orig_inventory = inventory.set_index('breID')" 282 | ] 283 | }, 284 | { 285 | "cell_type": "markdown", 286 | "metadata": {}, 287 | "source": [ 288 | "Now we can plot an example:" 289 | ] 290 | }, 291 | { 292 | "cell_type": "code", 293 | "execution_count": null, 294 | "metadata": {}, 295 | "outputs": [], 296 | "source": [ 297 | "sel_breID = 1189 # 5570\n", 298 | "\n", 299 | "f, ax = plt.subplots(figsize=(9, 4))\n", 300 | "orig_inventory.loc[[sel_breID]].plot(ax=ax, facecolor='lightblue');\n", 301 | "cls_default.loc[[sel_breID]].plot(ax=ax);" 302 | ] 303 | }, 304 | { 305 | "cell_type": "markdown", 306 | "metadata": {}, 307 | "source": [ 308 | "What can we see? \n", 309 | "\n", 310 | "- the centerline does not end *exactly* at the glacier outline\n", 311 | "- the line seems \"crooked\", it has sudden turns\n", 312 | "\n", 313 | "Both effects are due to the algorithm we use to compute the centerlines ([Kienholz et al., (2014)](https://tc.copernicus.org/articles/8/503/2014/)),\n", 314 | "which works on the underlying glacier grid. Each vertice (point) in the line corresponds to the center of the grid point.\n", 315 | "\n", 316 | "**We have implemented a few new options in OGGM v1.6, which allow to circumvent these limitations**. 
We illustrate them here:" 317 | ] 318 | }, 319 | { 320 | "cell_type": "code", 321 | "execution_count": null, 322 | "metadata": {}, 323 | "outputs": [], 324 | "source": [ 325 | "write_centerlines_to_shape(gdirs, # The glaciers to process\n", 326 | " path='outputs/Norway_Centerlines_smooth.shp', # The output file\n", 327 | " to_tar=False, # set to True to put everything into one single tar file\n", 328 | " to_crs=inventory.crs, # Write into the projection of the original inventory\n", 329 | " keep_main_only=True, # Write only the main flowline and discard the tributaries\n", 330 | " ensure_exterior_match=True, # NEW! Ensure that the lines are touching the outlines\n", 331 | " simplify_line_before=0.75, # NEW! this option reduces the number of vertices along the line\n", 332 | " corner_cutting=3, # NEW! this then augments the number of vertices again\n", 333 | " )" 334 | ] 335 | }, 336 | { 337 | "cell_type": "markdown", 338 | "metadata": {}, 339 | "source": [ 340 | "The `simplify_line` and `corner_cutting` options are cosmetic and subjective. The former will simplify the line, by making it look less edgy but also less precise, while the latter then \"smoothes\" it. Users may try different combinations to see their effect (see the [documentation](https://docs.oggm.org/en/latest/generated/oggm.global_tasks.write_centerlines_to_shape.html))." 
341 | ] 342 | }, 343 | { 344 | "cell_type": "code", 345 | "execution_count": null, 346 | "metadata": {}, 347 | "outputs": [], 348 | "source": [ 349 | "cls_smooth = gpd.read_file('outputs/Norway_Centerlines_smooth.shp')\n", 350 | "cls_smooth['breID'] = gdf['breID']\n", 351 | "cls_smooth = cls_smooth.set_index('breID')" 352 | ] 353 | }, 354 | { 355 | "cell_type": "code", 356 | "execution_count": null, 357 | "metadata": {}, 358 | "outputs": [], 359 | "source": [ 360 | "sel_breID = 1189\n", 361 | "\n", 362 | "f, ax = plt.subplots(figsize=(9, 4))\n", 363 | "orig_inventory.loc[[sel_breID]].plot(ax=ax, facecolor='lightblue');\n", 364 | "cls_default.loc[[sel_breID]].plot(ax=ax, color='C0', alpha=0.5);\n", 365 | "cls_smooth.loc[[sel_breID]].plot(ax=ax, color='C3');" 366 | ] 367 | }, 368 | { 369 | "cell_type": "markdown", 370 | "metadata": {}, 371 | "source": [ 372 | "## Final remarks" 373 | ] 374 | }, 375 | { 376 | "cell_type": "markdown", 377 | "metadata": {}, 378 | "source": [ 379 | "While the centerline algorithm is quite robust, the results will vary as a function of the resolution of the underlying grid, and the smoothing options. After trying a little, it seems difficult to find a setting which works \"best\" in all circumstances, and we encourage users to try several options and see what they prefer. The option likely to have the most impact (assuming smoothing with `(0.5, 5)`) is the underlying grid resolution.
380 | ] 381 | }, 382 | { 383 | "cell_type": "markdown", 384 | "metadata": {}, 385 | "source": [ 386 | "## What's next?\n", 387 | "\n", 388 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 389 | "- back to the [table of contents](../welcome.ipynb)" 390 | ] 391 | } 392 | ], 393 | "metadata": { 394 | "kernelspec": { 395 | "display_name": "Python 3 (ipykernel)", 396 | "language": "python", 397 | "name": "python3" 398 | }, 399 | "language_info": { 400 | "codemirror_mode": { 401 | "name": "ipython", 402 | "version": 3 403 | }, 404 | "file_extension": ".py", 405 | "mimetype": "text/x-python", 406 | "name": "python", 407 | "nbconvert_exporter": "python", 408 | "pygments_lexer": "ipython3", 409 | "version": "3.11.4" 410 | }, 411 | "metadata": { 412 | "interpreter": { 413 | "hash": "705f036afebab14ba3958dfbf5720c1e1e37a03d5afe33574ff09620abf8737d" 414 | } 415 | } 416 | }, 417 | "nbformat": 4, 418 | "nbformat_minor": 4 419 | } 420 | -------------------------------------------------------------------------------- /notebooks/tutorials/deal_with_errors.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Dealing with errors after a run" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "In this example, we run the model on a list of three glaciers:\n", 15 | "two of them will end with errors: one because it already failed at\n", 16 | "preprocessing (i.e. prior to this run), and one during the run. We show how to analyze these errors and solve (some) of them, as described in the OGGM documentation under [troubleshooting](https://docs.oggm.org/en/stable/faq.html?highlight=border#troubleshooting)." 
17 | ] 18 | }, 19 | { 20 | "cell_type": "markdown", 21 | "metadata": {}, 22 | "source": [ 23 | "## Continue on error" 24 | ] 25 | }, 26 | { 27 | "cell_type": "code", 28 | "execution_count": null, 29 | "metadata": { 30 | "tags": [] 31 | }, 32 | "outputs": [], 33 | "source": [ 34 | "# Locals\n", 35 | "import oggm.cfg as cfg\n", 36 | "from oggm import utils, workflow, tasks, DEFAULT_BASE_URL\n", 37 | "\n", 38 | "# Libs\n", 39 | "import os\n", 40 | "import xarray as xr\n", 41 | "import pandas as pd\n", 42 | "\n", 43 | "# Initialize OGGM and set up the default run parameters\n", 44 | "cfg.initialize(logging_level='WARNING')\n", 45 | "\n", 46 | "# Here we override some of the default parameters\n", 47 | "# How many grid points around the glacier?\n", 48 | "# We make it small because we want the model to error because\n", 49 | "# of flowing out of the domain\n", 50 | "cfg.PARAMS['border'] = 80\n", 51 | "\n", 52 | "# This is useful since we have three glaciers\n", 53 | "cfg.PARAMS['use_multiprocessing'] = True\n", 54 | "\n", 55 | "# This is the important bit!\n", 56 | "# We tell OGGM to continue despite of errors\n", 57 | "cfg.PARAMS['continue_on_error'] = True\n", 58 | "\n", 59 | "# Local working directory (where OGGM will write its output)\n", 60 | "WORKING_DIR = utils.gettempdir('OGGM_Errors', reset=True)\n", 61 | "cfg.PATHS['working_dir'] = WORKING_DIR\n", 62 | "\n", 63 | "rgi_ids = ['RGI60-11.00897', 'RGI60-11.01450', 'RGI60-11.03235']\n", 64 | "\n", 65 | "# Go - get the pre-processed glacier directories\n", 66 | "# in OGGM v1.6 you have to explicitly indicate the url from where you want to start from\n", 67 | "# we will use here the elevation band flowlines which are much simpler than the centerlines\n", 68 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=5, prepro_base_url=DEFAULT_BASE_URL)\n", 69 | "\n", 70 | "# We can step directly to the experiment!\n", 71 | "# Random climate representative for the recent climate (1985-2015)\n", 72 | "# 
with a negative bias added to the random temperature series:\n", 73 | "workflow.execute_entity_task(tasks.run_random_climate, gdirs, y0=2000,\n", 74 | " nyears=150, seed=0,\n", 75 | " temperature_bias=-2)" 76 | ] 77 | }, 78 | { 79 | "cell_type": "markdown", 80 | "metadata": {}, 81 | "source": [ 82 | "## Error diagnostics" 83 | ] 84 | }, 85 | { 86 | "cell_type": "code", 87 | "execution_count": null, 88 | "metadata": { 89 | "tags": [] 90 | }, 91 | "outputs": [], 92 | "source": [ 93 | "# Write the compiled output\n", 94 | "utils.compile_glacier_statistics(gdirs); # saved as glacier_statistics.csv in the WORKING_DIR folder\n", 95 | "utils.compile_run_output(gdirs); # saved as run_output.nc in the WORKING_DIR folder" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "metadata": { 102 | "tags": [] 103 | }, 104 | "outputs": [], 105 | "source": [ 106 | "# Read it\n", 107 | "with xr.open_dataset(os.path.join(WORKING_DIR, 'run_output.nc')) as ds:\n", 108 | " ds = ds.load()\n", 109 | "df_stats = pd.read_csv(os.path.join(WORKING_DIR, 'glacier_statistics.csv'), index_col=0)" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": null, 115 | "metadata": { 116 | "tags": [] 117 | }, 118 | "outputs": [], 119 | "source": [ 120 | "# all possible statistics about the glaciers\n", 121 | "df_stats" 122 | ] 123 | }, 124 | { 125 | "cell_type": "markdown", 126 | "metadata": {}, 127 | "source": [ 128 | "- in the column *error_task*, we can see whether an error occurred, and if yes during which task\n", 129 | "- *error_msg* describes the actual error message " 130 | ] 131 | }, 132 | { 133 | "cell_type": "code", 134 | "execution_count": null, 135 | "metadata": { 136 | "tags": [] 137 | }, 138 | "outputs": [], 139 | "source": [ 140 | "df_stats[['error_task', 'error_msg']]" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "We can also check which glacier failed at which task by using 
[compile_task_log](https://docs.oggm.org/en/stable/generated/oggm.utils.compile_task_log.html#oggm.utils.compile_task_log)." 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "metadata": { 154 | "tags": [] 155 | }, 156 | "outputs": [], 157 | "source": [ 158 | "# also saved as task_log.csv in the WORKING_DIR folder - \"append=False\" replaces the existing one\n", 159 | "utils.compile_task_log(gdirs, task_names=['glacier_masks', 'compute_centerlines', 'flowline_model_run'], append=False)" 160 | ] 161 | }, 162 | { 163 | "cell_type": "markdown", 164 | "metadata": {}, 165 | "source": [ 166 | "## Error solving" 167 | ] 168 | }, 169 | { 170 | "cell_type": "markdown", 171 | "metadata": {}, 172 | "source": [ 173 | "### `Glacier exceeds domain boundaries`" 174 | ] 175 | }, 176 | { 177 | "cell_type": "markdown", 178 | "metadata": {}, 179 | "source": [ 180 | "To remove this error just increase the domain boundary **before** running `init_glacier_directories` ! Attention, this means that more data has to be downloaded and the run takes more time. The available preprocessed directories for `cfg.PARAMS['border']` for OGGM v1.6 are **10, 80, 160 or 240** at the moment; the unit is number of grid points outside the glacier boundaries. More about that in the OGGM documentation under [preprocessed files](https://docs.oggm.org/en/stable/input-data.html#pre-processed-directories)." 181 | ] 182 | }, 183 | { 184 | "cell_type": "code", 185 | "execution_count": null, 186 | "metadata": { 187 | "tags": [] 188 | }, 189 | "outputs": [], 190 | "source": [ 191 | "# reset to recompute statistics\n", 192 | "# Beware! 
If you use `reset=True` in `utils.mkdir`, ALL DATA in this folder will be deleted!\n", 193 | "utils.mkdir(WORKING_DIR, reset=True)\n", 194 | "\n", 195 | "# increase the amount of gridpoints outside the glacier\n", 196 | "cfg.PARAMS['border'] = 160\n", 197 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=5, prepro_base_url=DEFAULT_BASE_URL)\n", 198 | "workflow.execute_entity_task(tasks.run_random_climate, gdirs, y0=2000,\n", 199 | " nyears=150, seed=0,\n", 200 | " temperature_bias=-2);\n", 201 | "\n", 202 | "# recompute the output\n", 203 | "# we can also get the run output directly from the methods\n", 204 | "df_stats = utils.compile_glacier_statistics(gdirs)\n", 205 | "ds = utils.compile_run_output(gdirs)" 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": null, 211 | "metadata": { 212 | "tags": [] 213 | }, 214 | "outputs": [], 215 | "source": [ 216 | "# check again\n", 217 | "df_stats[['error_task', 'error_msg']]" 218 | ] 219 | }, 220 | { 221 | "cell_type": "markdown", 222 | "metadata": {}, 223 | "source": [ 224 | "Now `RGI60-11.00897` runs without errors!" 225 | ] 226 | }, 227 | { 228 | "cell_type": "markdown", 229 | "metadata": {}, 230 | "source": [ 231 | "### `RGI60-11.xxxxx is a nominal glacier.`" 232 | ] 233 | }, 234 | { 235 | "cell_type": "markdown", 236 | "metadata": {}, 237 | "source": [ 238 | "This error message in the log is misleading: it does not really describe the source of the error, which happened earlier in the processing chain. 
Therefore we can look instead into the glacier_statistics via [compile_glacier_statistics](https://docs.oggm.org/en/stable/generated/oggm.utils.compile_glacier_statistics.html) or into the log output via [compile_task_log](https://docs.oggm.org/en/stable/generated/oggm.utils.compile_task_log.html#oggm.utils.compile_task_log):" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": null, 244 | "metadata": { 245 | "tags": [] 246 | }, 247 | "outputs": [], 248 | "source": [ 249 | "print('error_task: {}, error_msg: {}'.format(df_stats.loc['RGI60-11.03235']['error_task'],\n", 250 | " df_stats.loc['RGI60-11.03235']['error_msg']))" 251 | ] 252 | }, 253 | { 254 | "cell_type": "markdown", 255 | "metadata": {}, 256 | "source": [ 257 | "Now we have a better understanding of the error: \n", 258 | "- OGGM can not work with this geometry of this glacier and could therefore not make a gridded mask of the glacier outlines. \n", 259 | "- there is no way to prevent this except you find a better way to pre-process the geometry of this glacier\n", 260 | "- these glaciers have to be ignored! Less than 0.5% of glacier area globally have errors during the geometry processing or failures in computing certain topographical properties by e.g. invalid DEM, see [Sect. 4.2 Invalid Glaciers of the OGGM paper (Maussion et al., 2019)](https://gmd.copernicus.org/articles/12/909/2019/#section4) and [this tutorial](../tutorials/preprocessing_errors.ipynb) for more up-to-date numbers" 261 | ] 262 | }, 263 | { 264 | "cell_type": "markdown", 265 | "metadata": {}, 266 | "source": [ 267 | "## Ignoring those glaciers with errors that we can't solve" 268 | ] 269 | }, 270 | { 271 | "cell_type": "markdown", 272 | "metadata": {}, 273 | "source": [ 274 | "In the run_output, you can for example just use `*.dropna` to remove these. For other applications (e.g. 
quantitative mass change evaluation), more will be needed (not available yet in the OGGM codebase):" 275 | ] 276 | }, 277 | { 278 | "cell_type": "code", 279 | "execution_count": null, 280 | "metadata": { 281 | "tags": [] 282 | }, 283 | "outputs": [], 284 | "source": [ 285 | "ds.dropna(dim='rgi_id') # here we can e.g. find the volume evolution" 286 | ] 287 | }, 288 | { 289 | "cell_type": "markdown", 290 | "metadata": {}, 291 | "source": [ 292 | "## What's next?\n", 293 | "\n", 294 | "- read about [preprocessing errors](../tutorials/preprocessing_errors.ipynb)\n", 295 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 296 | "- back to the [table of contents](../welcome.ipynb)" 297 | ] 298 | } 299 | ], 300 | "metadata": { 301 | "hide_input": false, 302 | "kernelspec": { 303 | "display_name": "Python 3 (ipykernel)", 304 | "language": "python", 305 | "name": "python3" 306 | }, 307 | "language_info": { 308 | "codemirror_mode": { 309 | "name": "ipython", 310 | "version": 3 311 | }, 312 | "file_extension": ".py", 313 | "mimetype": "text/x-python", 314 | "name": "python", 315 | "nbconvert_exporter": "python", 316 | "pygments_lexer": "ipython3", 317 | "version": "3.11.4" 318 | }, 319 | "toc": { 320 | "base_numbering": 1, 321 | "nav_menu": {}, 322 | "number_sections": false, 323 | "sideBar": true, 324 | "skip_h1_title": true, 325 | "title_cell": "Table of Contents", 326 | "title_sidebar": "Contents", 327 | "toc_cell": false, 328 | "toc_position": {}, 329 | "toc_section_display": true, 330 | "toc_window_display": false 331 | } 332 | }, 333 | "nbformat": 4, 334 | "nbformat_minor": 4 335 | } 336 | -------------------------------------------------------------------------------- /notebooks/tutorials/dem_sources.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Create local topography maps from different DEM sources with OGGM" 8 | 
] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "There are a number of datasets available [out-of-the box](https://rgitools.readthedocs.io/en/latest/dems.html) in OGGM. This Notebook will show you how to download the original sources and create the local glacier centered map.\n", 15 | "It is also possible to use your own DEM data in OGGM." 16 | ] 17 | }, 18 | { 19 | "cell_type": "markdown", 20 | "metadata": {}, 21 | "source": [ 22 | "## Set-up " 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": null, 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "import rioxarray as rioxr\n", 32 | "import matplotlib.pyplot as plt\n", 33 | "import oggm\n", 34 | "from oggm import cfg, utils, workflow, tasks, graphics\n", 35 | "from oggm.core import gis\n", 36 | "cfg.initialize(logging_level='WARNING')\n", 37 | "cfg.PARAMS['border'] = 10" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "## RGI outlines " 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "We use the RGI outlines to identify the necessary DEM tiles. If you haven't downloaded the RGI files yet, this will also download them. Feel free to use your desired RGI-ID here, otherwise let's use the Hintereisferner glacier as an example. " 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "entity = utils.get_rgi_glacier_entities(['RGI60-11.00897'])\n", 61 | "entity.plot();" 62 | ] 63 | }, 64 | { 65 | "cell_type": "markdown", 66 | "metadata": {}, 67 | "source": [ 68 | "## Choose a DEM source (e.g. SRTM)" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "metadata": {}, 74 | "source": [ 75 | "If not specifying anything, OGGM will use its default settings, i.e. NASADEM for mid- and low-latitudes (60°S-60°N). 
However, this needs registration at [NASA Earthdata](https://urs.earthdata.nasa.gov/) (see \"Register\" below). Here, we choose the **SRTM** source as example DEM (no registration necessary)." 76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "metadata": {}, 82 | "outputs": [], 83 | "source": [ 84 | "# Let's make a working directory for this DEM \n", 85 | "cfg.PATHS['working_dir'] = utils.gettempdir('default', reset=True)\n", 86 | "gdir = workflow.init_glacier_directories(entity)[0]\n", 87 | "tasks.define_glacier_region(gdir, source='SRTM')\n", 88 | "# if not yet installed, you have to install rasterio" 89 | ] 90 | }, 91 | { 92 | "cell_type": "markdown", 93 | "metadata": {}, 94 | "source": [ 95 | "You can access the (reprojected and interpolated) DEM file in the working directory:" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "metadata": {}, 102 | "outputs": [], 103 | "source": [ 104 | "dem_path = gdir.get_filepath('dem')\n", 105 | "dem_path" 106 | ] 107 | }, 108 | { 109 | "cell_type": "markdown", 110 | "metadata": {}, 111 | "source": [ 112 | "It is a geotiff file. 
[Xarray](http://xarray.pydata.org) can open them thanks to [rasterio](https://rasterio.readthedocs.io):" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": {}, 119 | "outputs": [], 120 | "source": [ 121 | "da = rioxr.open_rasterio(dem_path)\n", 122 | "f, ax = plt.subplots()\n", 123 | "da.plot(cmap='terrain', ax=ax);\n", 124 | "# Add the outlines\n", 125 | "gdir.read_shapefile('outlines').plot(ax=ax, color='none', edgecolor='black');" 126 | ] 127 | }, 128 | { 129 | "cell_type": "markdown", 130 | "metadata": {}, 131 | "source": [ 132 | "The source of the DEM is documented in the directory itself:" 133 | ] 134 | }, 135 | { 136 | "cell_type": "code", 137 | "execution_count": null, 138 | "metadata": {}, 139 | "outputs": [], 140 | "source": [ 141 | "with open(gdir.get_filepath('dem_source'), 'r') as f:\n", 142 | " print(f.read())" 143 | ] 144 | }, 145 | { 146 | "cell_type": "markdown", 147 | "metadata": {}, 148 | "source": [ 149 | "**OGGM is neither the owner nor the distributer of these datasets! OGGM only provides tools to access it. 
It is your responsibility as the data user to read the individual usage requirements and cite and acknowledge the original data sources accordingly.**" 150 | ] 151 | }, 152 | { 153 | "cell_type": "markdown", 154 | "metadata": {}, 155 | "source": [ 156 | "## OGGM provided datasets" 157 | ] 158 | }, 159 | { 160 | "cell_type": "markdown", 161 | "metadata": {}, 162 | "source": [ 163 | "At the moment OGGM is able to download and process the following DEM sources:" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": {}, 170 | "outputs": [], 171 | "source": [ 172 | "for src in utils.DEM_SOURCES:\n", 173 | " print('{:<10}: {}'.format(src, gis.DEM_SOURCE_INFO[src].split('\\n')[0]))" 174 | ] 175 | }, 176 | { 177 | "cell_type": "markdown", 178 | "metadata": {}, 179 | "source": [ 180 | "## Register for online datasets\n", 181 | "The default DEM source for low and mid-latitudes (60°S-60°N), **NASADEM**, requires a user account to download data, so you need to register at [NASA Earthdata](https://urs.earthdata.nasa.gov/). There are other DEM sources where a registration is necessary; for **ASTGTMV3** at [NASA Earthdata](https://urs.earthdata.nasa.gov/), for **TanDEM-X** at [DLR](https://sso.eoc.dlr.de/tdm90/selfservice/), and for **COPDEM** at [spacedata.copernicus.eu/](https://spacedata.copernicus.eu).\n", 182 | "\n", 183 | "After that you can use the command line functionality `oggm_netrc_credentials` to store your user credentials in a local `~/.netrc` file. Your user credentials are only stored locally and are only used by the download function for authentification with the original DEM source. 
**Credentials are not needed if you use the RGI-TOPO data (see below).**" 184 | ] 185 | }, 186 | { 187 | "cell_type": "markdown", 188 | "metadata": {}, 189 | "source": [ 190 | "## Use pre-processed DEMs from RGI-TOPO " 191 | ] 192 | }, 193 | { 194 | "cell_type": "markdown", 195 | "metadata": {}, 196 | "source": [ 197 | "The [RGI-TOPO](https://rgitools.readthedocs.io/en/latest/dems.html) dataset is an RGI-provided dataset in beta release. These data are available for everyone, and were created with OGGM. Of course you can easily use these data in OGGM as well:" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": null, 203 | "metadata": {}, 204 | "outputs": [], 205 | "source": [ 206 | "# use NASADEM, the default DEM for low and mid-latitudes in OGGM, you can also change to e.g. 'COPDEM'\n", 207 | "from oggm.shop import rgitopo\n", 208 | "cfg.PATHS['working_dir'] = utils.gettempdir('rgitopo', reset=True)\n", 209 | "gdir = rgitopo.init_glacier_directories_from_rgitopo(['RGI60-11.00897'], dem_source='NASADEM')[0]\n", 210 | "graphics.plot_domain(gdir)" 211 | ] 212 | }, 213 | { 214 | "cell_type": "markdown", 215 | "metadata": {}, 216 | "source": [ 217 | "## Use another DEM source" 218 | ] 219 | }, 220 | { 221 | "cell_type": "markdown", 222 | "metadata": {}, 223 | "source": [ 224 | "Using RGI-TOPO DEMs is by far the easiest since all data is prepared for you and ready to use. 
But if you really want, you can go back to the original data sources:" 225 | ] 226 | }, 227 | { 228 | "cell_type": "code", 229 | "execution_count": null, 230 | "metadata": {}, 231 | "outputs": [], 232 | "source": [ 233 | "# Let's make a working directory for this DEM \n", 234 | "cfg.PATHS['working_dir'] = utils.gettempdir('alternative')\n", 235 | "try:\n", 236 | " gdir = workflow.init_glacier_directories(entity)[0]\n", 237 | " tasks.define_glacier_region(gdir, source='DEM3')\n", 238 | "except oggm.exceptions.InvalidDEMError as err:\n", 239 | " print(err)" 240 | ] 241 | }, 242 | { 243 | "cell_type": "markdown", 244 | "metadata": {}, 245 | "source": [ 246 | "Let's check that the source text is updated as well:" 247 | ] 248 | }, 249 | { 250 | "cell_type": "code", 251 | "execution_count": null, 252 | "metadata": {}, 253 | "outputs": [], 254 | "source": [ 255 | "with open(gdir.get_filepath('dem_source'), 'r') as f:\n", 256 | " print(f.read())" 257 | ] 258 | }, 259 | { 260 | "cell_type": "code", 261 | "execution_count": null, 262 | "metadata": {}, 263 | "outputs": [], 264 | "source": [ 265 | "f, ax = plt.subplots()\n", 266 | "da_dem3 = rioxr.open_rasterio(gdir.get_filepath('dem'))\n", 267 | "da_dem3.plot(cmap='terrain', ax=ax);\n", 268 | "gdir.read_shapefile('outlines').plot(ax=ax, color='none', edgecolor='black');" 269 | ] 270 | }, 271 | { 272 | "cell_type": "markdown", 273 | "metadata": {}, 274 | "source": [ 275 | "There might not be much difference a first sight, but by subtracting them the difference become clear: " 276 | ] 277 | }, 278 | { 279 | "cell_type": "code", 280 | "execution_count": null, 281 | "metadata": {}, 282 | "outputs": [], 283 | "source": [ 284 | "f, ax = plt.subplots()\n", 285 | "(da_dem3 - da).plot(ax=ax);\n", 286 | "plt.title('DEM3 - SRTM');\n", 287 | "gdir.read_shapefile('outlines').plot(ax=ax, color='none', edgecolor='black');" 288 | ] 289 | }, 290 | { 291 | "cell_type": "markdown", 292 | "metadata": {}, 293 | "source": [ 294 | "## Regional DEMs 
/ DEM availability" 295 | ] 296 | }, 297 | { 298 | "cell_type": "markdown", 299 | "metadata": {}, 300 | "source": [ 301 | "Of course not all sources are available for every glacier as some of the DEMs are regional only. If we for example try the GIMP DEM, which is a Greenland specific DEM, it will not work for glaciers outside that region:" 302 | ] 303 | }, 304 | { 305 | "cell_type": "code", 306 | "execution_count": null, 307 | "metadata": {}, 308 | "outputs": [], 309 | "source": [ 310 | "# Let's make a working directory for this DEM \n", 311 | "cfg.PATHS['working_dir'] = utils.gettempdir('gimp', reset=True)\n", 312 | "try:\n", 313 | " gdir = workflow.init_glacier_directories(entity)[0]\n", 314 | " tasks.define_glacier_region(gdir, source='GIMP')\n", 315 | "except oggm.exceptions.InvalidWorkflowError as err:\n", 316 | " print(err)" 317 | ] 318 | }, 319 | { 320 | "cell_type": "markdown", 321 | "metadata": {}, 322 | "source": [ 323 | "## User provided DEM " 324 | ] 325 | }, 326 | { 327 | "cell_type": "markdown", 328 | "metadata": {}, 329 | "source": [ 330 | "Users should be able to use any DEM file which can be opened by rasterio (i.e. geotiff). 
Here, we use a subset SRTM file shipped with OGGM as an example:" 331 | ] 332 | }, 333 | { 334 | "cell_type": "code", 335 | "execution_count": null, 336 | "metadata": {}, 337 | "outputs": [], 338 | "source": [ 339 | "custom_dem_path = utils.get_demo_file('hef_srtm.tif')\n", 340 | "custom_dem_path" 341 | ] 342 | }, 343 | { 344 | "cell_type": "markdown", 345 | "metadata": {}, 346 | "source": [ 347 | "We tell OGGM to use it by changing the entry in the RGI table and by giving the path to the file:" 348 | ] 349 | }, 350 | { 351 | "cell_type": "code", 352 | "execution_count": null, 353 | "metadata": {}, 354 | "outputs": [], 355 | "source": [ 356 | "cfg.PATHS['dem_file'] = custom_dem_path" 357 | ] 358 | }, 359 | { 360 | "cell_type": "code", 361 | "execution_count": null, 362 | "metadata": {}, 363 | "outputs": [], 364 | "source": [ 365 | "cfg.PATHS['working_dir'] = utils.gettempdir('user', reset=True)\n", 366 | "gdir = workflow.init_glacier_directories(entity)[0]\n", 367 | "tasks.define_glacier_region(gdir, source='USER')" 368 | ] 369 | }, 370 | { 371 | "cell_type": "markdown", 372 | "metadata": {}, 373 | "source": [ 374 | "Now the user provided DEM is used:" 375 | ] 376 | }, 377 | { 378 | "cell_type": "code", 379 | "execution_count": null, 380 | "metadata": {}, 381 | "outputs": [], 382 | "source": [ 383 | "f, ax = plt.subplots()\n", 384 | "da_user = rioxr.open_rasterio(gdir.get_filepath('dem'))\n", 385 | "da_user.plot(cmap='terrain', ax=ax);\n", 386 | "gdir.read_shapefile('outlines').plot(ax=ax, color='none', edgecolor='black');" 387 | ] 388 | }, 389 | { 390 | "cell_type": "markdown", 391 | "metadata": {}, 392 | "source": [ 393 | "## The border value, or how to choose the size of the topographic map" 394 | ] 395 | }, 396 | { 397 | "cell_type": "markdown", 398 | "metadata": {}, 399 | "source": [ 400 | "It is possible to specify the extent of the local topographic map. 
All maps are centered on the glacier and the size of the map is determined in grid points around the glacier. The number of grid points that was used in this example is 10 in order to save storage. But depending on your study you might need a larger topographic map. \n", 401 | "\n", 402 | "OGGM's [pre-processed directories](https://docs.oggm.org/en/stable/input-data.html#pre-processed-directories) come in 4 border sizes: 10, 40, 80 and 160. But if you process the topography yourself you can choose any value." 403 | ] 404 | }, 405 | { 406 | "cell_type": "code", 407 | "execution_count": null, 408 | "metadata": {}, 409 | "outputs": [], 410 | "source": [ 411 | "# print the currently used number of gridpoints around a glacier\n", 412 | "cfg.PARAMS['border']" 413 | ] 414 | }, 415 | { 416 | "cell_type": "code", 417 | "execution_count": null, 418 | "metadata": {}, 419 | "outputs": [], 420 | "source": [ 421 | "cfg.PARAMS['border'] = 1" 422 | ] 423 | }, 424 | { 425 | "cell_type": "code", 426 | "execution_count": null, 427 | "metadata": {}, 428 | "outputs": [], 429 | "source": [ 430 | "# Let's make a working directory for this DEM \n", 431 | "cfg.PATHS['working_dir'] = utils.gettempdir('border1')\n", 432 | "gdir = workflow.init_glacier_directories(entity)[0]\n", 433 | "tasks.define_glacier_region(gdir)\n", 434 | "da = rioxr.open_rasterio(gdir.get_filepath('dem'))\n", 435 | "f, ax = plt.subplots()\n", 436 | "da.plot(cmap='terrain', ax=ax);\n", 437 | "# Add the outlines\n", 438 | "gdir.read_shapefile('outlines').plot(ax=ax, color='none', edgecolor='black');" 439 | ] 440 | }, 441 | { 442 | "cell_type": "code", 443 | "execution_count": null, 444 | "metadata": {}, 445 | "outputs": [], 446 | "source": [ 447 | "cfg.PARAMS['border'] = 100" 448 | ] 449 | }, 450 | { 451 | "cell_type": "code", 452 | "execution_count": null, 453 | "metadata": {}, 454 | "outputs": [], 455 | "source": [ 456 | "# Let's make a working directory for this DEM \n", 457 | "cfg.PATHS['working_dir'] = 
utils.gettempdir('border100')\n", 458 | "gdir = workflow.init_glacier_directories(entity)[0]\n", 459 | "tasks.define_glacier_region(gdir)\n", 460 | "da = rioxr.open_rasterio(gdir.get_filepath('dem'))\n", 461 | "f, ax = plt.subplots()\n", 462 | "da.plot(cmap='terrain', ax=ax);\n", 463 | "# Add the outlines\n", 464 | "gdir.read_shapefile('outlines').plot(ax=ax, color='none', edgecolor='black');" 465 | ] 466 | }, 467 | { 468 | "cell_type": "markdown", 469 | "metadata": {}, 470 | "source": [ 471 | "## What's next?\n", 472 | "\n", 473 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 474 | "- back to the [table of contents](../welcome.ipynb)" 475 | ] 476 | } 477 | ], 478 | "metadata": { 479 | "hide_input": false, 480 | "kernelspec": { 481 | "display_name": "Python 3 (ipykernel)", 482 | "language": "python", 483 | "name": "python3" 484 | }, 485 | "language_info": { 486 | "codemirror_mode": { 487 | "name": "ipython", 488 | "version": 3 489 | }, 490 | "file_extension": ".py", 491 | "mimetype": "text/x-python", 492 | "name": "python", 493 | "nbconvert_exporter": "python", 494 | "pygments_lexer": "ipython3", 495 | "version": "3.12.4" 496 | }, 497 | "latex_envs": { 498 | "LaTeX_envs_menu_present": true, 499 | "autoclose": false, 500 | "autocomplete": true, 501 | "bibliofile": "biblio.bib", 502 | "cite_by": "apalike", 503 | "current_citInitial": 1, 504 | "eqLabelWithNumbers": true, 505 | "eqNumInitial": 1, 506 | "hotkeys": { 507 | "equation": "Ctrl-E", 508 | "itemize": "Ctrl-I" 509 | }, 510 | "labels_anchors": false, 511 | "latex_user_defs": false, 512 | "report_style_numbering": false, 513 | "user_envs_cfg": false 514 | }, 515 | "nbTranslate": { 516 | "displayLangs": [ 517 | "*" 518 | ], 519 | "hotkey": "alt-t", 520 | "langInMainMenu": true, 521 | "sourceLang": "en", 522 | "targetLang": "fr", 523 | "useGoogleTranslate": true 524 | }, 525 | "toc": { 526 | "base_numbering": 1, 527 | "nav_menu": {}, 528 | "number_sections": false, 529 | "sideBar": true, 530 | 
"skip_h1_title": true, 531 | "title_cell": "Table of Contents", 532 | "title_sidebar": "Contents", 533 | "toc_cell": false, 534 | "toc_position": {}, 535 | "toc_section_display": true, 536 | "toc_window_display": false 537 | } 538 | }, 539 | "nbformat": 4, 540 | "nbformat_minor": 4 541 | } 542 | -------------------------------------------------------------------------------- /notebooks/tutorials/elevation_bands_vs_centerlines.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Differences between the \"elevation band\" and \"centerline\" flowlines" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "In version 1.4, OGGM introduced a new way to compute flowlines: the so-called \"elevation-band flowlines\" (after [Huss & Farinotti, 2012](https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2012JF002523)). These elevation bands complement the already available \"multiple centerlines\" glacier representation. \n", 15 | "\n", 16 | "**In OGGM 1.6 and above, the \"elevation band\" representation is the most commonly used representation for large scale simulations.**\n", 17 | "\n", 18 | "This notebook allows you to compare the two representations. It shows that the difference between the two are small for projections of glacier change, but each representation comes with pros and cons when it comes to single glacier simulations." 
19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "**Tags:** beginner, workflow, dynamics, flowlines " 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": { 32 | "tags": [] 33 | }, 34 | "outputs": [], 35 | "source": [ 36 | "from oggm import cfg, utils, workflow, graphics, tasks\n", 37 | "import xarray as xr\n", 38 | "import numpy as np\n", 39 | "import matplotlib.pyplot as plt\n", 40 | "import seaborn as sns" 41 | ] 42 | }, 43 | { 44 | "cell_type": "code", 45 | "execution_count": null, 46 | "metadata": { 47 | "tags": [] 48 | }, 49 | "outputs": [], 50 | "source": [ 51 | "cfg.initialize(logging_level='WARNING')" 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": { 58 | "tags": [] 59 | }, 60 | "outputs": [], 61 | "source": [ 62 | "# Pick the glacier you want! We use Baltoro here\n", 63 | "rgi_ids = ['RGI60-14.06794']" 64 | ] 65 | }, 66 | { 67 | "cell_type": "markdown", 68 | "metadata": {}, 69 | "source": [ 70 | "## Get ready" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "In order to open the same glacier on two different glacier directories, we apply a trick: we set a new working directory for each case! This trick is not recommended for real runs: if you have a use case for such a workflow (the same glacier with different flowline types, please [get in touch with us](https://docs.oggm.org/en/stable/#get-in-touch))." 
78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": { 84 | "tags": [] 85 | }, 86 | "outputs": [], 87 | "source": [ 88 | "# Geometrical centerline\n", 89 | "# Where to store the data \n", 90 | "cfg.PATHS['working_dir'] = utils.gettempdir(dirname='OGGM-centerlines', reset=True)\n", 91 | "\n", 92 | "# We start from prepro level 3 with all data ready - note the url here\n", 93 | "base_url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/centerlines/W5E5/'\n", 94 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_border=80, prepro_base_url=base_url)\n", 95 | "gdir_cl = gdirs[0]\n", 96 | "gdir_cl" 97 | ] 98 | }, 99 | { 100 | "cell_type": "code", 101 | "execution_count": null, 102 | "metadata": { 103 | "tags": [] 104 | }, 105 | "outputs": [], 106 | "source": [ 107 | "# Elevation band flowline\n", 108 | "# New working directory\n", 109 | "cfg.PATHS['working_dir'] = utils.gettempdir(dirname='OGGM-elevbands', reset=True)\n", 110 | "\n", 111 | "# Note the new url\n", 112 | "base_url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/elev_bands/W5E5/'\n", 113 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_border=80, prepro_base_url=base_url)\n", 114 | "gdir_eb = gdirs[0]\n", 115 | "gdir_eb" 116 | ] 117 | }, 118 | { 119 | "cell_type": "markdown", 120 | "metadata": {}, 121 | "source": [ 122 | "## Some reading first " 123 | ] 124 | }, 125 | { 126 | "cell_type": "markdown", 127 | "metadata": {}, 128 | "source": [ 129 | "We wrote a bit of information about the differences between these two. 
First, go to the [glacier flowlines](https://docs.oggm.org/en/stable/flowlines.html#glacier-flowlines) documentation where you can find detailed information about the two flowline types and also a [guideline when to use which flowline method](https://docs.oggm.org/en/stable/flowlines.html#pros-and-cons-of-both-methods).\n", 130 | "\n", 131 | "The examples below illustrate these differences, without much text for now because of lack of time:" 132 | ] 133 | }, 134 | { 135 | "cell_type": "markdown", 136 | "metadata": {}, 137 | "source": [ 138 | "## Glacier length and cross section" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": null, 144 | "metadata": { 145 | "tags": [] 146 | }, 147 | "outputs": [], 148 | "source": [ 149 | "fls_cl = gdir_cl.read_pickle('model_flowlines')\n", 150 | "fls_eb = gdir_eb.read_pickle('model_flowlines')" 151 | ] 152 | }, 153 | { 154 | "cell_type": "code", 155 | "execution_count": null, 156 | "metadata": { 157 | "tags": [] 158 | }, 159 | "outputs": [], 160 | "source": [ 161 | "f, (ax1, ax2) = plt.subplots(2, 1, figsize=(10, 14), sharex=True, sharey=True)\n", 162 | "graphics.plot_modeloutput_section(fls_cl, ax=ax1)\n", 163 | "ax1.set_title('Geometrical centerline')\n", 164 | "graphics.plot_modeloutput_section(fls_eb, ax=ax2)\n", 165 | "ax2.set_title('Elevation band flowline');" 166 | ] 167 | }, 168 | { 169 | "cell_type": "markdown", 170 | "metadata": {}, 171 | "source": [ 172 | "**Note that the elevation band flowline length is shorter than the geometrical centerline!**" 173 | ] 174 | }, 175 | { 176 | "cell_type": "markdown", 177 | "metadata": {}, 178 | "source": [ 179 | "## Projections: generally small differences in volume, but larger differences in geometry (length and area) " 180 | ] 181 | }, 182 | { 183 | "cell_type": "markdown", 184 | "metadata": {}, 185 | "source": [ 186 | "Thanks to OGGM's modular workflow, a simulation with each geometry is fairly similar in terms of code. 
For example, we can process the climate data for both representations with the same command:" 187 | ] 188 | }, 189 | { 190 | "cell_type": "code", 191 | "execution_count": null, 192 | "metadata": { 193 | "tags": [] 194 | }, 195 | "outputs": [], 196 | "source": [ 197 | "gdirs = [gdir_cl, gdir_eb]" 198 | ] 199 | }, 200 | { 201 | "cell_type": "code", 202 | "execution_count": null, 203 | "metadata": { 204 | "tags": [] 205 | }, 206 | "outputs": [], 207 | "source": [ 208 | "from oggm.shop import gcm_climate\n", 209 | "\n", 210 | "# you can choose one of these 5 different GCMs:\n", 211 | "# 'gfdl-esm4_r1i1p1f1', 'mpi-esm1-2-hr_r1i1p1f1', 'mri-esm2-0_r1i1p1f1' (\"low sensitivity\" models, within typical ranges from AR6)\n", 212 | "# 'ipsl-cm6a-lr_r1i1p1f1', 'ukesm1-0-ll_r1i1p1f2' (\"hotter\" models, especially ukesm1-0-ll)\n", 213 | "member = 'mri-esm2-0_r1i1p1f1' \n", 214 | "\n", 215 | "for ssp in ['ssp126', 'ssp370','ssp585']:\n", 216 | " # bias correct them\n", 217 | " workflow.execute_entity_task(gcm_climate.process_monthly_isimip_data, gdirs, \n", 218 | " ssp = ssp,\n", 219 | " # gcm member -> you can choose another one\n", 220 | " member=member,\n", 221 | " # recognize the climate file for later\n", 222 | " output_filesuffix=f'_ISIMIP3b_{member}_{ssp}'\n", 223 | " );" 224 | ] 225 | }, 226 | { 227 | "cell_type": "markdown", 228 | "metadata": {}, 229 | "source": [ 230 | "For the ice dynamics simulations, the commands are exactly the same as well. The only difference is that centerlines require the more flexible \"FluxBased\" numerical model, while the elevation bands can also use the more robust \"SemiImplicit\" one. 
**The runs are considerabily faster with the elevation bands flowlines.**" 231 | ] 232 | }, 233 | { 234 | "cell_type": "code", 235 | "execution_count": null, 236 | "metadata": { 237 | "tags": [] 238 | }, 239 | "outputs": [], 240 | "source": [ 241 | "# add additional outputs to default OGGM\n", 242 | "cfg.PARAMS['store_model_geometry'] = True \n", 243 | "cfg.PARAMS['store_fl_diagnostics'] = True\n", 244 | "\n", 245 | "for gdir in gdirs:\n", 246 | " if gdir is gdir_cl:\n", 247 | " cfg.PARAMS['evolution_model'] = 'FluxBased'\n", 248 | " else:\n", 249 | " cfg.PARAMS['evolution_model'] = 'SemiImplicit'\n", 250 | "\n", 251 | " workflow.execute_entity_task(tasks.run_from_climate_data, [gdir],\n", 252 | " output_filesuffix='_historical', \n", 253 | " );\n", 254 | "\n", 255 | " for ssp in ['ssp126', 'ssp370', 'ssp585']:\n", 256 | " rid = f'_ISIMIP3b_{member}_{ssp}'\n", 257 | "\n", 258 | " workflow.execute_entity_task(tasks.run_from_climate_data, [gdir],\n", 259 | " climate_filename='gcm_data', # use gcm_data, not climate_historical\n", 260 | " climate_input_filesuffix=rid, # use the chosen scenario\n", 261 | " init_model_filesuffix='_historical', # this is important! 
Start from 2020 glacier\n", 262 | " output_filesuffix=rid, # recognize the run for later\n", 263 | " );" 264 | ] 265 | }, 266 | { 267 | "cell_type": "code", 268 | "execution_count": null, 269 | "metadata": {}, 270 | "outputs": [], 271 | "source": [ 272 | "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(14, 4))\n", 273 | "\n", 274 | "# Pick some colors for the lines\n", 275 | "color_dict={'ssp126':'blue', 'ssp370':'orange', 'ssp585':'red'}\n", 276 | "\n", 277 | "for ssp in ['ssp126','ssp370', 'ssp585']:\n", 278 | " rid = f'_ISIMIP3b_{member}_{ssp}'\n", 279 | " with xr.open_dataset(gdir_cl.get_filepath('model_diagnostics', filesuffix=rid)) as ds:\n", 280 | " ds.volume_m3.plot(ax=ax1, label=ssp, c=color_dict[ssp]);\n", 281 | "for ssp in ['ssp126','ssp370', 'ssp585']:\n", 282 | " rid = f'_ISIMIP3b_{member}_{ssp}'\n", 283 | " with xr.open_dataset(gdir_eb.get_filepath('model_diagnostics', filesuffix=rid)) as ds:\n", 284 | " ds.volume_m3.plot(ax=ax1, label=ssp, c=color_dict[ssp], ls='--');\n", 285 | " ax1.set_title('Glacier volume')\n", 286 | " ax1.set_xlim([2020,2100])\n", 287 | " ax1.set_ylim([0, ds.volume_m3.max().max()*1.1])\n", 288 | "\n", 289 | "for ssp in ['ssp126','ssp370', 'ssp585']:\n", 290 | " rid = f'_ISIMIP3b_{member}_{ssp}'\n", 291 | " with xr.open_dataset(gdir_cl.get_filepath('model_diagnostics', filesuffix=rid)) as ds:\n", 292 | " ds.length_m.plot(ax=ax2, label=ssp, c=color_dict[ssp]);\n", 293 | " ax2.set_ylim([0, ds.length_m.max().max()*1.1])\n", 294 | "for ssp in ['ssp126','ssp370', 'ssp585']:\n", 295 | " rid = f'_ISIMIP3b_{member}_{ssp}'\n", 296 | " with xr.open_dataset(gdir_eb.get_filepath('model_diagnostics', filesuffix=rid)) as ds:\n", 297 | " ds.length_m.plot(ax=ax2, label=ssp, c=color_dict[ssp], ls='--');\n", 298 | " ax2.set_title('Glacier length')\n", 299 | " ax2.set_xlim([2020,2100])\n", 300 | " \n", 301 | "plt.legend(); " 302 | ] 303 | }, 304 | { 305 | "cell_type": "markdown", 306 | "metadata": { 307 | "tags": [] 308 | }, 309 | "source": [ 310 | "As 
you can see, for this disappearing glacier, the representations create slightly different volume projections. The differences can be quite a bit larger at times, for example for length projections." 311 | ] 312 | }, 313 | { 314 | "cell_type": "markdown", 315 | "metadata": {}, 316 | "source": [ 317 | "## Graphical representation: centerlines win by short margin (for now)" 318 | ] 319 | }, 320 | { 321 | "cell_type": "code", 322 | "execution_count": null, 323 | "metadata": { 324 | "tags": [] 325 | }, 326 | "outputs": [], 327 | "source": [ 328 | "rid = f'_ISIMIP3b_{member}_ssp126'" 329 | ] 330 | }, 331 | { 332 | "cell_type": "markdown", 333 | "metadata": {}, 334 | "source": [ 335 | "Both models can be represented with a cross-section, like this: " 336 | ] 337 | }, 338 | { 339 | "cell_type": "code", 340 | "execution_count": null, 341 | "metadata": { 342 | "tags": [] 343 | }, 344 | "outputs": [], 345 | "source": [ 346 | "sel_years = np.linspace(2020, 2100, 17).astype(int)\n", 347 | "colors = sns.color_palette('rocket', len(sel_years))\n", 348 | "with plt.rc_context({'axes.prop_cycle': plt.cycler(color=colors)}):\n", 349 | " f, (ax1, ax2) = plt.subplots(1, 2, figsize=(15, 5.5), sharey=True, sharex=True)\n", 350 | " n_lines = len(gdir_cl.read_pickle('model_flowlines'))\n", 351 | " with xr.open_dataset(gdir_cl.get_filepath('fl_diagnostics', filesuffix=rid), group=f'fl_{n_lines-1}') as ds:\n", 352 | " (ds.bed_h + ds.sel(time=sel_years).thickness_m).plot(ax=ax1, hue='time')\n", 353 | " ds.bed_h.plot(ax=ax1, c='k')\n", 354 | " ax1.set_title('Centerlines')\n", 355 | " with xr.open_dataset(gdir_eb.get_filepath('fl_diagnostics', filesuffix=rid), group='fl_0') as ds:\n", 356 | " (ds.bed_h + ds.sel(time=sel_years).thickness_m).plot(ax=ax2, hue='time')\n", 357 | " ds.bed_h.plot(ax=ax2, c='k')\n", 358 | " ax2.set_ylabel('')\n", 359 | " ax2.set_title('Elevation bands')" 360 | ] 361 | }, 362 | { 363 | "cell_type": "markdown", 364 | "metadata": { 365 | "tags": [] 366 | }, 367 | 
"source": [ 368 | "However, only centerlines can be plotted as a map:" 369 | ] 370 | }, 371 | { 372 | "cell_type": "code", 373 | "execution_count": null, 374 | "metadata": { 375 | "tags": [] 376 | }, 377 | "outputs": [], 378 | "source": [ 379 | "# this can take some time\n", 380 | "# if you want to see more thickness differences you can use ssp585 instead of ssp126\n", 381 | "# by uncomment the following line\n", 382 | "# rid = f'_ISIMIP3b_{member}_ssp585'\n", 383 | "f, (ax1, ax2, ax3) = plt.subplots(1, 3, figsize=(14, 6))\n", 384 | "# let's have the same colorbar for every subplot for better comparability\n", 385 | "graphics.plot_modeloutput_map(gdir_cl, filesuffix=rid, modelyr=2020, ax=ax1, vmax=600) \n", 386 | "graphics.plot_modeloutput_map(gdir_cl, filesuffix=rid, modelyr=2050, ax=ax2, vmax=600)\n", 387 | "graphics.plot_modeloutput_map(gdir_cl, filesuffix=rid, modelyr=2100, ax=ax3, vmax=600)\n", 388 | "plt.tight_layout();" 389 | ] 390 | }, 391 | { 392 | "cell_type": "markdown", 393 | "metadata": {}, 394 | "source": [ 395 | "We are however working on a better representation of retreating glaciers for outreach. Have a look at [this tutorial](../tutorials/distribute_flowline.ipynb)!" 396 | ] 397 | }, 398 | { 399 | "cell_type": "markdown", 400 | "metadata": {}, 401 | "source": [ 402 | "## Take home messages " 403 | ] 404 | }, 405 | { 406 | "cell_type": "markdown", 407 | "metadata": {}, 408 | "source": [ 409 | "- in the absence of additional data to better calibrate the mass balance model, using multiple centerlines is considered not useful: indeed, the distributed representation offers little advantages if the mass balance is only a function of elevation.\n", 410 | "- elevation band flowlines are now the default of most OGGM applications. It is faster, much cheaper, and more robust to use these simplified glaciers.\n", 411 | "- elevation band flowlines cannot be represented on a map \"out of the box\". 
We have however developed a tool to display the changes by redistributing them on a map: have a look at [this tutorial](../tutorials/distribute_flowline.ipynb)!\n", 412 | "- multiple centerlines can be useful for growing glacier cases and use cases where geometry plays an important role (e.g. lakes, paleo applications)." 413 | ] 414 | }, 415 | { 416 | "cell_type": "markdown", 417 | "metadata": {}, 418 | "source": [ 419 | "## What's next?\n", 420 | "\n", 421 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 422 | "- back to the [table of contents](../welcome.ipynb)" 423 | ] 424 | } 425 | ], 426 | "metadata": { 427 | "celltoolbar": "Tags", 428 | "hide_input": false, 429 | "kernelspec": { 430 | "display_name": "Python 3 (ipykernel)", 431 | "language": "python", 432 | "name": "python3" 433 | }, 434 | "language_info": { 435 | "codemirror_mode": { 436 | "name": "ipython", 437 | "version": 3 438 | }, 439 | "file_extension": ".py", 440 | "mimetype": "text/x-python", 441 | "name": "python", 442 | "nbconvert_exporter": "python", 443 | "pygments_lexer": "ipython3", 444 | "version": "3.11.4" 445 | }, 446 | "latex_envs": { 447 | "LaTeX_envs_menu_present": true, 448 | "autoclose": false, 449 | "autocomplete": true, 450 | "bibliofile": "biblio.bib", 451 | "cite_by": "apalike", 452 | "current_citInitial": 1, 453 | "eqLabelWithNumbers": true, 454 | "eqNumInitial": 1, 455 | "hotkeys": { 456 | "equation": "Ctrl-E", 457 | "itemize": "Ctrl-I" 458 | }, 459 | "labels_anchors": false, 460 | "latex_user_defs": false, 461 | "report_style_numbering": false, 462 | "user_envs_cfg": false 463 | }, 464 | "nbTranslate": { 465 | "displayLangs": [ 466 | "*" 467 | ], 468 | "hotkey": "alt-t", 469 | "langInMainMenu": true, 470 | "sourceLang": "en", 471 | "targetLang": "fr", 472 | "useGoogleTranslate": true 473 | }, 474 | "papermill": { 475 | "duration": 78.878142, 476 | "end_time": "2019-05-02T12:30:59.784271", 477 | "environment_variables": {}, 478 | "exception": null, 479 | 
"input_path": "dem_comparison.ipynb", 480 | "output_path": "out-param.ipynb", 481 | "parameters": { 482 | "rgi_id": "RGI60-03.02489" 483 | }, 484 | "start_time": "2019-05-02T12:29:40.906129", 485 | "version": "1.0.0" 486 | }, 487 | "toc": { 488 | "base_numbering": 1, 489 | "nav_menu": {}, 490 | "number_sections": false, 491 | "sideBar": true, 492 | "skip_h1_title": true, 493 | "title_cell": "Table of Contents", 494 | "title_sidebar": "Contents", 495 | "toc_cell": false, 496 | "toc_position": {}, 497 | "toc_section_display": true, 498 | "toc_window_display": false 499 | } 500 | }, 501 | "nbformat": 4, 502 | "nbformat_minor": 4 503 | } 504 | -------------------------------------------------------------------------------- /notebooks/tutorials/hydrological_output.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Hydrological mass-balance output" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "Since OGGM 1.5, a new task called `run_with_hydro` adds mass-balance and runoff diagnostics to the OGGM output files.\n", 15 | "\n", 16 | "We are in the process of reworking this tutorial for advanced use cases, but in the meantime we recommend to have a look at the two very nice OGGM-Edu tutorials:\n", 17 | "- [Glaciers as water resources: part 1 (idealized climate)](https://oggm.org/oggm-edu-notebooks/oggm-edu/glacier_water_resources.html)\n", 18 | "- [Glaciers as water resources: part 2 (projections)](https://oggm.org/oggm-edu-notebooks/oggm-edu/glacier_water_resources_projections.html)\n", 19 | "\n", 20 | "They demonstrate the capabilities quite well." 
21 | ] 22 | }, 23 | { 24 | "cell_type": "markdown", 25 | "metadata": {}, 26 | "source": [ 27 | "## What's next?\n", 28 | "\n", 29 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 30 | "- back to the [table of contents](../welcome.ipynb)" 31 | ] 32 | } 33 | ], 34 | "metadata": { 35 | "hide_input": false, 36 | "kernelspec": { 37 | "display_name": "Python 3 (ipykernel)", 38 | "language": "python", 39 | "name": "python3" 40 | }, 41 | "language_info": { 42 | "codemirror_mode": { 43 | "name": "ipython", 44 | "version": 3 45 | }, 46 | "file_extension": ".py", 47 | "mimetype": "text/x-python", 48 | "name": "python", 49 | "nbconvert_exporter": "python", 50 | "pygments_lexer": "ipython3", 51 | "version": "3.12.4" 52 | }, 53 | "toc": { 54 | "base_numbering": 1, 55 | "nav_menu": {}, 56 | "number_sections": false, 57 | "sideBar": true, 58 | "skip_h1_title": true, 59 | "title_cell": "Table of Contents", 60 | "title_sidebar": "Contents", 61 | "toc_cell": false, 62 | "toc_position": {}, 63 | "toc_section_display": true, 64 | "toc_window_display": false 65 | } 66 | }, 67 | "nbformat": 4, 68 | "nbformat_minor": 4 69 | } 70 | -------------------------------------------------------------------------------- /notebooks/tutorials/massbalance_global_params.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Global distribution of the mass-balance model parameters" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "1", 14 | "metadata": {}, 15 | "source": [ 16 | "This notebook is a follow-up on the very important overview of the [mass-balance calibration procedure in v1.6](massbalance_calibration.ipynb).\n", 17 | "\n", 18 | "Here we illustrate a few useful methods:\n", 19 | "- checking some statistics on the pre-processed directories (similar to [preprocessing_errors.ipynb](preprocessing_errors.ipynb))\n", 20 | "- 
we check how the new global calibration procedure operates\n", 21 | "- we assess the importance of the dynamical spinup for the calibration" 22 | ] 23 | }, 24 | { 25 | "cell_type": "code", 26 | "execution_count": null, 27 | "id": "2", 28 | "metadata": {}, 29 | "outputs": [], 30 | "source": [ 31 | "from oggm import utils\n", 32 | "import pandas as pd\n", 33 | "import numpy as np\n", 34 | "import matplotlib.pyplot as plt\n", 35 | "import seaborn as sns" 36 | ] 37 | }, 38 | { 39 | "cell_type": "markdown", 40 | "id": "3", 41 | "metadata": {}, 42 | "source": [ 43 | "Let's start by reading in the \"glacier statistics\" files, a bunch of statistics written by OGGM at the end of the preprocessing:" 44 | ] 45 | }, 46 | { 47 | "cell_type": "code", 48 | "execution_count": null, 49 | "id": "4", 50 | "metadata": {}, 51 | "outputs": [], 52 | "source": [ 53 | "# W5E5 elevbands, no spinup \n", 54 | "url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/elev_bands/W5E5/RGI62/b_080/L5/summary/'\n", 55 | "\n", 56 | "# this can take some time to download\n", 57 | "df = []\n", 58 | "for rgi_reg in range(1, 19):\n", 59 | " fpath = utils.file_downloader(url + f'glacier_statistics_{rgi_reg:02d}.csv')\n", 60 | " df.append(pd.read_csv(fpath, index_col=0, low_memory=False))\n", 61 | "df = pd.concat(df, sort=False).sort_index()\n", 62 | "\n", 63 | "# There are a lot of columns - let's pick a few columns only\n", 64 | "# We drop failing glaciers\n", 65 | "df_params = df[['melt_f', 'prcp_fac', 'temp_bias', 'rgi_area_km2']].dropna().copy()\n", 66 | "f'Failing glaciers: {len(df) - len(df_params)} ({100 - df_params.rgi_area_km2.sum()/df.rgi_area_km2.sum()*100:.2f}% of global area)'" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "id": "5", 72 | "metadata": {}, 73 | "source": [ 74 | "## Global statistics of OGGM's 1.6.1 \"informed three steps\" method" 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "id": "6", 80 | "metadata": {}, 81 | "source": [ 82 
| "As explained in the [mass-balance calibration procedure in v1.6](massbalance_calibration.ipynb) notebook, the \"informed three steps\" method provides first guesses for the precipitation factor and the temperature bias. We then calibrate each glacier in three steps - let's check the number of glaciers calibrated this way:" 83 | ] 84 | }, 85 | { 86 | "cell_type": "markdown", 87 | "id": "7", 88 | "metadata": {}, 89 | "source": [ 90 | "Step 0: use the first guess `melt_f` = 5, `prcp_fac` = data-informed from winter precipitation, `temp_bias` = data-informed from the global calibration with fixed parameters (see [mass-balance calibration procedure in v1.6](massbalance_calibration.ipynb) for details). \n", 91 | "\n", 92 | "Step 1: if Step 0 doesn't match (only likely to happen if there is one isolated glacier in a climate grid point), allow `prcp_fac` to vary again between 0.8 and 1.2 times the roiginal guess ($\\pm$20%). This is justified by the fact that the first guess for precipitation is also highly uncertain. 
If that worked, the calibration stops.\n", 93 | "\n", 94 | "To find out which glaciers have been calibrated after step 1, we count the number of glaciers with a melt factor of exactly 5:" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": null, 100 | "id": "8", 101 | "metadata": {}, 102 | "outputs": [], 103 | "source": [ 104 | "df_params['Step 1'] = np.isclose(df_params['melt_f'], 5)\n", 105 | "perc1 = df_params['Step 1'].sum() / len(df_params) * 100\n", 106 | "perc1_area = df_params.loc[df_params['Step 1']].rgi_area_km2.sum()/df.rgi_area_km2.sum()*100\n", 107 | "print(f'{perc1:.1f}% of all glaciers are calibrated after step 1 ({perc1_area:.1f}% area)')" 108 | ] 109 | }, 110 | { 111 | "cell_type": "markdown", 112 | "id": "9", 113 | "metadata": {}, 114 | "source": [ 115 | "Step 2: if Step 1 did not work, we allow `melt_f` to vary between a predefined range (1.5 - 17) while fixing `temp_bias` and `prcp_fac` again.\n", 116 | "\n", 117 | "Step 3: finally, if the above did not work, allow `temp_bias` to vary again, fixing the other parameters to their last value.\n", 118 | "\n", 119 | "To check whether these steps were successful from our files, we can compute the number of glaciers which have hit the \"hard limits\" of the allowed melt factor range, i.e. 
have reached step 3, and then subtract them from the total:" 120 | ] 121 | }, 122 | { 123 | "cell_type": "code", 124 | "execution_count": null, 125 | "id": "10", 126 | "metadata": {}, 127 | "outputs": [], 128 | "source": [ 129 | "df_params['Step 3'] = np.isclose(df_params['melt_f'], df_params['melt_f'].max()) | np.isclose(df_params['melt_f'], df_params['melt_f'].min()) \n", 130 | "perc3 = df_params['Step 3'].sum() / len(df_params) * 100\n", 131 | "perc3_area = df_params.loc[df_params['Step 3']].rgi_area_km2.sum()/df.rgi_area_km2.sum()*100\n", 132 | "\n", 133 | "df_params['Step 2'] = (~ df_params['Step 1']) & (~ df_params['Step 3'])\n", 134 | "perc2 = df_params['Step 2'].sum() / len(df_params) * 100\n", 135 | "perc2_area = df_params.loc[df_params['Step 2']].rgi_area_km2.sum()/df.rgi_area_km2.sum()*100\n", 136 | "\n", 137 | "print(f'{perc2:.1f}% of all glaciers are calibrated after step 2 ({perc2_area:.1f}% area)')\n", 138 | "print(f'{perc3:.1f}% of all glaciers are calibrated after step 3 ({perc3_area:.1f}% area)')" 139 | ] 140 | }, 141 | { 142 | "cell_type": "markdown", 143 | "id": "11", 144 | "metadata": {}, 145 | "source": [ 146 | "## Global parameter distributions" 147 | ] 148 | }, 149 | { 150 | "cell_type": "markdown", 151 | "id": "12", 152 | "metadata": {}, 153 | "source": [ 154 | "### Melt factor" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "id": "13", 161 | "metadata": {}, 162 | "outputs": [], 163 | "source": [ 164 | "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))\n", 165 | "\n", 166 | "df_params['melt_f'].plot.hist(bins=51, density=True, ax=ax1, alpha=0.5, label='Frequency');\n", 167 | "df_params['melt_f'].plot.hist(bins=51, density=True, ax=ax1, weights=df_params['rgi_area_km2'], alpha=0.5, label='Area weighted');\n", 168 | "ax1.set_title('Melt factor distribution (global)');\n", 169 | "ax1.set_ylabel('Frequency (%)');\n", 170 | "ax1.legend();\n", 171 | "\n", 172 | "df_params['melt_f'].plot.hist(bins=51, 
density=True, ax=ax2, alpha=0.5, label='Frequency');\n", 173 | "df_params['melt_f'].plot.hist(bins=51, density=True, ax=ax2, weights=df_params['rgi_area_km2'], alpha=0.5, label='Area weighted');\n", 174 | "ax2.set_yscale('log')\n", 175 | "ax2.set_title('Melt factor distribution (log scale)');\n", 176 | "ax2.set_ylabel('Frequency (log scale)');" 177 | ] 178 | }, 179 | { 180 | "cell_type": "markdown", 181 | "id": "14", 182 | "metadata": {}, 183 | "source": [ 184 | "### Precip factor" 185 | ] 186 | }, 187 | { 188 | "cell_type": "code", 189 | "execution_count": null, 190 | "id": "15", 191 | "metadata": {}, 192 | "outputs": [], 193 | "source": [ 194 | "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))\n", 195 | "\n", 196 | "df_params['prcp_fac'].plot.hist(bins=51, density=True, ax=ax1, alpha=0.5, label='Frequency');\n", 197 | "df_params['prcp_fac'].plot.hist(bins=51, density=True, ax=ax1, weights=df_params['rgi_area_km2'], alpha=0.5, label='Area weighted');\n", 198 | "ax1.set_title('Precipitation factor distribution (global)');\n", 199 | "ax1.set_ylabel('Frequency (%)');\n", 200 | "ax1.legend();\n", 201 | "\n", 202 | "df_params['prcp_fac'].plot.hist(bins=51, density=True, ax=ax2, alpha=0.5, label='Frequency');\n", 203 | "df_params['prcp_fac'].plot.hist(bins=51, density=True, ax=ax2, weights=df_params['rgi_area_km2'], alpha=0.5, label='Area weighted');\n", 204 | "ax2.set_yscale('log')\n", 205 | "ax2.set_title('Precipitation factor distribution (log scale)');\n", 206 | "ax2.set_ylabel('Frequency (log scale)');" 207 | ] 208 | }, 209 | { 210 | "cell_type": "code", 211 | "execution_count": null, 212 | "id": "16", 213 | "metadata": {}, 214 | "outputs": [], 215 | "source": [ 216 | "print(f\"The precipitation factor median is {df_params['prcp_fac'].median():.1f}.\")\n", 217 | "print(f\"The 5% percentile is {df_params['prcp_fac'].quantile(0.05):.1f} and the 95% percentile is {df_params['prcp_fac'].quantile(0.95):.1f}\")" 218 | ] 219 | }, 220 | { 221 | "cell_type": "markdown", 
222 | "id": "17", 223 | "metadata": {}, 224 | "source": [ 225 | "### Temperature bias" 226 | ] 227 | }, 228 | { 229 | "cell_type": "code", 230 | "execution_count": null, 231 | "id": "18", 232 | "metadata": {}, 233 | "outputs": [], 234 | "source": [ 235 | "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))\n", 236 | "\n", 237 | "df_params['temp_bias'].plot.hist(bins=51, density=True, ax=ax1, alpha=0.5, label='Frequency');\n", 238 | "df_params['temp_bias'].plot.hist(bins=51, density=True, ax=ax1, weights=df_params['rgi_area_km2'], alpha=0.5, label='Area weighted');\n", 239 | "ax1.set_title('Temperature bias distribution (global)');\n", 240 | "ax1.set_ylabel('Frequency (%)');\n", 241 | "ax1.legend();\n", 242 | "\n", 243 | "df_params['temp_bias'].plot.hist(bins=51, density=True, ax=ax2, alpha=0.5, label='Frequency');\n", 244 | "df_params['temp_bias'].plot.hist(bins=51, density=True, ax=ax2, weights=df_params['rgi_area_km2'], alpha=0.5, label='Area weighted');\n", 245 | "ax2.set_yscale('log')\n", 246 | "ax2.set_title('Temperature bias distribution (log scale)');\n", 247 | "ax2.set_ylabel('Frequency (log scale)');" 248 | ] 249 | }, 250 | { 251 | "cell_type": "markdown", 252 | "id": "19", 253 | "metadata": {}, 254 | "source": [ 255 | "### Take home" 256 | ] 257 | }, 258 | { 259 | "cell_type": "markdown", 260 | "id": "20", 261 | "metadata": {}, 262 | "source": [ 263 | "- a substantial (33%) part of all glaciers are attributed the default melt factor of 5 after the first guesses in climate data bias correction. In other words, this means that we are substantially correcting the climate forcing to \"match\" the presence of a glacier. 
Other calibration methods are using similar techniques (they differ in the details and the allowed range of parameter values)\n", 264 | "- the large amount of glaciers with melt factor of exactly 5 is problematic, but is mitigated somewhat by the dynamical spinup (see below)\n", 265 | "- the largest bulk of the glacier area is calibrated with \"pre-informed\" precip factor and temperature bias, and have a calibrated melt factor. The resulting melt factor distribution is centered around 5 and has a long tail towards higher values.\n", 266 | "- in general, weighting the distributions by area tends to reduce the extremes." 267 | ] 268 | }, 269 | { 270 | "cell_type": "markdown", 271 | "id": "21", 272 | "metadata": {}, 273 | "source": [ 274 | "## Influence of dynamical spinup " 275 | ] 276 | }, 277 | { 278 | "cell_type": "markdown", 279 | "id": "22", 280 | "metadata": {}, 281 | "source": [ 282 | "The dynamical spinup procedure (explained in this [10 minutes tutorial](../10minutes/dynamical_spinup.ipynb) and in more detail in [this tutorial](dynamical_spinup.ipynb)) starts from the parameters calibrated above with a *static* geometry and calibrate the melt factor again using an iterative procedure, making sure that the parameters and the past evolution of the glacier are consistent with the past evolution of the glacier. 
In doing so, it achieves two things:\n", 283 | "- the *actually modelled* mass balance of glaciers during a dynamical run matches observations better than without\n", 284 | "- it reshuffles the melt factors a bit\n", 285 | "\n", 286 | "Let's test this second hypothesis by downloading the statistics for the spinup directories:" 287 | ] 288 | }, 289 | { 290 | "cell_type": "code", 291 | "execution_count": null, 292 | "id": "23", 293 | "metadata": {}, 294 | "outputs": [], 295 | "source": [ 296 | "# W5E5 elevbands, with spinup \n", 297 | "url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/elev_bands/W5E5_spinup/RGI62/b_160/L5/summary/'\n", 298 | "\n", 299 | "# this can take some time\n", 300 | "dfs = []\n", 301 | "for rgi_reg in range(1, 19):\n", 302 | " fpath = utils.file_downloader(url + f'glacier_statistics_{rgi_reg:02d}.csv')\n", 303 | " dfs.append(pd.read_csv(fpath, index_col=0, low_memory=False))\n", 304 | "dfs = pd.concat(dfs, sort=False).sort_index()" 305 | ] 306 | }, 307 | { 308 | "cell_type": "code", 309 | "execution_count": null, 310 | "id": "24", 311 | "metadata": {}, 312 | "outputs": [], 313 | "source": [ 314 | "df_params['melt_f_dyna'] = dfs['melt_f']" 315 | ] 316 | }, 317 | { 318 | "cell_type": "markdown", 319 | "id": "25", 320 | "metadata": {}, 321 | "source": [ 322 | "First of all, let's see how many glaciers have had their melt factor changed as a result of the dynamical calibration (i.e. 
dynamical calibration was successful):" 323 | ] 324 | }, 325 | { 326 | "cell_type": "code", 327 | "execution_count": null, 328 | "id": "26", 329 | "metadata": {}, 330 | "outputs": [], 331 | "source": [ 332 | "df_params['dyna_changed'] = ~ np.isclose(df_params['melt_f'], df_params['melt_f_dyna'])\n", 333 | "perc = df_params['dyna_changed'].sum() / len(df_params) * 100\n", 334 | "perc_area = df_params.loc[df_params['dyna_changed']].rgi_area_km2.sum()/df.rgi_area_km2.sum()*100\n", 335 | "print(f'{perc:.1f}% of all glaciers are re calibrated after dynamical spinup ({perc_area:.1f}% area)')" 336 | ] 337 | }, 338 | { 339 | "cell_type": "markdown", 340 | "id": "27", 341 | "metadata": {}, 342 | "source": [ 343 | "Let's plot the change in distribution of the parameters:" 344 | ] 345 | }, 346 | { 347 | "cell_type": "code", 348 | "execution_count": null, 349 | "id": "28", 350 | "metadata": {}, 351 | "outputs": [], 352 | "source": [ 353 | "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))\n", 354 | "bins = np.linspace(0.1, 18, 51)\n", 355 | "df_params['melt_f'].plot.hist(bins=bins, density=True, ax=ax1, alpha=0.5, label='Static');\n", 356 | "df_params['melt_f_dyna'].plot.hist(bins=bins, density=True, ax=ax1, alpha=0.5, label='Dynamic');\n", 357 | "ax1.set_title('Melt factor distribution (global)');\n", 358 | "ax1.set_ylabel('Frequency (%)');\n", 359 | "ax1.legend();\n", 360 | "\n", 361 | "df_params['melt_f'].plot.hist(bins=bins, density=True, ax=ax2, alpha=0.5, label='Static');\n", 362 | "df_params['melt_f_dyna'].plot.hist(bins=bins, density=True, ax=ax2, alpha=0.5, label='Dynamic');\n", 363 | "ax2.set_yscale('log')\n", 364 | "ax2.set_title('Melt factor distribution (log scale)');\n", 365 | "ax2.set_ylabel('Frequency (log scale)');" 366 | ] 367 | }, 368 | { 369 | "cell_type": "markdown", 370 | "id": "29", 371 | "metadata": {}, 372 | "source": [ 373 | "In which direction is the parameter changed?" 
374 | ] 375 | }, 376 | { 377 | "cell_type": "code", 378 | "execution_count": null, 379 | "id": "30", 380 | "metadata": {}, 381 | "outputs": [], 382 | "source": [ 383 | "diff = df_params['melt_f_dyna'] - df_params['melt_f']\n", 384 | "f, (ax1, ax2) = plt.subplots(1, 2, figsize=(12, 5))\n", 385 | "bins = np.linspace(-5, 5, 51)\n", 386 | "diff.plot.hist(bins=bins, density=True, ax=ax1, alpha=0.5, label='Static');\n", 387 | "ax1.set_title('Melt factor change after spinup (global)');\n", 388 | "ax1.set_ylabel('Frequency (%)');\n", 389 | "ax1.legend();\n", 390 | "\n", 391 | "diff.plot.hist(bins=bins, density=True, ax=ax2, alpha=0.5, label='Static');\n", 392 | "ax2.set_yscale('log')\n", 393 | "ax2.set_title('Melt factor change after spinup (log scale)');\n", 394 | "ax2.set_ylabel('Frequency (log scale)');" 395 | ] 396 | }, 397 | { 398 | "cell_type": "markdown", 399 | "id": "31", 400 | "metadata": {}, 401 | "source": [ 402 | "### Take home\n", 403 | "\n", 404 | "- dynamical spinup redistributes melt factors \"for the best\", i.e. 
it increases a bit more the randomness of the melt factors around their central value" 405 | ] 406 | }, 407 | { 408 | "cell_type": "markdown", 409 | "id": "32", 410 | "metadata": { 411 | "jp-MarkdownHeadingCollapsed": true 412 | }, 413 | "source": [ 414 | "## What's next?\n", 415 | "\n", 416 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 417 | "- back to the [table of contents](../welcome.ipynb)" 418 | ] 419 | } 420 | ], 421 | "metadata": { 422 | "kernelspec": { 423 | "display_name": "Python 3 (ipykernel)", 424 | "language": "python", 425 | "name": "python3" 426 | }, 427 | "language_info": { 428 | "codemirror_mode": { 429 | "name": "ipython", 430 | "version": 3 431 | }, 432 | "file_extension": ".py", 433 | "mimetype": "text/x-python", 434 | "name": "python", 435 | "nbconvert_exporter": "python", 436 | "pygments_lexer": "ipython3", 437 | "version": "3.11.4" 438 | } 439 | }, 440 | "nbformat": 4, 441 | "nbformat_minor": 5 442 | } 443 | -------------------------------------------------------------------------------- /notebooks/tutorials/massbalance_perturbation.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Mass balance parameter perturbation experiments with OGGM" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "By now, we assume that you have read and run the [mass balance calibration tutorial](massbalance_calibration.ipynb). 
We know that it is a lot of new information to take in!\n", 15 | "\n", 16 | "In this notebook, we will:\n", 17 | "- re-iterate and discuss the important role that mass balance calibration plays in the projections\n", 18 | "- give you some tools to run parameter perturbation experiments (this will also illustrate useful aspects of the OGGM internals)\n", 19 | "- provide some keys about how to address the calibration process in your use case" 20 | ] 21 | }, 22 | { 23 | "cell_type": "markdown", 24 | "metadata": {}, 25 | "source": [ 26 | "## Set-up" 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": null, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "import matplotlib.pyplot as plt\n", 36 | "import matplotlib\n", 37 | "import pandas as pd\n", 38 | "import xarray as xr\n", 39 | "import numpy as np\n", 40 | "import os\n", 41 | "\n", 42 | "import oggm\n", 43 | "from oggm import cfg, utils, workflow, tasks, graphics\n", 44 | "from oggm.core import massbalance\n", 45 | "from oggm.core.massbalance import mb_calibration_from_scalar_mb, mb_calibration_from_geodetic_mb, mb_calibration_from_wgms_mb" 46 | ] 47 | }, 48 | { 49 | "cell_type": "code", 50 | "execution_count": null, 51 | "metadata": {}, 52 | "outputs": [], 53 | "source": [ 54 | "cfg.initialize(logging_level='WARNING')\n", 55 | "cfg.PATHS['working_dir'] = utils.gettempdir(dirname='OGGM-calib-pertubation', reset=True)\n", 56 | "cfg.PARAMS['border'] = 80" 57 | ] 58 | }, 59 | { 60 | "cell_type": "markdown", 61 | "metadata": {}, 62 | "source": [ 63 | "We start from our two well known glaciers in the Austrian Alps, Kesselwandferner and Hintereisferner. But you can also choose any other other glacier, e.g. from [this list](https://github.com/OGGM/oggm-sample-data/blob/master/wgms/rgi_wgms_links_20220112.csv). 
" 64 | ] 65 | }, 66 | { 67 | "cell_type": "code", 68 | "execution_count": null, 69 | "metadata": {}, 70 | "outputs": [], 71 | "source": [ 72 | "# we start from preprocessing level 5\n", 73 | "base_url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/elev_bands/W5E5/'\n", 74 | "gdirs = workflow.init_glacier_directories(['RGI60-11.00787', 'RGI60-11.00897'], from_prepro_level=5, prepro_base_url=base_url)" 75 | ] 76 | }, 77 | { 78 | "cell_type": "markdown", 79 | "metadata": {}, 80 | "source": [ 81 | "## Changing the mass balance parameters for the mass balance model" 82 | ] 83 | }, 84 | { 85 | "cell_type": "markdown", 86 | "metadata": {}, 87 | "source": [ 88 | "We just downloaded the data. Let's have a look at the calibrated parameters for these glaciers:" 89 | ] 90 | }, 91 | { 92 | "cell_type": "code", 93 | "execution_count": null, 94 | "metadata": { 95 | "tags": [] 96 | }, 97 | "outputs": [], 98 | "source": [ 99 | "# Pick each glacier\n", 100 | "gdir_kwf = gdirs[0]\n", 101 | "gdir_hef = gdirs[1]" 102 | ] 103 | }, 104 | { 105 | "cell_type": "code", 106 | "execution_count": null, 107 | "metadata": { 108 | "tags": [] 109 | }, 110 | "outputs": [], 111 | "source": [ 112 | "gdir_hef.read_json('mb_calib')" 113 | ] 114 | }, 115 | { 116 | "cell_type": "code", 117 | "execution_count": null, 118 | "metadata": { 119 | "tags": [] 120 | }, 121 | "outputs": [], 122 | "source": [ 123 | "gdir_kwf.read_json('mb_calib')" 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [ 130 | "These parameters are stored in a file called `mb_calib.json` in the glacier directory. 
This file is then read by the mass balance model when created:" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": {}, 137 | "outputs": [], 138 | "source": [ 139 | "mbmod = massbalance.MonthlyTIModel(gdir_hef)\n", 140 | "mbmod.calib_params" 141 | ] 142 | }, 143 | { 144 | "cell_type": "markdown", 145 | "metadata": {}, 146 | "source": [ 147 | "Therefore, if you want to mess around with these parameters, \"all you have to do\" is to overwrite this file somehow, or create a new one and ask the mass balance model to read it instead of the default one. Let's do that:" 148 | ] 149 | }, 150 | { 151 | "cell_type": "code", 152 | "execution_count": null, 153 | "metadata": {}, 154 | "outputs": [], 155 | "source": [ 156 | "params = gdir_hef.read_json('mb_calib')\n", 157 | "params['melt_f'] = 7 # a new value\n", 158 | "gdir_hef.write_json(params, 'mb_calib', filesuffix='_perturbed') # write a new file, with perturbed parameters" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "We can read it in with:" 166 | ] 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": null, 171 | "metadata": {}, 172 | "outputs": [], 173 | "source": [ 174 | "mbmod_perturbed = massbalance.MonthlyTIModel(gdir_hef, mb_params_filesuffix='_perturbed')\n", 175 | "mbmod_perturbed.calib_params" 176 | ] 177 | }, 178 | { 179 | "cell_type": "markdown", 180 | "metadata": {}, 181 | "source": [ 182 | "Just for fun, check what this means for the mass balance:" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": null, 188 | "metadata": {}, 189 | "outputs": [], 190 | "source": [ 191 | "h = np.linspace(2000, 3800, 80)\n", 192 | "mb_default = mbmod.get_annual_mb(h, year=1980) * cfg.SEC_IN_YEAR * cfg.PARAMS['ice_density']\n", 193 | "mb_perturbed = mbmod_perturbed.get_annual_mb(h, year=1980) * cfg.SEC_IN_YEAR * cfg.PARAMS['ice_density']\n", 194 | "plt.plot(mb_default, h);\n", 195 | 
"plt.plot(mb_perturbed, h);\n", 196 | "plt.xlabel('Mass balance (mm w.e)'); plt.ylabel('Elevation');" 197 | ] 198 | }, 199 | { 200 | "cell_type": "markdown", 201 | "metadata": {}, 202 | "source": [ 203 | "So far so good. But how to feed this into the heavy OGGM pipeline? Many OGGM users will be more familiar with the `run_*` entity tasks. These tasks \"hide\" the process of creating the mass balance model and therefore make it look like we cant's change anything internally.\n", 204 | "\n", 205 | "We *could* have added a mechanism to pass the `mb_params_filesuffix` from, for example, `run_random_climate`, to the underlying mass balance model (similar to the \"climate_input_filesuffix\" mechanism). We may add this one day, but for now I'd like to use this opportunity to demonstrate another possible mechanism: " 206 | ] 207 | }, 208 | { 209 | "cell_type": "code", 210 | "execution_count": null, 211 | "metadata": {}, 212 | "outputs": [], 213 | "source": [ 214 | "# So far so good: default run with the default mass balance\n", 215 | "tasks.run_random_climate(gdir_hef, y0=2000, nyears=100, seed=1, output_filesuffix='_default');" 216 | ] 217 | }, 218 | { 219 | "cell_type": "code", 220 | "execution_count": null, 221 | "metadata": {}, 222 | "outputs": [], 223 | "source": [ 224 | "# Let' create another \"mass balance model\" which is like the default one but with another default parameter\n", 225 | "from functools import partial\n", 226 | "PerturbedMassBalance = partial(massbalance.MonthlyTIModel, mb_params_filesuffix='_perturbed')\n", 227 | "\n", 228 | "# Pass it to the run task\n", 229 | "tasks.run_random_climate(gdir_hef, y0=2000, nyears=100, seed=1, mb_model_class=PerturbedMassBalance, output_filesuffix='_perturbed');" 230 | ] 231 | }, 232 | { 233 | "cell_type": "markdown", 234 | "metadata": {}, 235 | "source": [ 236 | "The partial function allows to create a function that is created by fixing a certain number of arguments of another function. 
Here we create a new \"class\" which is the same as the default original one, but by setting one parameters to another value. This proves very useful here, since we are just tricking OGGM into using the new one!\n", 237 | "\n", 238 | "Let's check the outcome:" 239 | ] 240 | }, 241 | { 242 | "cell_type": "code", 243 | "execution_count": null, 244 | "metadata": {}, 245 | "outputs": [], 246 | "source": [ 247 | "with xr.open_dataset(gdir_hef.get_filepath('model_diagnostics', filesuffix='_default')) as ds:\n", 248 | " ds_default = ds.load()\n", 249 | "with xr.open_dataset(gdir_hef.get_filepath('model_diagnostics', filesuffix='_perturbed')) as ds:\n", 250 | " ds_perturbed = ds.load()" 251 | ] 252 | }, 253 | { 254 | "cell_type": "code", 255 | "execution_count": null, 256 | "metadata": {}, 257 | "outputs": [], 258 | "source": [ 259 | "ds_default.volume_m3.plot(label='Default');\n", 260 | "ds_perturbed.volume_m3.plot(label='Perturbed');\n", 261 | "plt.legend();" 262 | ] 263 | }, 264 | { 265 | "cell_type": "markdown", 266 | "metadata": {}, 267 | "source": [ 268 | "Quite a big difference for \"just\" 2 units of melt factor more, from 5 to 7!" 269 | ] 270 | }, 271 | { 272 | "cell_type": "markdown", 273 | "metadata": {}, 274 | "source": [ 275 | "## More structured parameters perturbation experiments" 276 | ] 277 | }, 278 | { 279 | "cell_type": "markdown", 280 | "metadata": {}, 281 | "source": [ 282 | "OK, so let's say we want to do this \"at scale\". We actually had such an assignment recently for the PROTECT SLR project. We were asked to do a number of perturbed simulations with parameters diverging from their default values, for example +1 temp_bias everywhere. But how to do this, knowing that each glacier has a different temp_bias? We can't simply set the bias to 1 everywhere (we need +=1).\n", 283 | "\n", 284 | "For this I wrote a \"task\", originally outside of OGGM but that is now (v1.6.4) part of the main codebase. 
Let's have a look at it:\n", 285 | "\n", 286 | "\n", 287 | "```python\n", 288 | "\n", 289 | "@entity_task(log, writes=['mb_calib'])\n", 290 | "def perturbate_mb_params(gdir, perturbation=None, reset_default=False, filesuffix=''):\n", 291 | "    \"\"\"Replaces pre-calibrated MB params with perturbed ones for this glacier.\n", 292 | "\n", 293 | "    It simply replaces the existing `mb_calib.json` file with an\n", 294 | "    updated one with perturbed parameters. The original ones\n", 295 | "    are stored in the file for re-use after perturbation.\n", 296 | "\n", 297 | "    Users can change the following 4 parameters:\n", 298 | "    - 'melt_f': unit [kg m-2 day-1 K-1], the melt factor\n", 299 | "    - 'prcp_fac': unit [-], the precipitation factor\n", 300 | "    - 'temp_bias': unit [K], the temperature correction applied to the timeseries\n", 301 | "    - 'bias': unit [mm we yr-1], *subtracted* from the computed MB. Rarely used.\n", 302 | "\n", 303 | "    All parameter perturbations are additive, i.e. the value\n", 304 | "    provided by the user is added to the *precalibrated* value.\n", 305 | "    For example, `temp_bias=1` means that the temp_bias used by the\n", 306 | "    model will be the precalibrated one, plus 1 Kelvin.\n", 307 | "\n", 308 | "    The only exception is prcp_fac, which is multiplicative.\n", 309 | "    For example prcp_fac=1 will leave the precalibrated prcp_fac unchanged,\n", 310 | "    while 2 will double it.\n", 311 | "\n", 312 | "    Parameters\n", 313 | "    ----------\n", 314 | "    perturbation : dict\n", 315 | "        the parameters to change and the associated value (see doc above)\n", 316 | "    reset_default : bool\n", 317 | "        reset the parameters to their original value. This might be\n", 318 | "        unnecessary if using the filesuffix mechanism.\n", 319 | "    filesuffix : str\n", 320 | "        write the modified parameters in a separate mb_calib.json file\n", 321 | "        with the filesuffix appended. 
This can then be read by the\n", 322 | " MassBalanceModel for example instead of the default one.\n", 323 | " Note that it's always the default, precalibrated params\n", 324 | " file which is read to start with.\n", 325 | " \"\"\"\n", 326 | " df = gdir.read_json('mb_calib')\n", 327 | "\n", 328 | " # Save original params if not there\n", 329 | " if 'bias_orig' not in df:\n", 330 | " for k in ['bias', 'melt_f', 'prcp_fac', 'temp_bias']:\n", 331 | " df[k + '_orig'] = df[k]\n", 332 | "\n", 333 | " if reset_default:\n", 334 | " for k in ['bias', 'melt_f', 'prcp_fac', 'temp_bias']:\n", 335 | " df[k] = df[k + '_orig']\n", 336 | " gdir.write_json(df, 'mb_calib', filesuffix=filesuffix)\n", 337 | " return df\n", 338 | "\n", 339 | " for k, v in perturbation.items():\n", 340 | " if k == 'prcp_fac':\n", 341 | " df[k] = df[k + '_orig'] * v\n", 342 | " elif k in ['bias', 'melt_f', 'temp_bias']:\n", 343 | " df[k] = df[k + '_orig'] + v\n", 344 | " else:\n", 345 | " raise InvalidParamsError(f'Perturbation not valid: {k}')\n", 346 | "\n", 347 | " gdir.write_json(df, 'mb_calib', filesuffix=filesuffix)\n", 348 | " return df\n", 349 | "```\n", 350 | "\n", 351 | "It's a fairly easy piece of code isn't it? 
Let's apply it in a latin hypercube parameter set where we change all parameters in a structured way:" 352 | ] 353 | }, 354 | { 355 | "cell_type": "code", 356 | "execution_count": null, 357 | "metadata": {}, 358 | "outputs": [], 359 | "source": [ 360 | "from scipy.stats import qmc\n", 361 | "sampler = qmc.LatinHypercube(d=3)\n", 362 | "sample = sampler.random(n=30)\n", 363 | "\n", 364 | "def log_scale_value(value, low, high):\n", 365 | " \"\"\"This is to sample multiplicative factors in log space to avoid assymetry (detail, but important).\"\"\"\n", 366 | " return 2**((np.log2(high) - np.log2(low))*value + np.log2(low))\n", 367 | "\n", 368 | "sample[:,0] = 4*sample[:,0] - 2 # DDF factor (melt_f): apply change [-2, +2] mm/(°C day)\n", 369 | "sample[:,1] = 2*sample[:,1] - 1 # temperature bias (temp_bias): apply change [-1, +1] °C\n", 370 | "sample[:,2] = log_scale_value(sample[:,2], 0.5, 2) # precipitation scaling factor (prcp_fac): apply scaling [0.5, 2] on log2\n", 371 | "\n", 372 | "params_df = pd.DataFrame(sample, columns=['melt_f', 'temp_bias', 'prcp_fac'])\n", 373 | "params_df.plot.scatter(x='temp_bias', y='prcp_fac');" 374 | ] 375 | }, 376 | { 377 | "cell_type": "code", 378 | "execution_count": null, 379 | "metadata": {}, 380 | "outputs": [], 381 | "source": [ 382 | "cfg.set_logging_config('CRITICAL') # shut down log output (bad!)\n", 383 | "cfg.PARAMS['continue_on_error'] = True\n", 384 | "\n", 385 | "for exp in range(len(params_df)):\n", 386 | " params = params_df.loc[exp]\n", 387 | " # clim_params = {k: params[k] for k in ('temp_bias', 'prcp_fac', 'melt_f')}\n", 388 | " exp = f'{exp:02d}'\n", 389 | " workflow.execute_entity_task(tasks.perturbate_mb_params, gdirs, perturbation=params, filesuffix=f'_{exp}')\n", 390 | "\n", 391 | " PerturbedMassBalance = partial(massbalance.MonthlyTIModel, mb_params_filesuffix=f'_{exp}')\n", 392 | " \n", 393 | " workflow.execute_entity_task(tasks.run_random_climate, gdirs, \n", 394 | " y0=2000,\n", 395 | " nyears=100,\n", 396 
| " seed=1,\n", 397 | " mb_model_class=PerturbedMassBalance,\n", 398 | " output_filesuffix=f'_{exp}', # recognize the run for later\n", 399 | " );" 400 | ] 401 | }, 402 | { 403 | "cell_type": "code", 404 | "execution_count": null, 405 | "metadata": {}, 406 | "outputs": [], 407 | "source": [ 408 | "out_df = pd.DataFrame()\n", 409 | "for exp in range(len(params_df)):\n", 410 | " try:\n", 411 | " ds = utils.compile_run_output(gdirs, input_filesuffix=f'_{exp:02d}')\n", 412 | " if np.any(ds.volume.isnull()):\n", 413 | " continue\n", 414 | " out_df[f'{exp:02d}'] = ds.volume.sum(dim='rgi_id').to_series()\n", 415 | " except RuntimeError:\n", 416 | " pass" 417 | ] 418 | }, 419 | { 420 | "cell_type": "code", 421 | "execution_count": null, 422 | "metadata": {}, 423 | "outputs": [], 424 | "source": [ 425 | "out_df.plot(legend=False, color='k', alpha=0.5);" 426 | ] 427 | }, 428 | { 429 | "cell_type": "markdown", 430 | "metadata": {}, 431 | "source": [ 432 | "## Parameters perturbation experiments which match observations" 433 | ] 434 | }, 435 | { 436 | "cell_type": "markdown", 437 | "metadata": {}, 438 | "source": [ 439 | "The section above is nice, but works only for a problem setting where we don't ask the mass balance model to match observations. If we were to match obervations, things would be quite different! 
\n", 440 | "\n", 441 | "To do this, we could define a new task very much like the above, but this time realizing a calibration step before writing its solution down.\n", 442 | "\n", 443 | "This exercise is left to the reader ;-)" 444 | ] 445 | }, 446 | { 447 | "cell_type": "markdown", 448 | "metadata": {}, 449 | "source": [ 450 | "## What's next?\n", 451 | "\n", 452 | "- Check out the [massbalance_global_params.ipynb](massbalance_global_params.ipynb) notebook\n", 453 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 454 | "- back to the [table of contents](../welcome.ipynb)" 455 | ] 456 | } 457 | ], 458 | "metadata": { 459 | "hide_input": false, 460 | "kernelspec": { 461 | "display_name": "Python 3 (ipykernel)", 462 | "language": "python", 463 | "name": "python3" 464 | }, 465 | "language_info": { 466 | "codemirror_mode": { 467 | "name": "ipython", 468 | "version": 3 469 | }, 470 | "file_extension": ".py", 471 | "mimetype": "text/x-python", 472 | "name": "python", 473 | "nbconvert_exporter": "python", 474 | "pygments_lexer": "ipython3", 475 | "version": "3.12.4" 476 | }, 477 | "latex_envs": { 478 | "LaTeX_envs_menu_present": true, 479 | "autoclose": false, 480 | "autocomplete": true, 481 | "bibliofile": "biblio.bib", 482 | "cite_by": "apalike", 483 | "current_citInitial": 1, 484 | "eqLabelWithNumbers": true, 485 | "eqNumInitial": 1, 486 | "hotkeys": { 487 | "equation": "Ctrl-E", 488 | "itemize": "Ctrl-I" 489 | }, 490 | "labels_anchors": false, 491 | "latex_user_defs": false, 492 | "report_style_numbering": false, 493 | "user_envs_cfg": false 494 | }, 495 | "nbTranslate": { 496 | "displayLangs": [ 497 | "*" 498 | ], 499 | "hotkey": "alt-t", 500 | "langInMainMenu": true, 501 | "sourceLang": "en", 502 | "targetLang": "fr", 503 | "useGoogleTranslate": true 504 | }, 505 | "toc": { 506 | "base_numbering": 1, 507 | "nav_menu": {}, 508 | "number_sections": false, 509 | "sideBar": true, 510 | "skip_h1_title": true, 511 | "title_cell": "Table of Contents", 
512 | "title_sidebar": "Contents", 513 | "toc_cell": false, 514 | "toc_position": {}, 515 | "toc_section_display": true, 516 | "toc_window_display": false 517 | } 518 | }, 519 | "nbformat": 4, 520 | "nbformat_minor": 4 521 | } 522 | -------------------------------------------------------------------------------- /notebooks/tutorials/numeric_solvers.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "id": "0", 6 | "metadata": {}, 7 | "source": [ 8 | "# Understand the difference between the ice dynamic solvers in OGGM" 9 | ] 10 | }, 11 | { 12 | "cell_type": "markdown", 13 | "id": "1", 14 | "metadata": {}, 15 | "source": [ 16 | "In version 1.6, OGGM changed the default numeric solver to the **Semi-Implicit** model. In this notebook, we explore the main differences compared to the old default, the **Flux-Based** model." 17 | ] 18 | }, 19 | { 20 | "cell_type": "code", 21 | "execution_count": null, 22 | "id": "2", 23 | "metadata": { 24 | "tags": [] 25 | }, 26 | "outputs": [], 27 | "source": [ 28 | "import time\n", 29 | "import xarray as xr\n", 30 | "import matplotlib.pyplot as plt\n", 31 | "from oggm import cfg, utils, workflow, graphics, tasks\n", 32 | "from oggm.core.flowline import FluxBasedModel, SemiImplicitModel" 33 | ] 34 | }, 35 | { 36 | "cell_type": "code", 37 | "execution_count": null, 38 | "id": "3", 39 | "metadata": { 40 | "tags": [] 41 | }, 42 | "outputs": [], 43 | "source": [ 44 | "# Initialize OGGM and set up the default run parameters\n", 45 | "cfg.initialize(logging_level='WARNING')\n", 46 | "\n", 47 | "# Define our test glacier (Baltoro)\n", 48 | "rgi_ids = ['RGI60-14.06794']\n", 49 | "\n", 50 | "# load elevation band representation\n", 51 | "cfg.PATHS['working_dir'] = utils.gettempdir('OGGM_dynamic_solvers_elevation_bands', reset=True)\n", 52 | "base_url_eb = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/elev_bands/W5E5/'\n", 
53 | "gdir_eb = workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_base_url=base_url_eb)[0]\n", 54 | "\n", 55 | "# load centerline representation\n", 56 | "cfg.PATHS['working_dir'] = utils.gettempdir('OGGM_dynamic_solvers_centerliens', reset=True)\n", 57 | "base_url_cl = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/centerlines/W5E5/'\n", 58 | "gdir_cl = workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_base_url=base_url_cl)[0]" 59 | ] 60 | }, 61 | { 62 | "cell_type": "markdown", 63 | "id": "4", 64 | "metadata": {}, 65 | "source": [ 66 | "## Flux-Based model is more flexible, but unstable" 67 | ] 68 | }, 69 | { 70 | "cell_type": "markdown", 71 | "id": "5", 72 | "metadata": {}, 73 | "source": [ 74 | "The big advantage of the Flux-Based model is that it works for all flowline representations (multiple flowlines and different bed shapes). See the tutorial [\"elevation band\" and \"centerline\" flowlines](../tutorials/elevation_bands_vs_centerlines.ipynb) for a\n", 75 | "hands-on introduction to the different flowline types." 
76 | ] 77 | }, 78 | { 79 | "cell_type": "code", 80 | "execution_count": null, 81 | "id": "6", 82 | "metadata": { 83 | "tags": [] 84 | }, 85 | "outputs": [], 86 | "source": [ 87 | "# run Flux-Based with centerlines\n", 88 | "tasks.run_random_climate(gdir_cl,\n", 89 | "                         evolution_model=FluxBasedModel,\n", 90 | "                         nyears=300,\n", 91 | "                         y0=2000,\n", 92 | "                         seed=0,\n", 93 | "                         store_fl_diagnostics=True,\n", 94 | "                         output_filesuffix='_flux_based')\n", 95 | "\n", 96 | "# plot result\n", 97 | "with xr.open_dataset(gdir_cl.get_filepath('model_diagnostics', filesuffix='_flux_based')) as ds:\n", 98 | "    ds_trap = ds.load()\n", 99 | "ds_trap.volume_m3.plot();" 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "id": "7", 106 | "metadata": { 107 | "tags": [] 108 | }, 109 | "outputs": [], 110 | "source": [ 111 | "# run Flux-Based with elevation bands\n", 112 | "start_time = time.time()  # time it for later comparison\n", 113 | "tasks.run_random_climate(gdir_eb,\n", 114 | "                         evolution_model=FluxBasedModel,\n", 115 | "                         nyears=300,\n", 116 | "                         y0=2000,\n", 117 | "                         seed=0,\n", 118 | "                         store_fl_diagnostics=True,\n", 119 | "                         output_filesuffix='_flux_based')\n", 120 | "flux_based_time = time.time() - start_time\n", 121 | "\n", 122 | "# plot result\n", 123 | "with xr.open_dataset(gdir_eb.get_filepath('model_diagnostics', filesuffix='_flux_based')) as ds:\n", 124 | "    ds_flux_eb = ds.load()\n", 125 | "ds_flux_eb.volume_m3.plot();" 126 | ] 127 | }, 128 | { 129 | "cell_type": "markdown", 130 | "id": "8", 131 | "metadata": {}, 132 | "source": [ 133 | "Whereas the Semi-Implicit model only works for single trapezoidal flowlines (elevation bands)." 
134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "id": "9", 140 | "metadata": { 141 | "tags": [] 142 | }, 143 | "outputs": [], 144 | "source": [ 145 | "# run Semi-Implicit with centerlines raises an error \n", 146 | "# The code below would fail (expected)\n", 147 | "import pytest\n", 148 | "with pytest.raises(ValueError):\n", 149 | " tasks.run_random_climate(gdir_cl,\n", 150 | " evolution_model=SemiImplicitModel,\n", 151 | " y0=2000,\n", 152 | " seed=0,\n", 153 | " store_fl_diagnostics=True,\n", 154 | " output_filesuffix='_semi_implicit')" 155 | ] 156 | }, 157 | { 158 | "cell_type": "code", 159 | "execution_count": null, 160 | "id": "10", 161 | "metadata": { 162 | "tags": [] 163 | }, 164 | "outputs": [], 165 | "source": [ 166 | "# run Semi-Implicit with elevation bands\n", 167 | "start_time = time.time() # time it for later comparision\n", 168 | "tasks.run_random_climate(gdir_eb,\n", 169 | " evolution_model=SemiImplicitModel,\n", 170 | " nyears=300,\n", 171 | " y0=2000,\n", 172 | " seed=0,\n", 173 | " store_fl_diagnostics=True,\n", 174 | " output_filesuffix='_semi_implicit')\n", 175 | "semi_implicit_time = time.time() - start_time\n", 176 | "\n", 177 | "# plot result\n", 178 | "with xr.open_dataset(gdir_eb.get_filepath('model_diagnostics', filesuffix='_semi_implicit')) as ds:\n", 179 | " ds_impl_eb = ds.load()\n", 180 | "\n", 181 | "ds_impl_eb.volume_m3.plot(label='SemiImplicitModel', lw=4)\n", 182 | "ds_flux_eb.volume_m3.plot(label='FluxBasedModel')\n", 183 | "plt.legend();" 184 | ] 185 | }, 186 | { 187 | "cell_type": "markdown", 188 | "id": "11", 189 | "metadata": {}, 190 | "source": [ 191 | "You see that for the elevation band flowlines, both produce similar results. The differences arise from numeric instabilities in the Flux-Based model (see next paragraph). You can redo the experiment with a glacier where these instabilities are not that severe (e.g. 
RGI60-11.00897 Hintereisferner) and you will see both models produce the same result." 192 | ] 193 | }, 194 | { 195 | "cell_type": "markdown", 196 | "id": "12", 197 | "metadata": { 198 | "tags": [] 199 | }, 200 | "source": [ 201 | "## Semi-Implicit model is faster and more stable, but less flexible" 202 | ] 203 | }, 204 | { 205 | "cell_type": "markdown", 206 | "id": "13", 207 | "metadata": {}, 208 | "source": [ 209 | "Even though the Semi-Implicit model is not as flexible as the Flux-Based one, it is faster when comparing the computing time:" 210 | ] 211 | }, 212 | { 213 | "cell_type": "code", 214 | "execution_count": null, 215 | "id": "14", 216 | "metadata": { 217 | "tags": [] 218 | }, 219 | "outputs": [], 220 | "source": [ 221 | "print(f'Semi-Implicit time needed: {semi_implicit_time:.1f} s')\n", 222 | "print(f'Flux-Based time needed: {flux_based_time:.1f} s')" 223 | ] 224 | }, 225 | { 226 | "cell_type": "markdown", 227 | "id": "15", 228 | "metadata": {}, 229 | "source": [ 230 | "For a single glacier, this speed-up is probably not that important, but when thinking about regional to global simulations it can save you a lot of time.\n", 231 | "\n", 232 | "One reason for the speed-up is that the Semi-Implicit model is numerically more stable and can take larger time steps without producing instabilities:" 233 | ] 234 | }, 235 | { 236 | "cell_type": "code", 237 | "execution_count": null, 238 | "id": "16", 239 | "metadata": { 240 | "tags": [] 241 | }, 242 | "outputs": [], 243 | "source": [ 244 | "# open flowline diagnostics\n", 245 | "f_impl = gdir_eb.get_filepath('fl_diagnostics', filesuffix='_semi_implicit')\n", 246 | "f_flux = gdir_eb.get_filepath('fl_diagnostics', filesuffix='_flux_based')\n", 247 | "with xr.open_dataset(f_impl, group=f'fl_0') as ds:\n", 248 | "    ds_fl_impl = ds.load()\n", 249 | "with xr.open_dataset(f_flux, group=f'fl_0') as ds:\n", 250 | "    ds_fl_flux = ds.load()\n", 251 | "    \n", 252 | "# compare velocities along flowline\n", 253 | "year = 100\n", 
254 | "ds_fl_impl.sel(time=year).ice_velocity_myr.plot(label='SemiImplicitModel')\n", 255 | "ds_fl_flux.sel(time=year).ice_velocity_myr.plot(label='FluxBasedModel')\n", 256 | "plt.legend();" 257 | ] 258 | }, 259 | { 260 | "cell_type": "markdown", 261 | "id": "17", 262 | "metadata": {}, 263 | "source": [ 264 | "In this case instabilities are visible for the FluxBasedModel at around 30 km distance along the flowline. They can lead to very large velocities which reduce the maximum possible step size due to the cfl-criterion (see also in the [documentation](https://docs.oggm.org/en/latest/faq.html#ice-velocities-in-oggm-are-sometimes-noisy-or-unrealistic-how-so)).\n", 265 | "\n", 266 | "The increased computational speed and, even more importantly, the increased stability are the reasons why we switched to the SemiImplicitModel in OGGM v1.6.\n", 267 | "\n", 268 | "However, if you want to set the FluxBasedModel as your default, you can do so with:" 269 | ] 270 | }, 271 | { 272 | "cell_type": "code", 273 | "execution_count": null, 274 | "id": "18", 275 | "metadata": { 276 | "tags": [] 277 | }, 278 | "outputs": [], 279 | "source": [ 280 | "cfg.PARAMS['evolution_model'] = 'FluxBased' # default is 'SemiImplicit'" 281 | ] 282 | }, 283 | { 284 | "cell_type": "markdown", 285 | "id": "19", 286 | "metadata": {}, 287 | "source": [ 288 | "## Have 5 minutes more? The bed shape of the downstream line" 289 | ] 290 | }, 291 | { 292 | "cell_type": "markdown", 293 | "id": "20", 294 | "metadata": {}, 295 | "source": [ 296 | "This paragraph deals with the downstream line, the initially ice-free part in front of the glacier. 
You can see it below as the red line connecting the end of the outline with the left border of the figure:" 297 | ] 298 | }, 299 | { 300 | "cell_type": "code", 301 | "execution_count": null, 302 | "id": "21", 303 | "metadata": { 304 | "tags": [] 305 | }, 306 | "outputs": [], 307 | "source": [ 308 | "graphics.plot_centerlines(gdir_cl,\n", 309 | "                          use_flowlines=True,\n", 310 | "                          add_downstream=True)" 311 | ] 312 | }, 313 | { 314 | "cell_type": "markdown", 315 | "id": "22", 316 | "metadata": {}, 317 | "source": [ 318 | "In OGGM before v1.6, with the FluxBasedModel, the shape of this downstream line was defined by fitting a parabola to the valley walls. However, for the SemiImplicitModel we had to change the shape to a trapezoidal, even though a parabola approximates a mountain valley arguably better. We checked the influence of this change on advancing glaciers and found negligibly small differences in the volume on a regional scale. There might be some differences in the area.\n", 319 | "\n", 320 | "By default, we use a trapezoidal bed shape for the downstream line:" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": null, 326 | "id": "23", 327 | "metadata": {}, 328 | "outputs": [], 329 | "source": [ 330 | "fl_trap = gdir_eb.read_pickle('model_flowlines')\n", 331 | "fl_trap[-1].is_trapezoid[fl_trap[-1].thick == 0]" 332 | ] 333 | }, 334 | { 335 | "cell_type": "markdown", 336 | "id": "24", 337 | "metadata": {}, 338 | "source": [ 339 | "But if for any reason you decided to use the FluxBasedModel you can also switch back to a parabolic downstream line using `cfg.PARAMS['downstream_line_shape'] = 'parabola'`." 
340 | ] 341 | }, 342 | { 343 | "cell_type": "code", 344 | "execution_count": null, 345 | "id": "25", 346 | "metadata": { 347 | "tags": [] 348 | }, 349 | "outputs": [], 350 | "source": [ 351 | "# change the downstream line shape\n", 352 | "cfg.PARAMS['downstream_line_shape'] = 'parabola' # default is 'trapezoidal'\n", 353 | "\n", 354 | "# IMPORTANT: need to call init_present_time_glacier to take effect\n", 355 | "tasks.init_present_time_glacier(gdir_eb)\n", 356 | "\n", 357 | "fl_trap = gdir_eb.read_pickle('model_flowlines')\n", 358 | "fl_trap[-1].is_trapezoid[fl_trap[-1].thick == 0]" 359 | ] 360 | }, 361 | { 362 | "cell_type": "markdown", 363 | "id": "26", 364 | "metadata": {}, 365 | "source": [ 366 | "# What's next?" 367 | ] 368 | }, 369 | { 370 | "cell_type": "markdown", 371 | "id": "27", 372 | "metadata": {}, 373 | "source": [ 374 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 375 | "- back to the [table of contents](../welcome.ipynb)" 376 | ] 377 | } 378 | ], 379 | "metadata": { 380 | "kernelspec": { 381 | "display_name": "Python 3 (ipykernel)", 382 | "language": "python", 383 | "name": "python3" 384 | }, 385 | "language_info": { 386 | "codemirror_mode": { 387 | "name": "ipython", 388 | "version": 3 389 | }, 390 | "file_extension": ".py", 391 | "mimetype": "text/x-python", 392 | "name": "python", 393 | "nbconvert_exporter": "python", 394 | "pygments_lexer": "ipython3", 395 | "version": "3.11.4" 396 | } 397 | }, 398 | "nbformat": 4, 399 | "nbformat_minor": 5 400 | } 401 | -------------------------------------------------------------------------------- /notebooks/tutorials/preprocessing_errors.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Error analysis of the global pre-processing workflow" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "Here we reproduce the 
error analysis shown in [Maussion et al. (2019)](https://www.geosci-model-dev.net/12/909/2019/) **for the pre-processing part only, and for the glacier directories (version 1.6 and 1.4)**. The error analysis of user runs needs a separate handling, see the [deal_with_errors](../tutorials/deal_with_errors.ipynb) notebook for more information." 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "## Get the files" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "We download the `glacier_statistics` files from the preprocessed directories folders, at the level 5. That is, we are going to count all errors that happened during the pre-processing chain." 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": null, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "from oggm import utils\n", 38 | "import pandas as pd\n", 39 | "import seaborn as sns" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "We will start with a preprocessed directory for OGGM v1.6 (W5E5 with centerlines):" 47 | ] 48 | }, 49 | { 50 | "cell_type": "code", 51 | "execution_count": null, 52 | "metadata": {}, 53 | "outputs": [], 54 | "source": [ 55 | "# W5E5 centerlines\n", 56 | "url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/centerlines/W5E5/RGI62/b_080/L5/summary/'" 57 | ] 58 | }, 59 | { 60 | "cell_type": "code", 61 | "execution_count": null, 62 | "metadata": {}, 63 | "outputs": [], 64 | "source": [ 65 | "# this can take some time\n", 66 | "df = []\n", 67 | "for rgi_reg in range(1, 19):\n", 68 | " fpath = utils.file_downloader(url + f'glacier_statistics_{rgi_reg:02d}.csv')\n", 69 | " df.append(pd.read_csv(fpath, index_col=0, low_memory=False))\n", 70 | "df = pd.concat(df, sort=False).sort_index()" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "## Analyze 
the errors" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "sns.countplot(y=\"error_task\", data=df);" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": {}, 93 | "outputs": [], 94 | "source": [ 95 | "\"% area errors all sources: {:.2f}%\".format(df.loc[~df['error_task'].isnull()].rgi_area_km2.sum() / df.rgi_area_km2.sum() * 100)" 96 | ] 97 | }, 98 | { 99 | "cell_type": "code", 100 | "execution_count": null, 101 | "metadata": {}, 102 | "outputs": [], 103 | "source": [ 104 | "\"% failing glaciers all sources: {:.2f}%\".format(df.loc[~df['error_task'].isnull()].rgi_area_km2.count() / df.rgi_area_km2.count() * 100)" 105 | ] 106 | }, 107 | { 108 | "cell_type": "markdown", 109 | "metadata": {}, 110 | "source": [ 111 | "We now look at the errors that occur already before applying the climate tasks and the historical run (i.e., before `mb_calibration_from_scalar_mb` and `flowline_model_run_historical`):" 112 | ] 113 | }, 114 | { 115 | "cell_type": "code", 116 | "execution_count": null, 117 | "metadata": {}, 118 | "outputs": [], 119 | "source": [ 120 | "dfe = df.loc[~df['error_task'].isnull()]\n", 121 | "dfe = dfe.loc[~dfe['error_task'].isin(['mb_calibration_from_scalar_mb','flowline_model_run_historical'])]\n", 122 | "\"% area errors before climate: {:.2f}%\".format(dfe.rgi_area_km2.sum() / df.rgi_area_km2.sum() * 100)" 123 | ] 124 | }, 125 | { 126 | "cell_type": "code", 127 | "execution_count": null, 128 | "metadata": {}, 129 | "outputs": [], 130 | "source": [ 131 | "\"% failing glaciers all sources: {:.2f}%\".format(dfe.loc[~dfe['error_task'].isnull()].rgi_area_km2.count() / df.rgi_area_km2.count() * 100)" 132 | ] 133 | }, 134 | { 135 | "cell_type": "markdown", 136 | "metadata": {}, 137 | "source": [ 138 | "Although there are already many glaciers failing before the climate tasks, from a relative missing glacier area perspective, much 
less of the failing glacier area occurs. The reason is that the largest failing glaciers have mostly `flowline_model_run_historical` errors that only occur in the preprocessed directories level 4 or higher (after the climate tasks):" 139 | ] 140 | }, 141 | { 142 | "cell_type": "code", 143 | "execution_count": null, 144 | "metadata": {}, 145 | "outputs": [], 146 | "source": [ 147 | "# 15 largest glaciers\n", 148 | "df.loc[~df['error_task'].isnull()].sort_values(by='rgi_area_km2',\n", 149 | " ascending=False)[['rgi_area_km2', 'error_task',\n", 150 | " 'error_msg']].iloc[:15]" 151 | ] 152 | }, 153 | { 154 | "cell_type": "markdown", 155 | "metadata": {}, 156 | "source": [ 157 | "## Example error comparison\n" 158 | ] 159 | }, 160 | { 161 | "cell_type": "markdown", 162 | "metadata": {}, 163 | "source": [ 164 | "- **centerlines vs elevation bands**:" 165 | ] 166 | }, 167 | { 168 | "cell_type": "code", 169 | "execution_count": null, 170 | "metadata": {}, 171 | "outputs": [], 172 | "source": [ 173 | "# W5E5 elevation bands\n", 174 | "url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/L3-L5_files/2023.3/elev_bands/W5E5/RGI62/b_080/L5/summary/'\n", 175 | "# this can take some time\n", 176 | "df_elev = []\n", 177 | "# we don't look at RGI19 (Antarctic glaciers), because no climate available for CRU\n", 178 | "for rgi_reg in range(1, 19):\n", 179 | " fpath = utils.file_downloader(url + f'glacier_statistics_{rgi_reg:02d}.csv')\n", 180 | " df_elev.append(pd.read_csv(fpath, index_col=0, low_memory=False))\n", 181 | "df_elev = pd.concat(df_elev, sort=False).sort_index()" 182 | ] 183 | }, 184 | { 185 | "cell_type": "code", 186 | "execution_count": null, 187 | "metadata": {}, 188 | "outputs": [], 189 | "source": [ 190 | "rel_area_elev = df_elev.loc[~df_elev['error_task'].isnull()].rgi_area_km2.sum() / df_elev.rgi_area_km2.sum() * 100\n", 191 | "rel_area_cent = df.loc[~df['error_task'].isnull()].rgi_area_km2.sum() / df.rgi_area_km2.sum() * 100\n", 192 | "print(f'% area 
errors from all sources for elevation band flowlines is {rel_area_elev:.2f}%'+'\\n'\n", 193 | " f'compared to {rel_area_cent:.2f}% for centerlines with W5E5') " 194 | ] 195 | }, 196 | { 197 | "cell_type": "markdown", 198 | "metadata": {}, 199 | "source": [ 200 | "*much less errors occur when using elevation band flowlines than when using centerlines!*\n", 201 | "\n", 202 | "-> Reason: less *glacier_mask* errors! " 203 | ] 204 | }, 205 | { 206 | "cell_type": "code", 207 | "execution_count": null, 208 | "metadata": {}, 209 | "outputs": [], 210 | "source": [ 211 | "# you can check out the different error messages with that\n", 212 | "# but we only output the first 20 here\n", 213 | "df_elev.error_msg.dropna().unique()[:20]" 214 | ] 215 | }, 216 | { 217 | "cell_type": "markdown", 218 | "metadata": {}, 219 | "source": [ 220 | "- **Compare different climate datasets**\n", 221 | " - this works only when using OGGM v1.4 urls (comparing **CRU vs ERA5**)\n", 222 | " - in OGGM v1.6 (state: March 2023), only GSWP3_W5E5 exists" 223 | ] 224 | }, 225 | { 226 | "cell_type": "code", 227 | "execution_count": null, 228 | "metadata": {}, 229 | "outputs": [], 230 | "source": [ 231 | "# attention here OGGM_v1.4 is used and this is just for demonstration purposes how to compare\n", 232 | "# different preprocessed directories!\n", 233 | "# This is CRU + centerlines. 
But you can try CRU+elev_bands, or ERA5+elev_bands, etc!\n", 234 | "url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.4/L3-L5_files/CRU/centerlines/qc3/pcp2.5/no_match/RGI62/b_040/L5/summary/'\n", 235 | "# this can take some time\n", 236 | "df_cru_v14 = []\n", 237 | "# we don't look at RGI19 (Antarctic glaciers), because no climate available for CRU\n", 238 | "for rgi_reg in range(1, 19):\n", 239 | " fpath = utils.file_downloader(url + f'glacier_statistics_{rgi_reg:02d}.csv')\n", 240 | " df_cru_v14.append(pd.read_csv(fpath, index_col=0, low_memory=False))\n", 241 | "df_cru_v14 = pd.concat(df_cru_v14, sort=False).sort_index()" 242 | ] 243 | }, 244 | { 245 | "cell_type": "code", 246 | "execution_count": null, 247 | "metadata": {}, 248 | "outputs": [], 249 | "source": [ 250 | "# ERA5 uses a different precipitation factor in OGGM_v1.4\n", 251 | "url = 'https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.4/L3-L5_files/ERA5/centerlines/qc3/pcp1.6/no_match/RGI62/b_040/L5/summary/'\n", 252 | "# this can take some time\n", 253 | "df_era5_v14 = []\n", 254 | "# we don't look at RGI19 (Antarctic glaciers), because no climate available for CRU\n", 255 | "for rgi_reg in range(1, 19):\n", 256 | " fpath = utils.file_downloader(url + f'glacier_statistics_{rgi_reg:02d}.csv')\n", 257 | " df_era5_v14.append(pd.read_csv(fpath, index_col=0, low_memory=False))\n", 258 | "df_era5_v14 = pd.concat(df_era5_v14, sort=False).sort_index()" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": null, 264 | "metadata": {}, 265 | "outputs": [], 266 | "source": [ 267 | "rel_area_cent_era5_v14 = df_era5_v14.loc[~df_era5_v14['error_task'].isnull()].rgi_area_km2.sum() / df_era5_v14.rgi_area_km2.sum() * 100\n", 268 | "rel_area_cent_cru_v14 = df_cru_v14.loc[~df_cru_v14['error_task'].isnull()].rgi_area_km2.sum() / df_cru_v14.rgi_area_km2.sum() * 100\n", 269 | "print(f\"% area errors all sources for ERA5 is {rel_area_cent_era5_v14:.2f}% compared to 
{rel_area_cent_cru_v14:.2f}% for CRU\")\n", 270 | " " 271 | ] 272 | }, 273 | { 274 | "cell_type": "markdown", 275 | "metadata": {}, 276 | "source": [ 277 | "*more than three times less errors from the climate tasks occur when using ERA5 than when using CRU* !" 278 | ] 279 | }, 280 | { 281 | "cell_type": "markdown", 282 | "metadata": {}, 283 | "source": [ 284 | "- **Compare between OGGM versions (and climate datasets)**" 285 | ] 286 | }, 287 | { 288 | "cell_type": "code", 289 | "execution_count": null, 290 | "metadata": {}, 291 | "outputs": [], 292 | "source": [ 293 | "print('% area errors from all sources for centerlines is: \\n'+\n", 294 | " f'{rel_area_cent_cru_v14:.2f}% for CRU and OGGM_v14 \\n'+\n", 295 | " f'{rel_area_cent_era5_v14:.2f}% for ERA5 and OGGM_v14 \\n'+\n", 296 | " f'{rel_area_cent:.2f}% for W5E5 and OGGM_v16')\n", 297 | " " 298 | ] 299 | }, 300 | { 301 | "cell_type": "markdown", 302 | "metadata": {}, 303 | "source": [ 304 | "**Great, the most recent preprocessed directories create the least amount of failing glacier area** \n", 305 | "\n", 306 | "*This is either a result of the different applied climate, MB calibration or other changes in OGGM_v16. This could be checked more in details by looking into which tasks fail less.*" 307 | ] 308 | }, 309 | { 310 | "cell_type": "markdown", 311 | "metadata": {}, 312 | "source": [ 313 | "## What's next?\n", 314 | "\n", 315 | "- A more detailed analysis about the type, amount and relative failing glacier area (in total and per RGI region) can be found in [this error analysis jupyter notebook](https://nbviewer.org/urls/cluster.klima.uni-bremen.de/~lschuster/error_analysis/error_analysis_v1.ipynb?flush_cache=true). 
It also includes an error analysis for different [MB calibration and climate quality check methods](https://nbviewer.org/urls/cluster.klima.uni-bremen.de/~lschuster/error_analysis/error_analysis_v1.ipynb?flush_cache=true#Analysis-for-Level-5-pre-processing-directories!).\n", 316 | "- If you are interested in how the “common” non-failing glaciers differ in terms of historical volume change, total mass change and specific mass balance between different pre-processed glacier directories, you can check out [this jupyter notebook](https://nbviewer.org/urls/cluster.klima.uni-bremen.de/~lschuster/error_analysis/working_glacier_gdirs_comparison.ipynb?flush_cache=true).\n", 317 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 318 | "- back to the [table of contents](../welcome.ipynb)" 319 | ] 320 | } 321 | ], 322 | "metadata": { 323 | "hide_input": false, 324 | "kernelspec": { 325 | "display_name": "Python 3 (ipykernel)", 326 | "language": "python", 327 | "name": "python3" 328 | }, 329 | "language_info": { 330 | "codemirror_mode": { 331 | "name": "ipython", 332 | "version": 3 333 | }, 334 | "file_extension": ".py", 335 | "mimetype": "text/x-python", 336 | "name": "python", 337 | "nbconvert_exporter": "python", 338 | "pygments_lexer": "ipython3", 339 | "version": "3.11.4" 340 | }, 341 | "toc": { 342 | "base_numbering": 1, 343 | "nav_menu": {}, 344 | "number_sections": false, 345 | "sideBar": true, 346 | "skip_h1_title": true, 347 | "title_cell": "Table of Contents", 348 | "title_sidebar": "Contents", 349 | "toc_cell": false, 350 | "toc_position": {}, 351 | "toc_section_display": true, 352 | "toc_window_display": false 353 | } 354 | }, 355 | "nbformat": 4, 356 | "nbformat_minor": 4 357 | } 358 | -------------------------------------------------------------------------------- /notebooks/tutorials/run_with_a_spinup_and_gcm_data.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": 
"markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Run with a long spinup and GCM data\n", 8 | "\n", 9 | "The initial state of glaciers play a large role for the model output. In this example we illustrate how to “spinup” a glacier (e.g.: make them grow) before running over the period of interest. For this example we use climate data from the CESM Last Millennium Ensemble." 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "# Libs\n", 19 | "import matplotlib.pyplot as plt\n", 20 | "\n", 21 | "# Locals\n", 22 | "import oggm.cfg as cfg\n", 23 | "from oggm import tasks, utils, workflow\n", 24 | "from oggm.workflow import execute_entity_task\n", 25 | "from oggm.utils import get_demo_file" 26 | ] 27 | }, 28 | { 29 | "cell_type": "code", 30 | "execution_count": null, 31 | "metadata": {}, 32 | "outputs": [], 33 | "source": [ 34 | "# Initialize OGGM and set up the default run parameters\n", 35 | "cfg.initialize()\n", 36 | "\n", 37 | "# Local working directory (where OGGM will write its output)\n", 38 | "cfg.PATHS['working_dir'] = utils.gettempdir('OGGM_spinup_run')\n", 39 | "\n", 40 | "# Use multiprocessing?\n", 41 | "cfg.PARAMS['use_multiprocessing'] = False\n", 42 | "\n", 43 | "# This is necessary for spinup runs!\n", 44 | "cfg.PARAMS['store_model_geometry'] = True" 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | "Pre-processed directories are being used here." 
52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": {}, 58 | "outputs": [], 59 | "source": [ 60 | "# How many grid points around the glacier?\n", 61 | "# Make it large if you expect your glaciers to grow large\n", 62 | "cfg.PARAMS['border'] = 80\n", 63 | "\n", 64 | "# Go - initialize glacier directories\n", 65 | "# in OGGM v1.6 you have to explicitly indicate the url from where you want to start from\n", 66 | "# we will use here the elevation band flowlines which are much simpler than the centerlines\n", 67 | "base_url = ('https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/'\n", 68 | " 'L3-L5_files/2023.3/elev_bands/W5E5/')\n", 69 | "gdirs = workflow.init_glacier_directories(['RGI60-11.00897'], from_prepro_level=5,\n", 70 | " prepro_base_url=base_url)" 71 | ] 72 | }, 73 | { 74 | "cell_type": "markdown", 75 | "metadata": {}, 76 | "source": [ 77 | "Here the paths to the CESM-LME files are set. (The demo files that are being used in this example don't contain the whole last millennium, neither do they have the global coverage that they original files have. These demo files have been made for test purposes and to reduce the time it takes to run the example. If you use the demo files for a glacier outside the domain, you won't get an error. 
Instead the climate of the nearest point to the glacier that is available in the demo files will be used, which could be thousands of kilometers away.)" 78 | ] 79 | }, 80 | { 81 | "cell_type": "code", 82 | "execution_count": null, 83 | "metadata": {}, 84 | "outputs": [], 85 | "source": [ 86 | "# Additional climate file (CESM)\n", 87 | "cfg.PATHS['cesm_temp_file'] = get_demo_file('cesm.TREFHT.160001-200512'\n", 88 | " '.selection.nc')\n", 89 | "cfg.PATHS['cesm_precc_file'] = get_demo_file('cesm.PRECC.160001-200512'\n", 90 | " '.selection.nc')\n", 91 | "cfg.PATHS['cesm_precl_file'] = get_demo_file('cesm.PRECL.160001-200512'\n", 92 | " '.selection.nc')\n", 93 | "execute_entity_task(tasks.process_cesm_data, gdirs);" 94 | ] 95 | }, 96 | { 97 | "cell_type": "markdown", 98 | "metadata": {}, 99 | "source": [ 100 | "Here the CESM-LME data is being pre-processed. This process makes use of the delta method and uses scaled temperature anomalies by default (it is strongly recommended to use this default setting of scaling the temperature anomalies, unless you have very good reasons not to do so)." 
101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": null, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "execute_entity_task(tasks.process_cesm_data, gdirs);" 110 | ] 111 | }, 112 | { 113 | "cell_type": "code", 114 | "execution_count": null, 115 | "metadata": {}, 116 | "outputs": [], 117 | "source": [ 118 | "# Run the last 200 years with the default starting point (current glacier)\n", 119 | "# and CESM data as input\n", 120 | "execute_entity_task(tasks.run_from_climate_data, gdirs,\n", 121 | " climate_filename='gcm_data',\n", 122 | " ys=1801, ye=2000,\n", 123 | " output_filesuffix='_no_spinup');" 124 | ] 125 | }, 126 | { 127 | "cell_type": "code", 128 | "execution_count": null, 129 | "metadata": {}, 130 | "outputs": [], 131 | "source": [ 132 | "# Run the spinup simulation: a rather \"cold\" climate with a cold temperature bias\n", 133 | "execute_entity_task(tasks.run_constant_climate, gdirs, y0 = 1965,\n", 134 | " nyears=100, bias=0, \n", 135 | " output_filesuffix='_spinup');\n", 136 | "# Run a past climate run based on this spinup\n", 137 | "execute_entity_task(tasks.run_from_climate_data, gdirs,\n", 138 | " climate_filename='gcm_data',\n", 139 | " ys=1801, ye=2000,\n", 140 | " init_model_filesuffix='_spinup',\n", 141 | " output_filesuffix='_with_spinup');" 142 | ] 143 | }, 144 | { 145 | "cell_type": "markdown", 146 | "metadata": {}, 147 | "source": [ 148 | "When starting from a spin-up, by default the last year of the spin-up is being used to initialize a glacier. With `init_model_yr`, you can select any other year from the spin-up as initial year. An important parameter here is ``cfg.PARAMS['store_model_geometry'] = True`` set above, which told OGGM to store these \"restart files\" during the run." 
149 | ] 150 | }, 151 | { 152 | "cell_type": "code", 153 | "execution_count": null, 154 | "metadata": {}, 155 | "outputs": [], 156 | "source": [ 157 | "# Run a past climate run based on this spinup\n", 158 | "execute_entity_task(tasks.run_from_climate_data, gdirs,\n", 159 | " climate_filename='gcm_data',\n", 160 | " ys=1801, ye=2000, init_model_yr=50,\n", 161 | " init_model_filesuffix='_spinup',\n", 162 | " output_filesuffix='_with_spinup_50yr');" 163 | ] 164 | }, 165 | { 166 | "cell_type": "code", 167 | "execution_count": null, 168 | "metadata": {}, 169 | "outputs": [], 170 | "source": [ 171 | "# Compile output\n", 172 | "utils.compile_glacier_statistics(gdirs)\n", 173 | "ds1 = utils.compile_run_output(gdirs, input_filesuffix='_no_spinup')\n", 174 | "ds2 = utils.compile_run_output(gdirs, input_filesuffix='_with_spinup')\n", 175 | "ds3 = utils.compile_run_output(gdirs, input_filesuffix='_with_spinup_50yr')" 176 | ] 177 | }, 178 | { 179 | "cell_type": "code", 180 | "execution_count": null, 181 | "metadata": {}, 182 | "outputs": [], 183 | "source": [ 184 | "# Plot\n", 185 | "f, ax = plt.subplots(figsize=(9, 4))\n", 186 | "(ds1.volume.sum(dim='rgi_id') * 1e-9).plot(ax=ax, label='No spinup')\n", 187 | "(ds2.volume.sum(dim='rgi_id') * 1e-9).plot(ax=ax, label='With 100-yr spinup')\n", 188 | "(ds3.volume.sum(dim='rgi_id') * 1e-9).plot(ax=ax, label='With 50-yr spinup')\n", 189 | "ax.set_ylabel('Volume (km$^3$)')\n", 190 | "ax.set_xlabel('Year')\n", 191 | "ax.set_title('Hintereisferner volume under CESM-LME forcing')\n", 192 | "plt.legend()\n", 193 | "plt.tight_layout()" 194 | ] 195 | }, 196 | { 197 | "cell_type": "markdown", 198 | "metadata": {}, 199 | "source": [ 200 | "## What's next?\n", 201 | "\n", 202 | "- look on the tutorial of how a typical \"projection run\" (there CMIP5 and CMIP6) is done at [run_with_gcm.ipynb](../10minutes/run_with_gcm.ipynb)\n", 203 | "- return to the [OGGM documentation](https://docs.oggm.org)\n", 204 | "- back to the [table of 
contents](../welcome.ipynb)" 205 | ] 206 | }, 207 | { 208 | "cell_type": "code", 209 | "execution_count": null, 210 | "metadata": {}, 211 | "outputs": [], 212 | "source": [] 213 | } 214 | ], 215 | "metadata": { 216 | "kernelspec": { 217 | "display_name": "Python 3 (ipykernel)", 218 | "language": "python", 219 | "name": "python3" 220 | }, 221 | "language_info": { 222 | "codemirror_mode": { 223 | "name": "ipython", 224 | "version": 3 225 | }, 226 | "file_extension": ".py", 227 | "mimetype": "text/x-python", 228 | "name": "python", 229 | "nbconvert_exporter": "python", 230 | "pygments_lexer": "ipython3", 231 | "version": "3.12.4" 232 | } 233 | }, 234 | "nbformat": 4, 235 | "nbformat_minor": 4 236 | } 237 | -------------------------------------------------------------------------------- /notebooks/tutorials/store_and_compress_glacierdirs.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Storing glacier directories for later use" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "\"Glacier directories\" are the fundamental data structure used by OGGM. They allow to share data between runs, between the OGGM developers and users, and between users themselves. \n", 15 | "\n", 16 | "Glacier directories can also be confusing at times, and can contain a high number of files, making them hard to move between clusters or computers. 
This notebook explains how these directories are structured and how to store them for move and later use.\n", 17 | "\n", 18 | "The main use-cases documented by this notebook are:\n", 19 | "- pre-process a number of glacier directories\n", 20 | "- stop working, and then re-start again from the same location\n", 21 | "- stop working, store them and copy them to another storage, or move them to another machine\n", 22 | "- re-start from them on another machine / instance" 23 | ] 24 | }, 25 | { 26 | "cell_type": "code", 27 | "execution_count": null, 28 | "metadata": { 29 | "tags": [] 30 | }, 31 | "outputs": [], 32 | "source": [ 33 | "# Libs\n", 34 | "import os\n", 35 | "import shutil\n", 36 | "\n", 37 | "# Locals\n", 38 | "import oggm.cfg as cfg\n", 39 | "from oggm import utils, workflow, tasks, DEFAULT_BASE_URL" 40 | ] 41 | }, 42 | { 43 | "cell_type": "markdown", 44 | "metadata": {}, 45 | "source": [ 46 | "## The structure of the working directory" 47 | ] 48 | }, 49 | { 50 | "cell_type": "markdown", 51 | "metadata": {}, 52 | "source": [ 53 | "Let's open a new workflow for two glaciers:" 54 | ] 55 | }, 56 | { 57 | "cell_type": "code", 58 | "execution_count": null, 59 | "metadata": { 60 | "tags": [] 61 | }, 62 | "outputs": [], 63 | "source": [ 64 | "# Initialize OGGM and set up the default run parameters\n", 65 | "cfg.initialize(logging_level='WARNING')\n", 66 | "rgi_version = '62'\n", 67 | "cfg.PARAMS['border'] = 80\n", 68 | "\n", 69 | "# Local working directory (where OGGM will write its output)\n", 70 | "WORKING_DIR = utils.gettempdir('oggm_gdirs_wd', reset=True)\n", 71 | "cfg.PATHS['working_dir'] = WORKING_DIR\n", 72 | "\n", 73 | "# RGI glaciers: Hintereisferner and Kesselwandferner\n", 74 | "rgi_ids = utils.get_rgi_glacier_entities(['RGI60-11.00897', 'RGI60-11.00787'])\n", 75 | "\n", 76 | "# Go - get the pre-processed glacier directories\n", 77 | "base_url = ('https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/'\n", 78 | " 
'L3-L5_files/2023.3/elev_bands/W5E5/')\n", 79 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_base_url=base_url)" 80 | ] 81 | }, 82 | { 83 | "cell_type": "markdown", 84 | "metadata": {}, 85 | "source": [ 86 | "Note that in OGGM v1.6 you have to explicitly indicate the url from where you want to start from, \n", 87 | "we will use here a preprocessed directory with elevation band flowlines and used W5E5 for calibration. In the future, [other preprocessed directories might exist](https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.6/) and you can use them by changing the base_url. " 88 | ] 89 | }, 90 | { 91 | "cell_type": "markdown", 92 | "metadata": {}, 93 | "source": [ 94 | "OGGM downloaded the pre-processed directories, stored the tar files in your cache, and extracted them in your working directory. But how is this working directory structured? Let's have a look:" 95 | ] 96 | }, 97 | { 98 | "cell_type": "code", 99 | "execution_count": null, 100 | "metadata": { 101 | "tags": [] 102 | }, 103 | "outputs": [], 104 | "source": [ 105 | "def file_tree_print(prepro_dir=False):\n", 106 | " # Just a utility function to show the dir structure and selected files\n", 107 | " print(\"cfg.PATHS['working_dir']/\")\n", 108 | " tab = ' '\n", 109 | " for dirname, dirnames, filenames in os.walk(cfg.PATHS['working_dir']):\n", 110 | " for subdirname in dirnames:\n", 111 | " print(tab + subdirname + '/')\n", 112 | " for filename in filenames:\n", 113 | " if '.tar' in filename and 'RGI' in filename:\n", 114 | " print(tab + filename)\n", 115 | " tab += ' '" 116 | ] 117 | }, 118 | { 119 | "cell_type": "code", 120 | "execution_count": null, 121 | "metadata": { 122 | "tags": [] 123 | }, 124 | "outputs": [], 125 | "source": [ 126 | "file_tree_print()" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "OK, so from the `WORKING_DIR`, OGGM creates a `per_glacier` folder (always) where the glacier 
directories are stored. In order to avoid a large cluttering of the folder (and for other reasons which become apparent later), the directories are organised in regional (here `RGI60-11`) and then in folders containing up to 1000 glaciers (here `RGI60-11.00`, i.e. for ids `RGI60-11.000000` to `RGI60-11.009999`).\n", 134 | "\n", 135 | "Our files are located in the final folders of this tree (not shown in the tree). For example:" 136 | ] 137 | }, 138 | { 139 | "cell_type": "code", 140 | "execution_count": null, 141 | "metadata": { 142 | "tags": [] 143 | }, 144 | "outputs": [], 145 | "source": [ 146 | "gdirs[0].get_filepath('dem').replace(WORKING_DIR, 'WORKING_DIR')" 147 | ] 148 | }, 149 | { 150 | "cell_type": "markdown", 151 | "metadata": {}, 152 | "source": [ 153 | "Let's add some steps to our workflow, for example a spinup run that we would like to store for later: " 154 | ] 155 | }, 156 | { 157 | "cell_type": "code", 158 | "execution_count": null, 159 | "metadata": { 160 | "tags": [] 161 | }, 162 | "outputs": [], 163 | "source": [ 164 | "# Run\n", 165 | "workflow.execute_entity_task(tasks.run_from_climate_data, gdirs, \n", 166 | " output_filesuffix='_spinup', # to use the files as input later on\n", 167 | " );" 168 | ] 169 | }, 170 | { 171 | "cell_type": "markdown", 172 | "metadata": {}, 173 | "source": [ 174 | "## Stop there and restart from the same spot " 175 | ] 176 | }, 177 | { 178 | "cell_type": "markdown", 179 | "metadata": {}, 180 | "source": [ 181 | "The glacier directories are on disk, and won't move away. This means that next time you'll open OGGM, from this notebook or another script, you can start from them again. 
The only steps you have to take:\n", 182 | "- set the working directory to the one you want to start from\n", 183 | "- initialize the working directories without arguments (or, faster, with the list of IDs)\n", 184 | "\n", 185 | "See for example:" 186 | ] 187 | }, 188 | { 189 | "cell_type": "code", 190 | "execution_count": null, 191 | "metadata": { 192 | "tags": [] 193 | }, 194 | "outputs": [], 195 | "source": [ 196 | "# Set the working dir correctly\n", 197 | "cfg.PATHS['working_dir'] = utils.gettempdir('oggm_gdirs_wd')\n", 198 | "\n", 199 | "# Go - re-open the pre-processed glacier directories from what's there\n", 200 | "gdirs = workflow.init_glacier_directories()" 201 | ] 202 | }, 203 | { 204 | "cell_type": "markdown", 205 | "metadata": {}, 206 | "source": [ 207 | "The step above can be quite slow (because OGGM has to parse quite some info from the directories). Better is to start from the list of glaciers you want to work with:" 208 | ] 209 | }, 210 | { 211 | "cell_type": "code", 212 | "execution_count": null, 213 | "metadata": { 214 | "tags": [] 215 | }, 216 | "outputs": [], 217 | "source": [ 218 | "# Go - re-open the pre-processed glacier directories from what's there but with the list of glaciers\n", 219 | "gdirs = workflow.init_glacier_directories(rgi_ids)" 220 | ] 221 | }, 222 | { 223 | "cell_type": "markdown", 224 | "metadata": {}, 225 | "source": [ 226 | "**!!!CAREFUL!!!** do **not** start from a preprocessed level (or from a tar file), or your local directories (which may contain new data) will be overwritten, i.e. `workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_base_url=base_url)` will always start from the pre-processed, fresh state." 
227 | ] 228 | }, 229 | { 230 | "cell_type": "markdown", 231 | "metadata": {}, 232 | "source": [ 233 | "## Store the single glacier directories into tar files" 234 | ] 235 | }, 236 | { 237 | "cell_type": "markdown", 238 | "metadata": {}, 239 | "source": [ 240 | "The `gdir_to_tar` task will compress each single glacier directory into the same folder per default (but you can actually also put the compressed files somewhere else, e.g. in a folder in your `$home`):" 241 | ] 242 | }, 243 | { 244 | "cell_type": "code", 245 | "execution_count": null, 246 | "metadata": { 247 | "tags": [] 248 | }, 249 | "outputs": [], 250 | "source": [ 251 | "utils.gdir_to_tar?" 252 | ] 253 | }, 254 | { 255 | "cell_type": "code", 256 | "execution_count": null, 257 | "metadata": { 258 | "tags": [] 259 | }, 260 | "outputs": [], 261 | "source": [ 262 | "workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=False);\n", 263 | "file_tree_print()" 264 | ] 265 | }, 266 | { 267 | "cell_type": "markdown", 268 | "metadata": {}, 269 | "source": [ 270 | "Most of the time, you will actually want to delete the orginal directories because they are not needed for this run anymore:" 271 | ] 272 | }, 273 | { 274 | "cell_type": "code", 275 | "execution_count": null, 276 | "metadata": { 277 | "tags": [] 278 | }, 279 | "outputs": [], 280 | "source": [ 281 | "workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=True);\n", 282 | "file_tree_print()" 283 | ] 284 | }, 285 | { 286 | "cell_type": "markdown", 287 | "metadata": {}, 288 | "source": [ 289 | "Now the original directories are gone, and the `gdirs` objects are useless (attempting to do anything with them will lead to an error).\n", 290 | "\n", 291 | "Since they are already available in the correct file structure, however, OGGM will know how to reconstruct them from the tar files if asked to:" 292 | ] 293 | }, 294 | { 295 | "cell_type": "code", 296 | "execution_count": null, 297 | "metadata": { 298 | "tags": [] 299 | }, 300 | "outputs": [], 301 
| "source": [ 302 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_tar=True, delete_tar=True)\n", 303 | "file_tree_print()" 304 | ] 305 | }, 306 | { 307 | "cell_type": "markdown", 308 | "metadata": {}, 309 | "source": [ 310 | "These directories are now ready to be used again! To summarize: thanks to this first step, you already reduced the number of files to move around from N x M (where M is the number of files in each glacier directory) to N (where N is the number of glaciers).\n", 311 | "\n", 312 | "You can now move this working directory somewhere else, and in another OGGM run instance, simply start from them as shown above." 313 | ] 314 | }, 315 | { 316 | "cell_type": "markdown", 317 | "metadata": {}, 318 | "source": [ 319 | "## Bundle of directories" 320 | ] 321 | }, 322 | { 323 | "cell_type": "markdown", 324 | "metadata": {}, 325 | "source": [ 326 | "It turned out that the file structure above was a bit cumbersome to use, in particular for glacier directories that we wanted to share online. For this, we found it more convenient to bundle the directories into groups of 1000 glaciers. Fortunately, this is easy to do:" 327 | ] 328 | }, 329 | { 330 | "cell_type": "code", 331 | "execution_count": null, 332 | "metadata": { 333 | "tags": [] 334 | }, 335 | "outputs": [], 336 | "source": [ 337 | "utils.base_dir_to_tar?" 338 | ] 339 | }, 340 | { 341 | "cell_type": "code", 342 | "execution_count": null, 343 | "metadata": { 344 | "tags": [] 345 | }, 346 | "outputs": [], 347 | "source": [ 348 | "# Tar the individual ones first\n", 349 | "workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=True);\n", 350 | "# Then tar the bundles\n", 351 | "utils.base_dir_to_tar(WORKING_DIR, delete=True)\n", 352 | "file_tree_print()" 353 | ] 354 | }, 355 | { 356 | "cell_type": "markdown", 357 | "metadata": {}, 358 | "source": [ 359 | "Now, the glacier directories are bundled in a file at a higher level even. 
This is even more convenient to move around (less files), but is not a mandatory step. The nice part about this bundling is that you can still select individual glaciers, as we will see in the next section. In the meantime, you can do: " 360 | ] 361 | }, 362 | { 363 | "cell_type": "code", 364 | "execution_count": null, 365 | "metadata": { 366 | "tags": [] 367 | }, 368 | "outputs": [], 369 | "source": [ 370 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_tar=True)\n", 371 | "file_tree_print()" 372 | ] 373 | }, 374 | { 375 | "cell_type": "markdown", 376 | "metadata": {}, 377 | "source": [ 378 | "Which did the trick! Note that the bundled tar files are never deleted. This is why they are useful for another purpose explained in the next section: creating your own \"pre-processed directories\"." 379 | ] 380 | }, 381 | { 382 | "cell_type": "markdown", 383 | "metadata": {}, 384 | "source": [ 385 | "## Self-made pre-processed directories for \"restart\" workflows" 386 | ] 387 | }, 388 | { 389 | "cell_type": "markdown", 390 | "metadata": {}, 391 | "source": [ 392 | "This workflow is the one used by OGGM to prepare the preprocessed directories that many of you are using. It is a variant of the workflow above, the only difference being that the directories are re-started from a file which is located elsewhere than in the working directory:" 393 | ] 394 | }, 395 | { 396 | "cell_type": "code", 397 | "execution_count": null, 398 | "metadata": { 399 | "tags": [] 400 | }, 401 | "outputs": [], 402 | "source": [ 403 | "# Where to put the compressed dirs\n", 404 | "PREPRO_DIR = utils.get_temp_dir('prepro_dir')\n", 405 | "if os.path.exists(PREPRO_DIR):\n", 406 | " shutil.rmtree(PREPRO_DIR)\n", 407 | "\n", 408 | "# Lets start from a clean state\n", 409 | "# Beware! If you use `reset=True` in `utils.mkdir`, ALL DATA in this folder will be deleted! 
Use with caution!\n", 410 | "utils.mkdir(WORKING_DIR, reset=True)\n", 411 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_base_url=base_url)\n", 412 | "\n", 413 | "# Then tar the gdirs and bundle\n", 414 | "workflow.execute_entity_task(utils.gdir_to_tar, gdirs, delete=True)\n", 415 | "utils.base_dir_to_tar(delete=True)\n", 416 | "\n", 417 | "# Copy the outcome in a new directory: scratch folder, new machine, etc.\n", 418 | "shutil.copytree(os.path.join(WORKING_DIR, 'per_glacier'), PREPRO_DIR);" 419 | ] 420 | }, 421 | { 422 | "cell_type": "markdown", 423 | "metadata": {}, 424 | "source": [ 425 | "OK so this `PREPRO_DIR` directory is where the files will stay for longer now. You can start from there at wish with:" 426 | ] 427 | }, 428 | { 429 | "cell_type": "code", 430 | "execution_count": null, 431 | "metadata": { 432 | "tags": [] 433 | }, 434 | "outputs": [], 435 | "source": [ 436 | "# Lets start from a clean state\n", 437 | "utils.mkdir(WORKING_DIR, reset=True)\n", 438 | "# This needs https://github.com/OGGM/oggm/pull/1158 to work\n", 439 | "# It uses the files you prepared beforehand to start the dirs\n", 440 | "gdirs = workflow.init_glacier_directories(rgi_ids, from_tar=PREPRO_DIR)\n", 441 | "file_tree_print()" 442 | ] 443 | }, 444 | { 445 | "cell_type": "markdown", 446 | "metadata": { 447 | "tags": [] 448 | }, 449 | "source": [ 450 | "## What's next?\n", 451 | "\n", 452 | "- look at the [OGGM-Shop documentation](https://docs.oggm.org/en/stable/input-data.html#)\n", 453 | "- back to the [table of contents](../welcome.ipynb)" 454 | ] 455 | } 456 | ], 457 | "metadata": { 458 | "hide_input": false, 459 | "kernelspec": { 460 | "display_name": "Python 3 (ipykernel)", 461 | "language": "python", 462 | "name": "python3" 463 | }, 464 | "language_info": { 465 | "codemirror_mode": { 466 | "name": "ipython", 467 | "version": 3 468 | }, 469 | "file_extension": ".py", 470 | "mimetype": "text/x-python", 471 | "name": "python", 472 | 
"nbconvert_exporter": "python", 473 | "pygments_lexer": "ipython3", 474 | "version": "3.12.4" 475 | }, 476 | "latex_envs": { 477 | "LaTeX_envs_menu_present": true, 478 | "autoclose": false, 479 | "autocomplete": true, 480 | "bibliofile": "biblio.bib", 481 | "cite_by": "apalike", 482 | "current_citInitial": 1, 483 | "eqLabelWithNumbers": true, 484 | "eqNumInitial": 1, 485 | "hotkeys": { 486 | "equation": "Ctrl-E", 487 | "itemize": "Ctrl-I" 488 | }, 489 | "labels_anchors": false, 490 | "latex_user_defs": false, 491 | "report_style_numbering": false, 492 | "user_envs_cfg": false 493 | }, 494 | "nbTranslate": { 495 | "displayLangs": [ 496 | "*" 497 | ], 498 | "hotkey": "alt-t", 499 | "langInMainMenu": true, 500 | "sourceLang": "en", 501 | "targetLang": "fr", 502 | "useGoogleTranslate": true 503 | }, 504 | "toc": { 505 | "base_numbering": 1, 506 | "nav_menu": {}, 507 | "number_sections": false, 508 | "sideBar": true, 509 | "skip_h1_title": true, 510 | "title_cell": "Table of Contents", 511 | "title_sidebar": "Contents", 512 | "toc_cell": false, 513 | "toc_position": {}, 514 | "toc_section_display": true, 515 | "toc_window_display": false 516 | } 517 | }, 518 | "nbformat": 4, 519 | "nbformat_minor": 4 520 | } 521 | -------------------------------------------------------------------------------- /notebooks/tutorials/working_with_rgi.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Working with the RGI and prepare glaciers for a run" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "The glacier outlines obtained from the [Randolph Glacier Inventory](https://www.glims.org/RGI/) are the reference dataset for global and regional applications in OGGM. The current version supported by is V6, and OGGM ships with a slightly modified version which we called `62`. 
OGGM also supports RGI 7, but not yet for full modelling workflows." 15 | ] 16 | }, 17 | { 18 | "cell_type": "markdown", 19 | "metadata": {}, 20 | "source": [ 21 | "**Tags:** beginner, glacier-directory, workflow, RGI" 22 | ] 23 | }, 24 | { 25 | "cell_type": "markdown", 26 | "metadata": {}, 27 | "source": [ 28 | "## Find out the ID of a glacier" 29 | ] 30 | }, 31 | { 32 | "cell_type": "markdown", 33 | "metadata": {}, 34 | "source": [ 35 | "If there is a glacier you like, and you would like to know its RGI6 or RGI7 ID, we recommend the [GLIMS glacier viewer](https://www.glims.org/maps/glims). To find the RGI6 ID, you'll need to tick the \"RGI6\" box in the map layers, then click on the glacier of your choice as illustrated below.\n", 36 | "\n", 37 | "![](../../img/show_viewer.gif)" 38 | ] 39 | }, 40 | { 41 | "cell_type": "markdown", 42 | "metadata": {}, 43 | "source": [ 44 | "## Download the glacier outlines " 45 | ] 46 | }, 47 | { 48 | "cell_type": "markdown", 49 | "metadata": {}, 50 | "source": [ 51 | " To download this version, simply do: " 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": null, 57 | "metadata": { 58 | "tags": [] 59 | }, 60 | "outputs": [], 61 | "source": [ 62 | "# this might take a couple of minutes!\n", 63 | "from oggm import utils\n", 64 | "utils.get_rgi_dir(version='62') # path to the data after download - for RGI7, you can use '70G' (for the glacier product) or '70C' (for the glacier complex product)" 65 | ] 66 | }, 67 | { 68 | "cell_type": "markdown", 69 | "metadata": {}, 70 | "source": [ 71 | "## Access a region file" 72 | ] 73 | }, 74 | { 75 | "cell_type": "markdown", 76 | "metadata": {}, 77 | "source": [ 78 | "The RGI is divided into 19 regions (and many more sub-regions, not plotted here):" 79 | ] 80 | }, 81 | { 82 | "cell_type": "markdown", 83 | "metadata": {}, 84 | "source": [ 85 | 
"![rgi-map](https://www.researchgate.net/profile/Tobias_Bolch/publication/264125572/figure/fig1/AS:295867740377088@1447551774164/First-order-regions-of-the-RGI-with-glaciers-shown-in-red-Region-numbers-are-those-of.png)\n", 86 | "*Source: [the RGI consortium](http://www.glims.org/RGI/randolph60.html)*" 87 | ] 88 | }, 89 | { 90 | "cell_type": "code", 91 | "execution_count": null, 92 | "metadata": { 93 | "tags": [] 94 | }, 95 | "outputs": [], 96 | "source": [ 97 | "fr = utils.get_rgi_region_file(11, version='62') # Central Europe" 98 | ] 99 | }, 100 | { 101 | "cell_type": "markdown", 102 | "metadata": {}, 103 | "source": [ 104 | "The RGI region files are [shapefiles](https://en.wikipedia.org/wiki/Shapefile), a vector format commonly used in GIS applications. The library of choice to read shapefiles in python is [geopandas](http://geopandas.org/):" 105 | ] 106 | }, 107 | { 108 | "cell_type": "code", 109 | "execution_count": null, 110 | "metadata": { 111 | "tags": [] 112 | }, 113 | "outputs": [], 114 | "source": [ 115 | "import geopandas as gpd\n", 116 | "gdf = gpd.read_file(fr)" 117 | ] 118 | }, 119 | { 120 | "cell_type": "markdown", 121 | "metadata": {}, 122 | "source": [ 123 | "## The RGI files and their attributes " 124 | ] 125 | }, 126 | { 127 | "cell_type": "markdown", 128 | "metadata": {}, 129 | "source": [ 130 | "The `gdf` variable is a `GeoDataFrame`, i.e. 
you can use most of the tools you know from pandas' `DataFrames`:" 131 | ] 132 | }, 133 | { 134 | "cell_type": "code", 135 | "execution_count": null, 136 | "metadata": { 137 | "tags": [] 138 | }, 139 | "outputs": [], 140 | "source": [ 141 | "len(gdf)" 142 | ] 143 | }, 144 | { 145 | "cell_type": "code", 146 | "execution_count": null, 147 | "metadata": { 148 | "tags": [] 149 | }, 150 | "outputs": [], 151 | "source": [ 152 | "gdf.head()" 153 | ] 154 | }, 155 | { 156 | "cell_type": "code", 157 | "execution_count": null, 158 | "metadata": { 159 | "tags": [] 160 | }, 161 | "outputs": [], 162 | "source": [ 163 | "gdf[['Area']].plot(kind='hist', bins=100, logy=True);" 164 | ] 165 | }, 166 | { 167 | "cell_type": "code", 168 | "execution_count": null, 169 | "metadata": { 170 | "tags": [] 171 | }, 172 | "outputs": [], 173 | "source": [ 174 | "gdf[['Aspect']].plot(kind='hist', bins=45);" 175 | ] 176 | }, 177 | { 178 | "cell_type": "markdown", 179 | "metadata": {}, 180 | "source": [ 181 | "## Selecting glaciers per attribute" 182 | ] 183 | }, 184 | { 185 | "cell_type": "markdown", 186 | "metadata": {}, 187 | "source": [ 188 | "You may want to select all glaciers in the subregion 2 (Pyrenees):" 189 | ] 190 | }, 191 | { 192 | "cell_type": "code", 193 | "execution_count": null, 194 | "metadata": { 195 | "tags": [] 196 | }, 197 | "outputs": [], 198 | "source": [ 199 | "gdf_sel = gdf.loc[gdf.O2Region == '2']" 200 | ] 201 | }, 202 | { 203 | "cell_type": "code", 204 | "execution_count": null, 205 | "metadata": { 206 | "tags": [] 207 | }, 208 | "outputs": [], 209 | "source": [ 210 | "'Glacier area in the Pyrenees: {} km2'.format(gdf_sel.Area.sum())" 211 | ] 212 | }, 213 | { 214 | "cell_type": "markdown", 215 | "metadata": {}, 216 | "source": [ 217 | "## Selecting glaciers in a basin " 218 | ] 219 | }, 220 | { 221 | "cell_type": "markdown", 222 | "metadata": {}, 223 | "source": [ 224 | "Let's use a file shipped with OGGM for a start: " 225 | ] 226 | }, 227 | { 228 | "cell_type": "code", 
229 | "execution_count": null, 230 | "metadata": { 231 | "tags": [] 232 | }, 233 | "outputs": [], 234 | "source": [ 235 | "path = utils.get_demo_file('rofental_hydrosheds.shp')\n", 236 | "basin = gpd.read_file(path)" 237 | ] 238 | }, 239 | { 240 | "cell_type": "code", 241 | "execution_count": null, 242 | "metadata": { 243 | "tags": [] 244 | }, 245 | "outputs": [], 246 | "source": [ 247 | "basin.plot();" 248 | ] 249 | }, 250 | { 251 | "cell_type": "markdown", 252 | "metadata": {}, 253 | "source": [ 254 | "And select all glaciers within this shape:" 255 | ] 256 | }, 257 | { 258 | "cell_type": "code", 259 | "execution_count": null, 260 | "metadata": { 261 | "tags": [] 262 | }, 263 | "outputs": [], 264 | "source": [ 265 | "import shapely.geometry as shpg\n", 266 | "in_bas = [basin.geometry.contains(shpg.Point(x, y))[0] for\n", 267 | " (x, y) in zip(gdf.CenLon, gdf.CenLat)]\n", 268 | "gdf_sel = gdf.loc[in_bas]" 269 | ] 270 | }, 271 | { 272 | "cell_type": "code", 273 | "execution_count": null, 274 | "metadata": { 275 | "tags": [] 276 | }, 277 | "outputs": [], 278 | "source": [ 279 | "ax = basin.plot();\n", 280 | "gdf_sel.plot(ax=ax, edgecolor='k');" 281 | ] 282 | }, 283 | { 284 | "cell_type": "markdown", 285 | "metadata": {}, 286 | "source": [ 287 | "## Select glaciers by their ID " 288 | ] 289 | }, 290 | { 291 | "cell_type": "markdown", 292 | "metadata": {}, 293 | "source": [ 294 | "Each glacier in the RGI has a unique ID. It is sometimes difficult to find out which one, but some tools can help you out. For example, the [GLIMS viewer](https://www.glims.org/maps/glims) allows you to select glaciers and then see their ID. 
For example, the Aletsch Glacier in the Swiss Alps:" 295 | ] 296 | }, 297 | { 298 | "cell_type": "code", 299 | "execution_count": null, 300 | "metadata": { 301 | "tags": [] 302 | }, 303 | "outputs": [], 304 | "source": [ 305 | "al = utils.get_rgi_glacier_entities(['RGI60-11.01450'], version='62')\n", 306 | "al.plot(edgecolor='k');" 307 | ] 308 | }, 309 | { 310 | "cell_type": "markdown", 311 | "metadata": {}, 312 | "source": [ 313 | "## Use the RGI files to start an OGGM run " 314 | ] 315 | }, 316 | { 317 | "cell_type": "markdown", 318 | "metadata": {}, 319 | "source": [ 320 | "RGI files can be given as input to OGGM to make a run:" 321 | ] 322 | }, 323 | { 324 | "cell_type": "code", 325 | "execution_count": null, 326 | "metadata": { 327 | "tags": [] 328 | }, 329 | "outputs": [], 330 | "source": [ 331 | "from oggm import cfg, workflow, tasks, DEFAULT_BASE_URL\n", 332 | "cfg.initialize(logging_level='WARNING')\n", 333 | "cfg.PARAMS['continue_on_error'] = True\n", 334 | "cfg.PARAMS['use_multiprocessing'] = True\n", 335 | "cfg.PARAMS['border'] = 80\n", 336 | "cfg.PATHS['working_dir'] = utils.gettempdir(dirname='OGGM-Rofental', reset=True)\n", 337 | "\n", 338 | "# Go - get the pre-processed glacier directories\n", 339 | "gdirs = workflow.init_glacier_directories(gdf_sel, prepro_base_url=DEFAULT_BASE_URL, from_prepro_level=5)" 340 | ] 341 | }, 342 | { 343 | "cell_type": "code", 344 | "execution_count": null, 345 | "metadata": { 346 | "tags": [] 347 | }, 348 | "outputs": [], 349 | "source": [ 350 | "workflow.execute_entity_task(tasks.run_random_climate, gdirs, nyears=100,\n", 351 | " y0=2009, halfsize=10, output_filesuffix='_2000')\n", 352 | "ds2000 = utils.compile_run_output(gdirs, input_filesuffix='_2000')" 353 | ] 354 | }, 355 | { 356 | "cell_type": "code", 357 | "execution_count": null, 358 | "metadata": {}, 359 | "outputs": [], 360 | "source": [ 361 | "ds2000.sum(dim='rgi_id').volume.plot();" 362 | ] 363 | }, 364 | { 365 | "cell_type": "markdown", 366 | "metadata": 
{}, 367 | "source": [ 368 | "This shows the summed up volume evolution of all glaciers of the Rofental basin." 369 | ] 370 | }, 371 | { 372 | "cell_type": "markdown", 373 | "metadata": { 374 | "tags": [] 375 | }, 376 | "source": [ 377 | "## What's next?\n", 378 | "\n", 379 | "- look at the [OGGM-Shop documentation](https://docs.oggm.org/en/stable/input-data.html#)\n", 380 | "- back to the [table of contents](../welcome.ipynb)" 381 | ] 382 | } 383 | ], 384 | "metadata": { 385 | "hide_input": false, 386 | "kernelspec": { 387 | "display_name": "Python 3 (ipykernel)", 388 | "language": "python", 389 | "name": "python3" 390 | }, 391 | "language_info": { 392 | "codemirror_mode": { 393 | "name": "ipython", 394 | "version": 3 395 | }, 396 | "file_extension": ".py", 397 | "mimetype": "text/x-python", 398 | "name": "python", 399 | "nbconvert_exporter": "python", 400 | "pygments_lexer": "ipython3", 401 | "version": "3.12.4" 402 | }, 403 | "toc": { 404 | "base_numbering": 1, 405 | "nav_menu": {}, 406 | "number_sections": false, 407 | "sideBar": true, 408 | "skip_h1_title": true, 409 | "title_cell": "Table of Contents", 410 | "title_sidebar": "Contents", 411 | "toc_cell": false, 412 | "toc_position": {}, 413 | "toc_section_display": true, 414 | "toc_window_display": false 415 | } 416 | }, 417 | "nbformat": 4, 418 | "nbformat_minor": 4 419 | } 420 | -------------------------------------------------------------------------------- /notebooks/welcome.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# OGGM tutorials\n", 8 | "\n", 9 | "If you are new to jupyter notebooks or to jupyterlab, we recommend to have a look at our [introduction to jupyter notebooks](https://edu.oggm.org/en/latest/notebooks_howto.html) first!\n", 10 | "\n", 11 | "If you are reading this from our webpage (https://oggm.org/tutorials), remember that each page displayed here is 
in fact a jupyter notebook! You can start an interactive version of these tutorials online with [MyBinder](https://docs.oggm.org/en/latest/cloud.html) by clicking on the \"launch button\" on the top right of this page (the little rocket 🚀).\n", 12 | "\n", 13 | "⚠️ You can access various versions of these tutorials on the web:\n", 14 | "- [Stable](https://oggm.org/tutorials) (**the default**): the version of the tutorials working with the latest official release of OGGM\n", 15 | "- [Development version](https://tutorials.oggm.org/master): the version of the tutorials working with the most up-to-date version of OGGM on github (unreleased)\n", 16 | "- [v1.5.3](https://tutorials.oggm.org/v1.5.3/notebooks/welcome.html) (2022): legacy tutorials for the OGGM versions predating v1.6 and the mass-balance calibration overhaul.\n", 17 | "\n", 18 | "Note that you can download the notebooks for any of these versions on github by selecting a given branch: [stable](https://github.com/OGGM/tutorials/tree/stable), [master](https://github.com/OGGM/tutorials), [v1.5.3](https://github.com/OGGM/tutorials/tree/v1.5.3). Be aware of which versions you are using!" 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "Ready to go?\n", 26 | "\n", 27 | "## 10 minutes tutorials\n", 28 | "\n", 29 | "These new tutorials are designed to illustrate one single OGGM concept at a time. They are a good way to get started with OGGM, or for returning users to learn about new features!\n", 30 | "- 10 minutes to... [a preprocessed directory](10minutes/preprocessed_directories.ipynb) (**start with this tutorial if you are new to OGGM**)\n", 31 | "- 10 minutes to... [a glacier change projection with GCM data](10minutes/run_with_gcm.ipynb)\n", 32 | "- 10 minutes to... [OGGM as an accelerator for modelling and machine learning](10minutes/machine_learning.ipynb)\n", 33 | "- 10 minutes to... 
[the new dynamical spinup in OGGM v1.6](10minutes/dynamical_spinup.ipynb)" 34 | ] 35 | }, 36 | { 37 | "cell_type": "markdown", 38 | "metadata": {}, 39 | "source": [ 40 | "## OGGM workflow\n", 41 | "\n", 42 | "- [Working with the RGI files and prepare glaciers for a run](tutorials/working_with_rgi.ipynb)\n", 43 | "- [Storing glacier directories for later use](tutorials/store_and_compress_glacierdirs.ipynb)\n", 44 | "- [Dealing with errors after a run](tutorials/deal_with_errors.ipynb)\n", 45 | "- [Differences between the “elevation band” and “centerline” flowlines](tutorials/elevation_bands_vs_centerlines.ipynb)\n", 46 | "- [Step-by-Step guide to building preprocessed directories from scratch](tutorials/building_the_prepro_gdirs.ipynb)\n", 47 | "- [What's in my preprocessed directories? A full centerlines workflow, step by step](tutorials/full_prepro_workflow.ipynb)" 48 | ] 49 | }, 50 | { 51 | "cell_type": "markdown", 52 | "metadata": {}, 53 | "source": [ 54 | "## Mass balance\n", 55 | "\n", 56 | "- [Plotting the OGGM surface mass-balance, the ELA and AAR](tutorials/plot_mass_balance.ipynb)\n", 57 | "- [A look into the new mass balance calibration in OGGM v1.6](tutorials/massbalance_calibration.ipynb)\n", 58 | "- [Global distribution of the mass-balance model parameters](tutorials/massbalance_global_params.ipynb)\n", 59 | "- [Mass balance parameter perturbation experiments with OGGM](tutorials/massbalance_perturbation.ipynb)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "## Hydrological output\n", 67 | "\n", 68 | "- [Hydrological mass-balance output](tutorials/hydrological_output.ipynb)" 69 | ] 70 | }, 71 | { 72 | "cell_type": "markdown", 73 | "metadata": {}, 74 | "source": [ 75 | "## Dynamical runs\n", 76 | "\n", 77 | "- [Run with a long spinup and GCM data](tutorials/run_with_a_spinup_and_gcm_data.ipynb)\n", 78 | "- [Dynamic spinup and dynamic melt_f calibration for past 
simulations](tutorials/dynamical_spinup.ipynb)\n", 79 | "- [Understand the difference between the ice dynamic solvers in OGGM](tutorials/numeric_solvers.ipynb)\n", 80 | "- [\"Instructed OGGM\": running IGM within the OGGM workflow](tutorials/ioggm.ipynb)" 81 | ] 82 | }, 83 | { 84 | "cell_type": "markdown", 85 | "metadata": {}, 86 | "source": [ 87 | "## Ice thickness\n", 88 | "\n", 89 | "- [Ice thickness inversion](tutorials/inversion.ipynb)\n", 90 | "- [Dynamic model initialization using observed thickness data](tutorials/observed_thickness_with_dynamic_spinup.ipynb)" 91 | ] 92 | }, 93 | { 94 | "cell_type": "markdown", 95 | "metadata": {}, 96 | "source": [ 97 | "## Calving\n", 98 | "\n", 99 | "- [The Oerlemans & Nick frontal ablation parameterization in OGGM](tutorials/kcalving_parameterization.ipynb)" 100 | ] 101 | }, 102 | { 103 | "cell_type": "markdown", 104 | "metadata": {}, 105 | "source": [ 106 | "## OGGM shop and additional data\n", 107 | "\n", 108 | "- [OGGM-Shop and Glacier Directories in OGGM](tutorials/oggm_shop.ipynb)\n", 109 | "- [Using your own glacier inventory with OGGM](tutorials/use_your_own_inventory.ipynb)\n", 110 | "- [Ingest gridded products such as ice velocity into OGGM](tutorials/ingest_gridded_data_on_flowlines.ipynb)\n", 111 | "- [Create local topography maps from different DEM sources with OGGM](tutorials/dem_sources.ipynb)\n", 112 | "- [Compare different DEMs for individual glaciers: RGI-TOPO for RGI v6.0](tutorials/rgitopo_rgi6.ipynb)\n", 113 | "- [RGI-TOPO for RGI 7.0](tutorials/rgitopo_rgi7.ipynb)" 114 | ] 115 | }, 116 | { 117 | "cell_type": "markdown", 118 | "metadata": {}, 119 | "source": [ 120 | "## Visualisation and post-processing\n", 121 | "\n", 122 | "- [Display glacier area and thickness changes on a grid](tutorials/distribute_flowline.ipynb)\n", 123 | "- [OGGM flowlines: where are they?](tutorials/where_are_the_flowlines.ipynb)\n", 124 | "- [Compute smoother centerlines for shapefile 
output](tutorials/centerlines_to_shape.ipynb)\n", 125 | "- [Error analysis of the global pre-processing workflow](tutorials/preprocessing_errors.ipynb)\n", 126 | "- [Merge, analyse and visualize OGGM GCM runs](tutorials/merge_gcm_runs_and_visualize.ipynb)\n", 127 | "- [Small overview of HoloViz capability of data exploration](tutorials/holoviz_intro.ipynb)" 128 | ] 129 | }, 130 | { 131 | "cell_type": "markdown", 132 | "metadata": {}, 133 | "source": [ 134 | "## Tutorials in (re-)construction\n", 135 | "\n", 136 | "- [Ice thickness inversion with frontal ablation](construction/inversion_with_frontal_ablation.ipynb)\n", 137 | "- [Filter the glacier length and area time series](construction/area_length_filter.ipynb)" 138 | ] 139 | }, 140 | { 141 | "cell_type": "markdown", 142 | "metadata": {}, 143 | "source": [ 144 | "Have fun learning OGGM!" 145 | ] 146 | }, 147 | { 148 | "cell_type": "markdown", 149 | "metadata": {}, 150 | "source": [ 151 | "**Package versions used to build this documentation:** " 152 | ] 153 | }, 154 | { 155 | "cell_type": "code", 156 | "execution_count": null, 157 | "metadata": {}, 158 | "outputs": [], 159 | "source": [ 160 | "# Package versions\n", 161 | "from oggm.utils import show_versions\n", 162 | "print(show_versions())" 163 | ] 164 | } 165 | ], 166 | "metadata": { 167 | "hide_input": false, 168 | "kernelspec": { 169 | "display_name": "Python 3 (ipykernel)", 170 | "language": "python", 171 | "name": "python3" 172 | }, 173 | "language_info": { 174 | "codemirror_mode": { 175 | "name": "ipython", 176 | "version": 3 177 | }, 178 | "file_extension": ".py", 179 | "mimetype": "text/x-python", 180 | "name": "python", 181 | "nbconvert_exporter": "python", 182 | "pygments_lexer": "ipython3", 183 | "version": "3.12.4" 184 | }, 185 | "latex_envs": { 186 | "LaTeX_envs_menu_present": true, 187 | "autoclose": false, 188 | "autocomplete": true, 189 | "bibliofile": "biblio.bib", 190 | "cite_by": "apalike", 191 | "current_citInitial": 1, 192 | 
"eqLabelWithNumbers": true, 193 | "eqNumInitial": 1, 194 | "hotkeys": { 195 | "equation": "Ctrl-E", 196 | "itemize": "Ctrl-I" 197 | }, 198 | "labels_anchors": false, 199 | "latex_user_defs": false, 200 | "report_style_numbering": false, 201 | "user_envs_cfg": false 202 | }, 203 | "nbTranslate": { 204 | "displayLangs": [ 205 | "*" 206 | ], 207 | "hotkey": "alt-t", 208 | "langInMainMenu": true, 209 | "sourceLang": "en", 210 | "targetLang": "fr", 211 | "useGoogleTranslate": true 212 | }, 213 | "toc": { 214 | "base_numbering": 1, 215 | "nav_menu": {}, 216 | "number_sections": false, 217 | "sideBar": true, 218 | "skip_h1_title": true, 219 | "title_cell": "Table of Contents", 220 | "title_sidebar": "Contents", 221 | "toc_cell": false, 222 | "toc_position": {}, 223 | "toc_section_display": true, 224 | "toc_window_display": false 225 | } 226 | }, 227 | "nbformat": 4, 228 | "nbformat_minor": 4 229 | } 230 | -------------------------------------------------------------------------------- /push.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | ghp-import -n -p -f _build/html 4 | git push origin master 5 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | jupyter-book 2 | rioxarray 3 | bokeh 4 | panel 5 | holoviews 6 | geoviews 7 | datashader 8 | colorcet 9 | pyviz-comms 10 | param 11 | hvplot 12 | tables 13 | seaborn 14 | numpy 15 | scipy 16 | pyproj 17 | geopandas 18 | scikit-learn 19 | oggm 20 | git+https://github.com/oggm/oggm-edu.git@master#egg=oggm_edu 21 | --------------------------------------------------------------------------------