├── .coveragerc
├── .dockerignore
├── .gitattributes
├── .github
└── workflows
│ └── pythonapp.yml
├── .gitignore
├── Dockerfile
├── LICENSE
├── MANIFEST.in
├── README.md
├── codecov.yml
├── docs-source
├── Makefile
└── source
│ ├── _static
│ ├── HMC Lab-dark.png
│ ├── HMC Lab.png
│ ├── HMC Lab.svg
│ ├── ajax-loader.gif
│ ├── basic.css
│ ├── comment-bright.png
│ ├── comment-close.png
│ ├── comment.png
│ ├── css
│ │ └── badge_only.css
│ ├── doctools.js
│ ├── documentation_options.js
│ ├── down-pressed.png
│ ├── down.png
│ ├── file.png
│ ├── fonts
│ │ ├── Lato
│ │ │ ├── lato-bold.eot
│ │ │ ├── lato-bold.ttf
│ │ │ ├── lato-bold.woff
│ │ │ ├── lato-bold.woff2
│ │ │ ├── lato-bolditalic.eot
│ │ │ ├── lato-bolditalic.ttf
│ │ │ ├── lato-bolditalic.woff
│ │ │ ├── lato-bolditalic.woff2
│ │ │ ├── lato-italic.eot
│ │ │ ├── lato-italic.ttf
│ │ │ ├── lato-italic.woff
│ │ │ ├── lato-italic.woff2
│ │ │ ├── lato-regular.eot
│ │ │ ├── lato-regular.ttf
│ │ │ ├── lato-regular.woff
│ │ │ └── lato-regular.woff2
│ │ ├── RobotoSlab
│ │ │ ├── roboto-slab-v7-bold.eot
│ │ │ ├── roboto-slab-v7-bold.ttf
│ │ │ ├── roboto-slab-v7-bold.woff
│ │ │ ├── roboto-slab-v7-bold.woff2
│ │ │ ├── roboto-slab-v7-regular.eot
│ │ │ ├── roboto-slab-v7-regular.ttf
│ │ │ ├── roboto-slab-v7-regular.woff
│ │ │ └── roboto-slab-v7-regular.woff2
│ │ ├── fontawesome-webfont.eot
│ │ ├── fontawesome-webfont.svg
│ │ ├── fontawesome-webfont.ttf
│ │ ├── fontawesome-webfont.woff
│ │ └── fontawesome-webfont.woff2
│ ├── jquery-3.2.1.js
│ ├── jquery.js
│ ├── js
│ │ ├── modernizr.min.js
│ │ └── theme.js
│ ├── language_data.js
│ ├── minus.png
│ ├── plus.png
│ ├── pygments.css
│ ├── searchtools.js
│ ├── underscore-1.3.1.js
│ ├── underscore.js
│ ├── up-pressed.png
│ ├── up.png
│ └── websupport.js
│ ├── _templates
│ ├── custom-class-template.rst
│ └── custom-module-template.rst
│ ├── api
│ ├── distributions
│ │ └── index.rst
│ ├── index.rst
│ ├── massmatrices
│ │ └── index.rst
│ ├── optimizers
│ │ └── index.rst
│ ├── samplers
│ │ └── index.rst
│ └── visualization
│ │ ├── index.rst
│ │ ├── marginal.rst
│ │ ├── marginal_grid.rst
│ │ └── visualize_2_dimensions.rst
│ ├── biblio.bib
│ ├── conf.py
│ ├── genindex.rst
│ ├── hmc.rst
│ ├── index-examples.rst
│ ├── index-tutorials.rst
│ ├── index.rst
│ ├── notebooks.rst
│ ├── notebooks
│ ├── examples
│ │ ├── Elastic 2d FWI.ipynb
│ │ ├── Locating quakes on Grimsvötn, Iceland.ipynb
│ │ ├── Sampling linear equations.ipynb
│ │ ├── Sampling sparse linear equations.ipynb
│ │ └── locatingquakes
│ │ │ └── data
│ │ │ └── aerial-snowcat-truck-people-grimsvotn.jpg
│ └── tutorials
│ │ ├── 0 - Getting started.ipynb
│ │ ├── 1 - Tuning Hamiltonian Monte Carlo.ipynb
│ │ ├── 2 - Separate priors per dimension.ipynb
│ │ ├── 3 - Creating your own inverse problem.ipynb
│ │ ├── 4 - Running parallel Markov chains.ipynb
│ │ └── 5 - Transforming parameters.ipynb
│ ├── py-modindex.rst
│ └── setup.rst
├── environment.yml
├── github_deploy_key.enc
├── hmclab
├── Distributions
│ ├── ElasticFullWaveform2D.py
│ ├── LayeredRayTracing2D.py
│ ├── LinearMatrix.py
│ ├── SourceLocation.py
│ ├── Transforms.py
│ ├── __init__.py
│ └── base.py
├── Helpers
│ ├── AppendNPY.py
│ ├── BetterABC.py
│ ├── CaptureStdout.py
│ ├── CustomExceptions.py
│ ├── InterfaceMKL.py
│ ├── Processing.py
│ ├── RandomMatrices.py
│ ├── Timers.py
│ └── __init__.py
├── MassMatrices.py
├── Optimizers.py
├── Samplers.py
├── Samples.py
├── Visualization.py
├── __init__.py
└── _version.py
├── notebooks
├── Home.ipynb
├── Tests.ipynb
├── __init__.py
├── examples
│ ├── Elastic 2d FWI.ipynb
│ ├── Locating quakes.ipynb
│ ├── Ray tomography adv.ipynb
│ ├── Ray tomography.ipynb
│ ├── Sampling linear equations.ipynb
│ ├── Sampling sparse linear equations.ipynb
│ ├── locatingquakes
│ │ ├── data
│ │ │ ├── .gitignore
│ │ │ ├── 10.jpg
│ │ │ ├── 10.picks.csv
│ │ │ ├── 12.jpg
│ │ │ ├── 12.picks.csv
│ │ │ ├── 15.jpg
│ │ │ ├── 15.picks.csv
│ │ │ ├── 19.jpg
│ │ │ ├── 19.picks.csv
│ │ │ ├── 2.jpg
│ │ │ ├── 2.picks.csv
│ │ │ ├── 21.jpg
│ │ │ ├── 21.picks.csv
│ │ │ ├── 25.jpg
│ │ │ ├── 25.picks.csv
│ │ │ ├── 5.jpg
│ │ │ ├── 5.picks.csv
│ │ │ ├── 54.jpg
│ │ │ ├── 54.picks.csv
│ │ │ ├── 6.jpg
│ │ │ ├── 6.picks.csv
│ │ │ ├── 9.jpg
│ │ │ ├── 9.picks.csv
│ │ │ ├── Data processing.ipynb
│ │ │ ├── aerial-snowcat-truck-people-grimsvotn.jpg
│ │ │ ├── allgps_channels.csv
│ │ │ ├── allgps_channels.xlsx
│ │ │ ├── xyz.csv
│ │ │ └── xyz_polar.csv
│ │ ├── helpers.py
│ │ └── samples
│ │ │ └── .gitignore
│ ├── raytomography
│ │ ├── traveltimes_ava1_p.csv
│ │ ├── traveltimes_ava2_p.csv
│ │ ├── traveltimes_ava2_s.csv
│ │ ├── traveltimes_ava3_p.csv
│ │ ├── traveltimes_ava3_s.csv
│ │ ├── traveltimes_ava4_p.csv
│ │ ├── traveltimes_ava4_s.csv
│ │ ├── traveltimes_ava5_p.csv
│ │ └── traveltimes_ava5_s.csv
│ └── settings.ini
└── tutorials
│ ├── 0 - Getting started.ipynb
│ ├── 1 - Tuning Hamiltonian Monte Carlo.ipynb
│ ├── 2 - Using different priors per dimension.ipynb
│ ├── 3 - Creating your own inverse problem.ipynb
│ ├── 4 - Running parallel Markov chains.ipynb
│ ├── 5 - Transforming parameters.ipynb
│ ├── 6 - Adaptive Mass Matrix.ipynb
│ ├── 7 - Alternative integrators.ipynb
│ ├── __init__.py
│ ├── bin_samples
│ └── .gitignore
│ └── reproducibility
├── pyproject.toml
├── requirements-dev.txt
├── requirements.txt
├── setup.cfg
├── setup.py
├── tests
├── configurations
│ └── default_testing_configuration.ini
├── conftest.py
├── test_break.py
├── test_copying.py
├── test_diagnostic_mode.py
├── test_distributions.py
├── test_elasticFWI.py
├── test_failed_sampling.py
├── test_himmelblau.py
├── test_integrators.py
├── test_linear_dense_full.py
├── test_linear_dense_simple.py
├── test_linear_sparse_full.py
├── test_linear_sparse_simple.py
├── test_mass_matrices.py
├── test_mkl.py
├── test_notebooks.py
├── test_optimization.py
├── test_pickling.py
├── test_samples.py
├── test_sampling.py
├── test_sampling_reproducibility.py
├── test_version.py
├── test_visual_samplers.py
└── test_visualizations.py
└── versioneer.py
/.coveragerc:
--------------------------------------------------------------------------------
1 | [report]
2 | exclude_lines =
3 | pragma: no cover
4 | @abstract
5 | _AbstractMethodError()
6 |
7 | [run]
8 | omit =
9 | setup.py
10 | versioneer.py
11 | hmclab/_version.py
12 | hmclab/Distributions/ElasticFullWaveform2D.py
13 | hmclab/Helpers/InterfaceMKL.py
14 | hmclab/Helpers/CaptureStdout.py
15 | tests/test_elasticFWI.py
16 |
--------------------------------------------------------------------------------
/.dockerignore:
--------------------------------------------------------------------------------
1 | **/.git
2 | **/Dockerfile
3 | hmclab.egg-info
4 | dist
5 | build
6 | docs
7 | docs-source
8 | **/*.h5
9 | **/*.npy
10 |
11 | !notebooks/examples/locatingquakes/data/data_array.npy
12 | !notebooks/examples/locatingquakes/data/snr_array.npy
13 | !notebooks/examples/locatingquakes/data/uncertainty_array.npy
14 | !notebooks/examples/locatingquakes/data/csv_files.npy
--------------------------------------------------------------------------------
/.gitattributes:
--------------------------------------------------------------------------------
1 | hmclab/_version.py export-subst
2 |
--------------------------------------------------------------------------------
/.github/workflows/pythonapp.yml:
--------------------------------------------------------------------------------
1 | # This workflow will install Python dependencies, run tests and lint with a single version of Python
2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions
3 |
4 | name: Python application
5 |
6 | on:
7 | push:
8 | branches: [master]
9 | pull_request:
10 | branches: [master]
11 |
12 | jobs:
13 | build:
14 | runs-on: ubuntu-latest
15 | strategy:
16 | matrix:
17 | python-version: ["3.9", "3.10"]
18 | steps:
19 | - uses: actions/checkout@v2
20 | - name: Set up Python ${{ matrix.python-version }}
21 | uses: actions/setup-python@v2
22 | with:
23 | python-version: ${{ matrix.python-version }}
24 | - name: Install dependencies
25 | run: |
26 | python -m pip install --upgrade pip
27 | pip install -e .[dev]
28 | - name: Test with pytest and generate coverage
29 | run: |
30 | pytest --runxfail --cov=./ --cov-report=xml
31 | - name: Upload coverage to Codecov
32 | uses: codecov/codecov-action@v2
33 | with:
34 | token: ${{ secrets.CODECOV_TOKEN }}
35 | env_vars: OS,PYTHON
36 | fail_ci_if_error: true
37 | flags: unittests
38 |
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | docs
2 | .idea
3 | .vscode
4 | venv-hmc-tomography/
5 | output*
6 | build
7 | dist
8 | hmc_tomography_*
9 | *.egg-info
10 | *__pycache__
11 | .coverage
12 | .coverage.*
13 | *.h5
14 | *.npy
15 | *.npz
16 | examples/notebooks/bin_samples/*
17 | *_movie
18 | docs-source/source/examples
19 | *.ipynb_checkpoints
20 | *.NOGIT
21 | .pytest_cache/
22 | **/_autosummary
23 | coverage.xml
24 | .DS_Store
25 | linux*/
26 | *.pkl
--------------------------------------------------------------------------------
/Dockerfile:
--------------------------------------------------------------------------------
1 | FROM jupyter/datascience-notebook:python-3.10.9
2 | WORKDIR /home/jovyan/
3 | RUN mkdir /home/jovyan/hmclab
4 |
5 | # Add Python files from this project
6 | ADD --chown=jovyan:users . /home/jovyan/hmclab
7 |
8 | # Install required prerequisites for Python version.
9 | RUN pip install -e /home/jovyan/hmclab
10 | RUN pip install psvWave==0.2.1
11 |
12 | # Change the startup location of the notebook server.
13 | WORKDIR /home/jovyan/hmclab/notebooks/
14 |
15 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Copyright 2019-2023 LARS GEBRAAD, ANDREAS FICHTNER, ANDREA ZUNINO
2 |
3 | Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
4 |
5 | 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
6 |
7 | 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
8 |
9 | 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission.
10 |
11 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
--------------------------------------------------------------------------------
/MANIFEST.in:
--------------------------------------------------------------------------------
1 | include versioneer.py
2 | include hmclab/_version.py
3 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # HMC Lab
2 | ## Andrea Zunino, Lars Gebraad, Andreas Fichtner
3 |
4 | [](https://codecov.io/gh/larsgeb/hmclab) [](https://opensource.org/licenses/BSD-3-Clause) [](https://github.com/psf/black) [](https://github.com/larsgeb/hmclab/releases/latest)
5 |
6 | **HMC Lab** is a numerical laboratory for research in Bayesian seismology, written in Python and Julia. Jump to [Docker one-command setup](#docker-one-command-setup).
7 |
8 | - **Website:** https://hmclab.science
9 | - **Python documentation:** https://python.hmclab.science
10 | - **Source code:** https://github.com/larsgeb/hmclab
11 | - **Docker image:** https://hub.docker.com/repository/docker/larsgebraad/hmclab
12 | - **Bug reports:** https://github.com/larsgeb/hmclab/issues
13 |
14 | It provides all the ingredients to set up probabilistic (and deterministic) inverse
15 | problems, appraise them, and analyse them. This includes a plethora of prior
16 | distributions, different physical modelling modules and various MCMC (and
17 | other) algorithms.
18 |
19 | In particular, it provides the following prior distributions, physics, and appraisal algorithms.
20 |
21 | **Prior distributions:**
22 | - Normal
23 | - Laplace
24 | - Uniform
25 | - Arbitrary composites of other priors
26 | - Bayes rule
27 | - User supplied distributions
28 |
29 | **Physics:**
30 | - Linear equations
31 | - Straight ray tomography
32 | - 3d source location
33 | - 2d elastic full-waveform inversion
34 | - User supplied physics
35 |
36 | **Algorithms:**
37 | - Hamiltonian Monte Carlo (and variations)
38 | - Random Walk Metropolis Hastings
39 | - Stein Variational Gradient Descent
40 | - Gradient descent
41 | - Interfaces to non-linear optimization methods from SciPy
42 | - Animated versions of various algorithms
43 |
44 | # Docker one-command setup
45 |
46 | To get started with the tutorial and example notebooks, one can use a single command
47 | in Docker. This will pull a Docker image based on the Jupyter Datascience stack. The
48 | final container is approximately 5GB.
49 |
50 | ```bash
51 | docker run -p 9123:9123 larsgebraad/hmclab \
52 | start-notebook.sh --NotebookApp.token='hmclab' \
53 | --NotebookApp.port='9123' --LabApp.default_url='/lab/tree/Home.ipynb'
54 | ```
55 |
56 | Then either copy-paste the link from your terminal, or navigate manually to [http://127.0.0.1:9123/lab/tree/Home.ipynb?token=hmclab](http://127.0.0.1:9123/lab/tree/Home.ipynb?token=hmclab).
57 |
58 | # Online tutorial notebooks
59 |
60 | All tutorial notebooks can also be accessed online in a non-interactive fashion. Simply
61 | visit https://python.hmclab.science or use the following links:
62 |
63 | **Tutorials:**
64 |
65 | - [Getting started.ipynb](notebooks/tutorials/0%20-%20Getting%20started.ipynb)
66 | - [Tuning Hamiltonian Monte Carlo.ipynb](notebooks/tutorials/1%20-%20Tuning%20Hamiltonian%20Monte%20Carlo.ipynb)
67 | - [Separate priors per dimension.ipynb](notebooks/tutorials/2%20-%20Separate%20priors%20per%20dimension.ipynb)
68 | - [Creating your own inverse problem.ipynb](notebooks/tutorials/3%20-%20Creating%20your%20own%20inverse%20problem.ipynb)
69 | - [Running parallel Markov chains.ipynb](notebooks/tutorials/4%20-%20Running%20parallel%20Markov%20chains.ipynb)
70 |
71 | **Demos:**
72 |
73 | - [Sampling linear equations](notebooks/examples/Sampling%20linear%20equations.ipynb)
74 | - [Sampling sparse linear equations](notebooks/examples/Sampling%20sparse%20linear%20equations.ipynb)
75 | - [Locating quakes on Grimsvötn, Iceland](notebooks/examples/Locating%20quakes%20on%20Grimsvötn%2C%20Iceland.ipynb)
76 | - [Elastic 2d FWI](notebooks/examples/Elastic%202d%20FWI.ipynb)
77 |
78 |
79 | # The long way around: installing the package on your system
80 |
81 | For full installation instructions, including creating a proper Python environment, [see the installation instructions](https://python.hmclab.science/setup.html).
82 |
83 | Start by making sure that you have HDF5 or h5py installed properly.
84 |
85 | Directly to your environment:
86 |
87 | ```
88 | pip install -e git+git@github.com:larsgeb/hmclab.git@master#egg=hmclab
89 | ```
90 |
91 | From the project root directory:
92 |
93 | ```
94 | pip install -e .
95 | ```
96 |
97 | ### Development dependencies
98 |
99 | If you want to develop within this repo, we recommend a few extra packages. They can also be installed using pip.
100 |
101 | In Bash:
102 |
103 | ```
104 | pip install -e git+git@github.com:larsgeb/hmclab.git@master#egg=hmclab[dev] # from github repo
105 | pip install -e .[dev] # from local clone
106 | ```
107 |
108 | ... or Zsh (which requires escapes for brackets):
109 |
110 | ```
111 | pip install -e git+git@github.com:larsgeb/hmclab.git@master#egg=hmclab\[dev\] # from github repo
112 | pip install -e .\[dev\] # from local clone
113 | ```
114 |
115 |
--------------------------------------------------------------------------------
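For a concrete feel of the API described in the README, here is a minimal sketch; it is not part of the repository and only uses names that appear elsewhere in this dump (`Normal`, `create_default`, `generate`, `misfit`, `gradient`). The sampler classes live in `hmclab/Samplers.py` and are demonstrated in the tutorial notebooks.

```python
# Minimal sketch of working with an hmclab distribution, assuming the package
# is installed (pip install -e .). Only names visible elsewhere in this
# repository dump are used; shapes are indicative.
from hmclab.Distributions import Normal

prior = Normal.create_default(2)  # default 2-dimensional normal distribution
m = prior.generate()              # draw a model realisation (column vector)

print(prior.misfit(m))            # non-normalized negative log-probability
print(prior.gradient(m).shape)    # gradient w.r.t. the model vector
```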
/codecov.yml:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/codecov.yml
--------------------------------------------------------------------------------
/docs-source/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 | SHELL := '/bin/bash'
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = source
8 | BUILDDIR = ../docs/build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 |
17 |
18 | # Catch-all target: route all unknown targets to Sphinx using the new
19 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
20 | %: Makefile
21 | rm -rf source/notebooks
22 | rm -rf ../docs/*
23 | mkdir source/notebooks
24 | mkdir source/notebooks/tutorials
25 | mkdir source/notebooks/examples
26 | # mkdir source/notebooks/inverse_problems
27 | cp -r ../notebooks/tutorials/*.ipynb source/notebooks/tutorials
28 | cp -r ../notebooks/examples/*.ipynb source/notebooks/examples
29 | mkdir -p source/notebooks/examples/locatingquakes/data/
30 | cp ../notebooks/examples/locatingquakes/data/aerial-snowcat-truck-people-grimsvotn.jpg source/notebooks/examples/locatingquakes/data/aerial-snowcat-truck-people-grimsvotn.jpg
31 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
32 |
--------------------------------------------------------------------------------
/docs-source/source/_static/HMC Lab-dark.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/HMC Lab-dark.png
--------------------------------------------------------------------------------
/docs-source/source/_static/HMC Lab.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/HMC Lab.png
--------------------------------------------------------------------------------
/docs-source/source/_static/HMC Lab.svg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/HMC Lab.svg
--------------------------------------------------------------------------------
/docs-source/source/_static/ajax-loader.gif:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/ajax-loader.gif
--------------------------------------------------------------------------------
/docs-source/source/_static/comment-bright.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/comment-bright.png
--------------------------------------------------------------------------------
/docs-source/source/_static/comment-close.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/comment-close.png
--------------------------------------------------------------------------------
/docs-source/source/_static/comment.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/comment.png
--------------------------------------------------------------------------------
/docs-source/source/_static/css/badge_only.css:
--------------------------------------------------------------------------------
1 | .fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../fonts/fontawesome-webfont.eot");src:url("../fonts/fontawesome-webfont.eot?#iefix") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff") format("woff"),url("../fonts/fontawesome-webfont.ttf") format("truetype"),url("../fonts/fontawesome-webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}
2 |
--------------------------------------------------------------------------------
/docs-source/source/_static/documentation_options.js:
--------------------------------------------------------------------------------
1 | var DOCUMENTATION_OPTIONS = {
2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
3 | VERSION: '',
4 | LANGUAGE: 'None',
5 | COLLAPSE_INDEX: false,
6 | FILE_SUFFIX: '.html',
7 | HAS_SOURCE: true,
8 | SOURCELINK_SUFFIX: '.txt',
9 | NAVIGATION_WITH_KEYS: false,
10 | };
--------------------------------------------------------------------------------
/docs-source/source/_static/down-pressed.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/down-pressed.png
--------------------------------------------------------------------------------
/docs-source/source/_static/down.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/down.png
--------------------------------------------------------------------------------
/docs-source/source/_static/file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/file.png
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bold.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bold.eot
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bold.ttf
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bold.woff
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bold.woff2
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bolditalic.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bolditalic.eot
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bolditalic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bolditalic.ttf
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bolditalic.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bolditalic.woff
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-bolditalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-bolditalic.woff2
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-italic.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-italic.eot
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-italic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-italic.ttf
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-italic.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-italic.woff
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-italic.woff2
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-regular.eot
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-regular.ttf
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-regular.woff
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/Lato/lato-regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/Lato/lato-regular.woff2
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/fontawesome-webfont.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/fontawesome-webfont.eot
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/fontawesome-webfont.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/fontawesome-webfont.ttf
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/fontawesome-webfont.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/fontawesome-webfont.woff
--------------------------------------------------------------------------------
/docs-source/source/_static/fonts/fontawesome-webfont.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/fonts/fontawesome-webfont.woff2
--------------------------------------------------------------------------------
/docs-source/source/_static/js/theme.js:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/_static/js/theme.js
--------------------------------------------------------------------------------
/docs-source/source/index.rst:
--------------------------------------------------------------------------------
21 | > $ git clone https://github.com/larsgeb/hmclab.git
22 | > $ cd hmclab
23 | > $ conda env create -f environment.yml
24 | > $ conda activate hmclab
25 | > $ pip install -e .
26 |
27 | The resulting Conda environment should be able to run all notebooks found in
28 | hmclab/notebooks. See the installation page for more detailed instructions.
29 |
30 |
31 | .. toctree::
32 | :maxdepth: 1
33 | :caption: Contents:
34 | :hidden:
35 |
36 | self
37 | hmc
38 | setup
39 | notebooks
40 | api/index
41 | py-modindex
42 | genindex
43 |
44 |
45 | .. centered:: Andrea Zunino, Andreas Fichtner, Lars Gebraad
46 |
47 |
48 |
--------------------------------------------------------------------------------
/docs-source/source/notebooks.rst:
--------------------------------------------------------------------------------
1 | Notebooks: Examples and tutorials
2 | =================================
3 |
4 | All these tutorials are generated from Jupyter Notebooks that can be found in the
5 | project folder :code:`/notebooks`.
6 |
7 | If you don't understand a particular piece of inverse theory related to sampling, write
8 | us a message (issue) on the GitHub repo and we will try to throw a tutorial together.
9 |
10 | `Tutorials:`
11 |
12 | .. toctree::
13 | :maxdepth: 1
14 |
15 | index-tutorials
16 | index-examples
17 |
18 |
19 |
20 |
--------------------------------------------------------------------------------
/docs-source/source/notebooks/examples/locatingquakes/data/aerial-snowcat-truck-people-grimsvotn.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/docs-source/source/notebooks/examples/locatingquakes/data/aerial-snowcat-truck-people-grimsvotn.jpg
--------------------------------------------------------------------------------
/docs-source/source/py-modindex.rst:
--------------------------------------------------------------------------------
1 | ############
2 | Module index
3 | ############
4 |
--------------------------------------------------------------------------------
/docs-source/source/setup.rst:
--------------------------------------------------------------------------------
1 | Installation
2 | ============
3 |
4 | Installing the :code:`hmclab` package is dead-simple. It requires you to have
5 | Python 3.7 or newer on your system. Using either virtual environments or :code:`Conda` is
6 | recommended.
7 |
8 | Quick installation
9 | ******************
10 |
11 | Run the following inside your local copy of the source code:
12 |
13 | .. code-block:: bash
14 |
15 | > $ conda env create -f environment.yml
16 | > $ conda activate hmclab
17 | > $ pip install -e .
18 |
19 | After this you can use the code anywhere on your system, or start a Jupyter Notebook
20 | server and have a look at our examples.
21 |
22 | Installing the package
23 | **********************
24 |
25 | Activate your environment of choice. This can be your system Python, a virtualenv, Conda
26 | environment, etc.
27 |
28 | Environment setup
29 | ^^^^^^^^^^^^^^^^^
30 |
31 | To create e.g. a new :code:`Conda` environment with the appropriate
32 | Python version and dependencies, run the following from your terminal:
33 |
34 | .. code-block:: bash
35 |
36 | > $ git clone https://github.com/larsgeb/hmclab.git
37 | > $ conda create -n hmclab python=3.8
38 | > $ conda activate hmclab
39 | > $ conda install h5py # Needed, as Pip itself can't install hdf5
40 |
41 | You can also create this environment by first downloading this repo and running the
42 | following command inside of it:
43 |
44 | .. code-block:: bash
45 |
46 | > $ conda env create -f environment.yml
47 | > $ conda activate hmclab
48 |
49 | This does exactly the same as the previous commands.
50 |
51 | Installing the package
52 | ^^^^^^^^^^^^^^^^^^^^^^
53 |
54 | There are, at the moment, three ways to install the package:
55 |
56 | 1. Install the code directly from GitHub;
57 | 2. Clone the GitHub repo and install from that directory.
58 | 3. Download the :code:`.zip` file of the repo, unzip, and install from that directory.
59 |
60 |
61 | Installation option 1
62 | ---------------------
63 |
64 | Option one simply requires you to run the following command from your shell (with the
65 | appropriate environment activated):
66 |
67 | .. code-block:: bash
68 |
69 | > $ pip install -e git+git@github.com:larsgeb/hmclab.git@master#egg=hmclab
70 |
71 | This won't work as long as the GitHub repo is private. If you've set up SSH keys with
72 | your GitHub account, and we've granted you access, you can run the following command
73 | instead:
74 |
75 | .. code-block:: bash
76 |
77 | > $ pip install -e git+ssh://git@github.com/larsgeb/hmclab.git#egg=hmclab
78 |
79 | Installation option 2
80 | ---------------------
81 |
82 | Option two requires you to run the following commands (with the appropriate environment
83 | activated):
84 |
85 | .. code-block:: bash
86 |
87 | > $ git clone git@github.com:larsgeb/hmclab.git
88 | > $ cd hmclab
89 | > $ pip install -e .
90 |
91 | This also won't work as long as the GitHub repo is private and you don't have access.
92 |
93 | Installation option 3
94 | ---------------------
95 |
96 | Option three requires you to decompress the :code:`.zip` file and open a terminal in
97 | the resulting folder (such that you see the files :code:`setup.py`, :code:`README.md`,
98 | etc.). Once you have activated the proper environment in your shell, run the following:
99 |
100 | .. code-block:: bash
101 |
102 | > $ pip install -e .
103 |
104 |
105 | Installing development dependencies
106 | ***********************************
107 |
108 | If you want to develop within this repo, we recommend a few extra packages. They can
109 | also be installed using pip.
110 |
111 | In :code:`Bash`:
112 |
113 | .. code-block:: bash
114 |
115 | # from github
116 | > $ pip install -e \
117 | git+git@github.com:larsgeb/hmclab.git@master#egg=hmclab[dev]
118 |
119 | # from local clone
120 | > $ pip install -e .[dev]
121 |
122 | ... or :code:`Zsh`, which requires escapes for brackets:
123 |
124 | .. code-block:: bash
125 |
126 | # from github
127 | > $ pip install -e\
128 | git+git@github.com:larsgeb/hmclab.git@master#egg=hmclab\[dev\]
129 |
130 | # from local clone
131 | > $ pip install -e .\[dev\]
132 |
133 | One can now test whether all the package components run correctly by invoking PyTest:
134 |
135 | .. code-block:: bash
136 |
137 | > $ pytest .
138 |
--------------------------------------------------------------------------------
/environment.yml:
--------------------------------------------------------------------------------
1 | name: hmclab
2 | dependencies:
3 | - python=3.9
4 | - h5py
5 |
--------------------------------------------------------------------------------
/github_deploy_key.enc:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/github_deploy_key.enc
--------------------------------------------------------------------------------
/hmclab/Distributions/Transforms.py:
--------------------------------------------------------------------------------
1 | import numpy as _numpy
2 | from hmclab.Distributions import _AbstractDistribution
3 | from hmclab.Distributions import Normal as _Normal
4 |
5 |
6 | class TransformToLogSpace(_AbstractDistribution):
7 |
8 | grad_logdetjac_jax = None
9 |
10 | def __init__(self, distribution: _AbstractDistribution, base: float = 10):
11 |
12 | assert issubclass(type(distribution), _AbstractDistribution)
13 |
14 | self.base: float = base
15 | self.dimensions: int = distribution.dimensions
16 | self.distribution: _AbstractDistribution = distribution
17 |
18 | def misfit(self, m):
19 |
20 | assert m.shape == (m.size, 1)
21 |
22 | _m = self.transform_forward(m)
23 |
24 | if _numpy.any(_numpy.isnan(_m)):
25 | return _numpy.inf
26 |
27 | return (
28 | self.distribution.misfit(_m)
29 | # - _numpy.log(_numpy.linalg.det(self.jacobian(m)))
30 | # For diagonal jacobians:
31 | - _numpy.sum(_numpy.log(_numpy.diag(self.jacobian(m))))
32 | + self.misfit_bounds(m)
33 | )
34 |
35 | def gradient(self, m):
36 |
37 | assert m.shape == (m.size, 1)
38 |
39 | _m = self.transform_forward(m)
40 |
41 | J = self.jacobian(m)
42 |
43 | return (
44 | self.distribution.gradient(_m).T @ J - self.manual_grad_logdetjac(m).T
45 | ).T + self.misfit_bounds(m)
46 |
47 | @staticmethod
48 | def create_default(dimensions: int) -> "TransformToLogSpace":
49 | return TransformToLogSpace(_Normal.create_default(dimensions))
50 |
51 | def generate(self, repeat=1, rng=_numpy.random.default_rng()) -> _numpy.ndarray:
52 | return self.transform_backward(self.distribution.generate(repeat, rng))
53 |
54 | def transform_backward(self, m):
55 | assert m.shape[0] == self.dimensions
56 | return _numpy.power(self.base, m)
57 |
58 | def transform_forward(self, m):
59 | assert m.shape[0] == self.dimensions
60 | return _numpy.log(m) / _numpy.log(self.base)
61 |
62 | def jacobian(self, m):
63 | assert m.shape == (m.size, 1)
64 | return _numpy.diag((1.0 / m.flatten()) / _numpy.log(self.base))
65 |
66 | def inv_jacobian(self, m):
67 | assert m.shape == (m.size, 1)
68 | return _numpy.diag(m.flatten() * _numpy.log(self.base))
69 |
70 | def hessian(self, m):
71 | assert m.shape == (m.size, 1)
72 | return _numpy.diag((-1.0 / m.flatten() ** 2) / _numpy.log(self.base))
73 |
74 | def manual_grad_logdetjac(self, m):
75 |
76 | # This is computed using Jacobi's rule.
77 | # Additionally, taking the matrix inverse using NumPy is unstable, making it
78 | # prudent to find the inverse jacobian analytically.
79 | # Formula: d/dm log det jac = tr ( jac^-1 @ (d jac / dm) )
80 | # The term d jac / dm is taken to be the Hessian. Note that the tensor order
81 | # of the Hessian should actually be 3, but with a diagonal transformation, the
82 | # cross terms are not important, so we get away with simpler expressions.
83 |
84 | return (_numpy.diag((self.inv_jacobian(m)) @ self.hessian(m)))[:, None]
85 |
86 | """
87 | The following are two functions that don't require one to find analytical
88 | expressions of the gradient of the logarithm of the determinant of the Jacobian.
89 | This gradient is computed using automatic differentiation implemented in JAX. Note
90 | that JAX doesn't play well with the parallel HMC sampler, leading to many a bug.
91 |
92 | def _gradient_using_jax(self, m):
93 |
94 | import jax as _jax
95 |
96 | if self.grad_logdetjac_jax is None:
97 | self.grad_logdetjac_jax = _jax.jit(_jax.grad(self._jax_logdetjac))
98 |
99 | assert m.shape == (m.size, 1)
100 |
101 | _m = self.transform_forward(m)
102 |
103 | J = self.jacobian(m)
104 |
105 | return (
106 | self.distribution.gradient(_m).T @ J
107 | - _numpy.asarray(self.grad_logdetjac_jax(m)).T
108 | ).T + self.misfit_bounds(m)
109 |
110 | def _jax_logdetjac(self, m):
111 |
112 | return _jax.numpy.log(
113 | _jax.numpy.linalg.det(
114 | _jax.numpy.diag((1.0 / m.flatten()) / _jax.numpy.log(self.base))
115 | )
116 | )
117 | """
118 |
--------------------------------------------------------------------------------
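To make the Jacobi's-rule comment in `manual_grad_logdetjac` concrete, the following self-contained sketch (not part of the repository; it assumes `hmclab` is importable) compares the analytical gradient of log|det J(m)| with a centred finite-difference estimate:

```python
# Cross-check manual_grad_logdetjac against finite differences of log|det J(m)|.
import numpy as np
from hmclab.Distributions import TransformToLogSpace

dims = 3
transform = TransformToLogSpace.create_default(dims)

rng = np.random.default_rng(0)
m = rng.uniform(0.5, 2.0, size=(dims, 1))  # strictly positive model vector


def log_det_jacobian(m):
    # The Jacobian is diagonal, so log|det J| is the sum of the log of its diagonal.
    return np.sum(np.log(np.abs(np.diag(transform.jacobian(m)))))


eps = 1e-6
fd_gradient = np.zeros((dims, 1))
for i in range(dims):
    dm = np.zeros((dims, 1))
    dm[i] = eps
    fd_gradient[i] = (log_det_jacobian(m + dm) - log_det_jacobian(m - dm)) / (2 * eps)

print(np.allclose(fd_gradient, transform.manual_grad_logdetjac(m), atol=1e-5))  # True
```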
/hmclab/Distributions/__init__.py:
--------------------------------------------------------------------------------
1 | """Distribution classes and associated methods.
2 |
3 | The classes in this module describe statistical distributions. Most of them are
4 | non-normalized, which has implications for quantifying the evidence term of Bayes' rule.
5 |
6 | All of the classes inherit from :class:`._AbstractDistribution`; a base class outlining
7 | required methods and their signatures (required in- and outputs).
8 |
9 | .. note::
10 |
11 | A tutorial on implementing your own distributions can be found at
12 | :ref:`/notebooks/tutorials/3 - Creating your own inverse problem.ipynb`.
13 |
14 | """
15 | # import warnings as _warnings # TODO check why this was here
16 |
17 | from hmclab.Distributions.base import (
18 | AdditiveDistribution,
19 | BayesRule,
20 | CompositeDistribution,
21 | Himmelblau,
22 | Laplace,
23 | Normal,
24 | StandardNormal1D,
25 | Uniform,
26 | Mixture,
27 | EvaluationLimiter_ClassConstructor,
28 | _AbstractDistribution,
29 | )
30 | from hmclab.Distributions.LinearMatrix import LinearMatrix
31 | from hmclab.Distributions.SourceLocation import SourceLocation2D
32 | from hmclab.Distributions.SourceLocation import SourceLocation3D
33 | from hmclab.Distributions.Transforms import TransformToLogSpace
34 | from hmclab.Distributions.LayeredRayTracing2D import LayeredRayTracing2D
35 |
36 |
37 | __all__ = [
38 | "_AbstractDistribution",
39 | "StandardNormal1D",
40 | "Normal",
41 | "Laplace",
42 | "Uniform",
43 | "CompositeDistribution",
44 | "AdditiveDistribution",
45 | "Himmelblau",
46 | "BayesRule",
47 | "LinearMatrix",
48 | "SourceLocation2D",
49 | "SourceLocation3D",
50 | "LayeredRayTracing2D",
51 | "Mixture",
52 | "EvaluationLimiter_ClassConstructor",
53 | "TransformToLogSpace",
54 | ]
55 |
56 | # Try to import 2D FWI examples if psvWave is installed, otherwise, don't fail
57 | try:
58 | from hmclab.Distributions.ElasticFullWaveform2D import ElasticFullWaveform2D
59 |
60 | __all__ += ["ElasticFullWaveform2D"]
61 |
62 | except ModuleNotFoundError: # as e:
63 | pass
64 |
--------------------------------------------------------------------------------
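Because `ElasticFullWaveform2D` is only appended to `__all__` when `psvWave` imports successfully, downstream code can feature-test for the optional 2D FWI distribution instead of risking an `ImportError`. A minimal sketch (not part of the repository):

```python
# Check at runtime whether the optional 2D FWI distribution was exported.
from hmclab import Distributions

if "ElasticFullWaveform2D" in Distributions.__all__:
    print("psvWave is installed: ElasticFullWaveform2D is available.")
else:
    print("psvWave is not installed: the 2D FWI distribution is disabled.")
```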
/hmclab/Helpers/AppendNPY.py:
--------------------------------------------------------------------------------
1 | # https://github.com/xor2k/npy-append-array
2 | #
3 | # MIT License
4 | #
5 | # Copyright (c) 2020 Michael Siebert
6 | #
7 | # Permission is hereby granted, free of charge, to any person obtaining a copy
8 | # of this software and associated documentation files (the "Software"), to deal
9 | # in the Software without restriction, including without limitation the rights
10 | # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
11 | # copies of the Software, and to permit persons to whom the Software is
12 | # furnished to do so, subject to the following conditions:
13 | #
14 | # The above copyright notice and this permission notice shall be included in all
15 | # copies or substantial portions of the Software.
16 | #
17 | # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
18 | # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
19 | # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
20 | # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
21 | # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
22 | # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
23 | # SOFTWARE.
24 |
25 | import numpy as np
26 | import os.path
27 | from io import BytesIO, SEEK_END, SEEK_SET
28 |
29 |
30 | class AppendableArray:
31 | def __init__(self, filename):
32 | self.filename = filename
33 | self.fp = None
34 | self.__is_init = False
35 | if os.path.isfile(filename):
36 | self.__init()
37 |
38 | def __create_header_bytes(self, spare_space=True):
39 | from struct import pack
40 |
41 | header_map = {
42 | "descr": np.lib.format.dtype_to_descr(self.dtype),
43 | "fortran_order": self.fortran_order,
44 | "shape": tuple(self.shape),
45 | }
46 | io = BytesIO()
47 | np.lib.format.write_array_header_2_0(io, header_map)
48 |
49 | # create array header with 64 byte spare space for the shape to grow
50 | io.getbuffer()[8:12] = pack(
51 | "<I",
--------------------------------------------------------------------------------
/hmclab/Helpers/InterfaceMKL.py:
--------------------------------------------------------------------------------
38 | ) -> np.ndarray:
39 | """Delegator function for double or single precision gemv.
40 |
41 | Parameters
42 | ==========
43 | A : scipy.sparse.csr_matrix
44 | A (sparse) scipy csr matrix of dimensions (i, j) with datatype float32 or
45 | float64. The datatype of A will determine operating precision.
46 | x : numpy.ndarray
47 | A (dense) numpy ndarray of dimension (j, k) with datatype float32 or
48 | float64. If x's datatype does not match A's datatype, x is converted.
49 |
50 | Returns
51 | =======
52 | y : numpy.ndarray
53 | A (dense) numpy ndarray of dimension (i, k) with datatype float32 or
54 | float64, depending on A.
55 | """
56 | if not sparse.isspmatrix_csr(A):
57 | raise ValueError("Matrix must be in csr format")
58 |
59 | if A.dtype == "float64":
60 | return double_sparse_gemv_via_MKL(A, x)
61 | elif A.dtype == "float32":
62 | return single_sparse_gemv_via_MKL(A, x)
63 | else:
64 | raise ValueError("Data type of the matrix not understood")
65 |
66 |
67 | def double_sparse_gemv_via_MKL(A, x):
68 | """Double precision sparse GEMV, M being sparse, V dense."""
69 | # Check input types
70 | if not sparse.isspmatrix_csr(A):
71 | raise ValueError("Matrix must be in csr format.")
72 | if A.dtype != "float64":
73 | raise ValueError("Matrix must be of datatype float64.")
74 | if x.dtype.type is not np.double:
75 | x = x.astype(np.double, copy=True)
76 |
77 | # Matrix dimensions
78 | (i, j) = A.shape
79 |
80 | # Explanation: https://stackoverflow.com/a/52299730/6848887
81 | # The data of the matrix
82 | # data is an array containing all the non zero elements of the sparse matrix.
83 | # indices is an array mapping each element in data to its column in the sparse
84 | # matrix.
85 | # indptr then maps the elements of data and indices to the rows of the sparse
86 | # matrix.
87 | data = A.data.ctypes.data_as(POINTER(c_double))
88 | indptr = A.indptr.ctypes.data_as(POINTER(c_int))
89 | indices = A.indices.ctypes.data_as(POINTER(c_int))
90 |
91 | # Creating the output array y
92 | k = 1
93 | if x.ndim == 1:
94 | # Shape is hopefully (j, ), we can work with this
95 | y = np.empty(i, dtype=np.double, order="F")
96 |
97 | elif x.shape[1] == 1:
98 | # Shape is hopefully (j, 1)
99 | y = np.empty((i, 1), dtype=np.double, order="F")
100 |
101 | else:
102 | # Shape is hopefully (j, k)
103 | k = x.shape[1]
104 | y = np.empty((i, k), dtype=np.double, order="F")
105 |
106 | # Assert that first dimension matches
107 | if x.shape[0] != j:
108 | raise ValueError(f"Vector x must have j entries. x.size is {x.size}, j is {j}")
109 |
110 | # Put vector x in column-major order
111 | if not x.flags["F_CONTIGUOUS"]:
112 | x = x.copy(order="F")
113 |
114 | # If x is a vector, perform the operation once
115 | if k == 1:
116 | pointer_x = x.ctypes.data_as(POINTER(c_double))
117 | pointer_y = y.ctypes.data_as(POINTER(c_double))
118 |
119 | mkl.mkl_cspblas_dcsrgemv(
120 | "N", byref(c_int(i)), data, indptr, indices, pointer_x, pointer_y
121 | )
122 | else:
123 | for columns in range(k):
124 | xx = x[:, columns]
125 | yy = y[:, columns]
126 | pointer_x = xx.ctypes.data_as(POINTER(c_double))
127 | pointer_y = yy.ctypes.data_as(POINTER(c_double))
128 | mkl.mkl_cspblas_dcsrgemv(
129 | "N", byref(c_int(i)), data, indptr, indices, pointer_x, pointer_y
130 | )
131 |
132 | return y
133 |
134 |
135 | def single_sparse_gemv_via_MKL(A, x):
136 | """Single precision sparse GEMV, M being sparse, V dense."""
137 | # Check input types
138 | if not sparse.isspmatrix_csr(A):
139 | raise ValueError("Matrix must be in csr format.")
140 | if A.dtype != "float32":
141 | raise ValueError("Matrix must be of datatype float32.")
142 | if x.dtype.type is not np.single:
143 | x = x.astype(np.single, copy=True)
144 |
145 | # Matrix dimensions
146 | (i, j) = A.shape
147 |
148 | # Explanation: https://stackoverflow.com/a/52299730/6848887
149 | # The data of the matrix
150 | # data is an array containing all the non zero elements of the sparse matrix.
151 | # indices is an array mapping each element in data to its column in the sparse
152 | # matrix.
153 | # indptr then maps the elements of data and indices to the rows of the sparse
154 | # matrix.
155 | data = A.data.ctypes.data_as(POINTER(c_float))
156 | indptr = A.indptr.ctypes.data_as(POINTER(c_int))
157 | indices = A.indices.ctypes.data_as(POINTER(c_int))
158 |
159 | # Creating the output array y
160 | k = 1
161 | if x.ndim == 1:
162 | # Shape is hopefully (j, ), we can work with this
163 | y = np.empty(i, dtype=np.single, order="F")
164 |
165 | elif x.shape[1] == 1:
166 | # Shape is hopefully (j, 1)
167 | y = np.empty((i, 1), dtype=np.single, order="F")
168 |
169 | else:
170 | # Shape is hopefully (j, k)
171 | k = x.shape[1]
172 | y = np.empty((i, k), dtype=np.single, order="F")
173 |
174 | # Check that the first dimension of x matches the number of columns of A
175 | if x.shape[0] != j:
176 | raise ValueError(f"Vector x must have j rows. x.shape[0] is {x.shape[0]}, j is {j}")
177 |
178 | # Put vector x in column-major order
179 | if not x.flags["F_CONTIGUOUS"]:
180 | x = x.copy(order="F")
181 |
182 | # If x is a vector, perform the operation once
183 | if k == 1:
184 | pointer_x = x.ctypes.data_as(POINTER(c_float))
185 | pointer_y = y.ctypes.data_as(POINTER(c_float))
186 |
187 | mkl.mkl_cspblas_scsrgemv(
188 | "N", byref(c_int(i)), data, indptr, indices, pointer_x, pointer_y
189 | )
190 | else:
191 | for columns in range(k):
192 | xx = x[:, columns]
193 | yy = y[:, columns]
194 | pointer_x = xx.ctypes.data_as(POINTER(c_float))
195 | pointer_y = yy.ctypes.data_as(POINTER(c_float))
196 | mkl.mkl_cspblas_scsrgemv(
197 | "N", byref(c_int(i)), data, indptr, indices, pointer_x, pointer_y
198 | )
199 |
200 | return y
201 |
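
A minimal usage sketch for the double-precision routine above, assuming the module imports successfully (i.e. an MKL shared library can be located at import time); the SciPy product `A @ x` serves as a reference:

```
import numpy as np
import scipy.sparse as sparse

from hmclab.Helpers.InterfaceMKL import double_sparse_gemv_via_MKL

# Random sparse test matrix in CSR format, double precision
A = sparse.random(500, 300, density=0.01, format="csr", dtype=np.float64)
x = np.random.rand(300, 1)

y_mkl = double_sparse_gemv_via_MKL(A, x)  # MKL-backed product
y_ref = A @ x                             # SciPy reference

assert np.allclose(y_mkl, y_ref)
```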
--------------------------------------------------------------------------------
/hmclab/Helpers/Processing.py:
--------------------------------------------------------------------------------
1 | """A module with assorted samples processing functions.
2 | """
3 | import numpy as _numpy
4 |
5 |
6 | def autocorrelation(x):
7 | result = _numpy.correlate(x - _numpy.mean(x), x - _numpy.mean(x), mode="full")
8 | return result[int(result.size / 2) :] / _numpy.max(result)
9 |
10 |
11 | def crosscorrelation(x, y):
12 | result = _numpy.correlate(x - _numpy.mean(x), y - _numpy.mean(y), mode="full")
13 | return result[int(result.size / 2) :] / _numpy.max(_numpy.abs(result))
14 |
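
A short sketch of how these helpers might be used on a chain of samples; the white-noise input below is purely illustrative:

```
import numpy

from hmclab.Helpers.Processing import autocorrelation

trace = numpy.random.randn(10000)  # e.g. one dimension of a Markov chain
acf = autocorrelation(trace)

print(acf[0])                      # 1.0 at zero lag by construction
print(acf[1:50].mean())            # close to zero for uncorrelated samples
```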
--------------------------------------------------------------------------------
/hmclab/Helpers/RandomMatrices.py:
--------------------------------------------------------------------------------
1 | import numpy as _numpy
2 |
3 |
4 | def random_pd_matrix(dim: int):
5 | """Generate a random symmetric, positive-definite matrix.
6 |
7 | Parameters
8 | ----------
9 | dim : int
10 | The matrix dimension.
11 |
12 | Returns
13 | -------
14 | x : array of shape [n_dim, n_dim]
15 | The random symmetric, positive-definite matrix.
16 |
17 | """
18 | # Create random matrix
19 | a = _numpy.random.rand(dim, dim)
20 | # Create random PD matrix and extract correlation structure
21 | u, _, v = _numpy.linalg.svd(_numpy.dot(a.T, a))
22 | # Reconstruct a new matrix with random variances.
23 | return _numpy.dot(_numpy.dot(u, 1.0 + _numpy.diag(_numpy.random.rand(dim))), v)
24 |
25 |
26 | def random_correlation_matrix(dim: int):
27 | """Generate a random symmetric, positive-definite matrix.
28 |
29 | Parameters
30 | ----------
31 | dim : int
32 | The matrix dimension.
33 |
34 | Returns
35 | -------
36 | x : array of shape [n_dim, n_dim]
37 | The random symmetric, positive-definite matrix.
38 |
39 | """
40 | cov = random_pd_matrix(dim)
41 |
42 | inv_sigma = _numpy.diag(1.0 / _numpy.diag(cov) ** 0.5)
43 |
44 | return inv_sigma @ cov @ inv_sigma
45 |
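
A quick sanity-check sketch for both helpers: Cholesky factorization succeeds only for (symmetric) positive-definite input, and a correlation matrix should have a unit diagonal:

```
import numpy

from hmclab.Helpers.RandomMatrices import random_pd_matrix, random_correlation_matrix

M = random_pd_matrix(5)
numpy.linalg.cholesky(M)                   # raises LinAlgError if M is not PD

C = random_correlation_matrix(5)
assert numpy.allclose(numpy.diag(C), 1.0)  # unit diagonal
assert numpy.allclose(C, C.T)              # symmetric
```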
--------------------------------------------------------------------------------
/hmclab/Helpers/Timers.py:
--------------------------------------------------------------------------------
1 | """ A class that allows one to time functions over many repeated calls. Can be applied
2 | to instance methods.
3 |
4 | Usage:
5 |
6 | ```
7 | def my_function(a):
8 | return a * 100.0
9 |
10 | my_function(5.0) # Does not log total execution time.
11 |
12 | my_function = AccumulatingTimer(my_function)
13 |
14 | my_function(5.0) # Now logs total execution time.
15 |
16 | my_function.print_statistics() # Look into the statistics
17 |
18 | ```
19 |
20 | """
21 | from datetime import datetime as _datetime
22 |
23 |
24 | class AccumulatingTimer:
25 | def __init__(self, function):
26 | """Constructor that initializes number of calls and time spent."""
27 |
28 | if type(function) == AccumulatingTimer:
29 | # If we try to apply AccumulatingTimer twice, extract function from the
30 | # encapsulating object
31 | self.function = function.function
32 | else:
33 | self.function = function
34 |
35 | self.time_spent = 0.0
36 | self.calls = 0
37 |
38 | def __call__(self, *args, **kwargs):
39 | """Call the function, keeping track of time and number of calls."""
40 |
41 | self.calls += 1
42 | start = _datetime.now()
43 | return_value = self.function(*args, **kwargs)
44 | end = _datetime.now()
45 | self.time_spent += (end - start).total_seconds()
46 |
47 | # Return original value
48 | return return_value
49 |
50 | def print_statistics(self):
51 | print(f"Function: {self.function}")
52 | print(f"Calls: {self.calls}")
53 | print(f"Total time spent: {self.time_spent} seconds")
54 |
55 | def __str__(self) -> str:
56 | return (
57 | f"Function: {self.function}, "
58 | f"Calls: {self.calls}, "
59 | f"Total time spent: {self.time_spent} seconds, "
60 | )
61 |
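
A sketch of the instance-method case mentioned in the module docstring; the `Model` class below is hypothetical:

```
from hmclab.Helpers.Timers import AccumulatingTimer


class Model:
    def misfit(self, m):
        return m ** 2


model = Model()
model.misfit = AccumulatingTimer(model.misfit)  # wrap the bound method on this instance

for m in range(1000):
    model.misfit(float(m))

model.misfit.print_statistics()
```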
--------------------------------------------------------------------------------
/hmclab/Helpers/__init__.py:
--------------------------------------------------------------------------------
1 | from hmclab.Helpers import (
2 | BetterABC,
3 | RandomMatrices,
4 | InterfaceMKL,
5 | CustomExceptions,
6 | Processing,
7 | CaptureStdout,
8 | Timers,
9 | AppendNPY,
10 | )
11 |
12 | __all__ = [
13 | "BetterABC",
14 | "RandomMatrices",
15 | "InterfaceMKL",
16 | "CustomExceptions",
17 | "Processing",
18 | "CaptureStdout",
19 | "Timers",
20 | "AppendNPY",
21 | ]
22 |
--------------------------------------------------------------------------------
/hmclab/Optimizers.py:
--------------------------------------------------------------------------------
1 | """Optimizer methods.
2 |
3 | The methods in this module describe various numerical optimization routines. These
4 | routines can be used to find the minima of misfit functions, which is directly related
5 | to deterministic inversion.
6 |
7 | """
8 | from abc import ABC as _ABC
9 | from abc import abstractmethod as _abstractmethod
10 |
11 | import numpy as _numpy
12 | import tqdm.auto as _tqdm_au
13 |
14 | from hmclab.Distributions import _AbstractDistribution
15 |
16 | from typing import Tuple as _Tuple, List as _List
17 |
18 |
19 | def gradient_descent(
20 | target: _AbstractDistribution,
21 | initial_model: _numpy.ndarray = None,
22 | epsilon: float = 0.1,
23 | iterations: int = 100,
24 | regularization: float = None,
25 | strictly_monotonic=False,
26 | disable_progressbar=False,
27 | ) -> _Tuple[_numpy.ndarray, float, _List[_numpy.ndarray], _List[float]]:
28 | """Gradient descent on the target misfit."""
29 |
30 | dimensions = target.dimensions
31 |
32 | # If no initial model is given, start at zeros
33 | if initial_model is None:
34 | m = _numpy.zeros((dimensions, 1))
35 | else:
36 | assert initial_model.shape == (dimensions, 1)
37 | m = initial_model
38 |
39 | # Create progress bar
40 | try:
41 | progressbar = _tqdm_au.trange(
42 | iterations,
43 | desc="Iterating",
44 | leave=True,
45 | dynamic_ncols=True,
46 | disable=disable_progressbar,
47 | )
48 | except Exception:
49 | progressbar = _tqdm_au.trange(
50 | iterations,
51 | desc="Iterating",
52 | leave=True,
53 | disable=disable_progressbar,
54 | )
55 |
56 | try:
57 |
58 | # Compute initial misfit
59 | x = target.misfit(m)
60 |
61 | # Create the returns
62 | xs = []
63 | ms = []
64 |
65 | # Add starting model and misfit to the returns
66 | xs.append(x)
67 | ms.append(m)
68 |
69 | for _ in progressbar:
70 |
71 | # Compute gradient
72 | g = target.gradient(m)
73 |
74 | if regularization is not None:
75 | preconditioner = _numpy.diag(
76 | 1.0 / (_numpy.diag(g @ g.T) + regularization)
77 | )
78 | # Precondition the gradient
79 | g = preconditioner @ g
80 |
81 | # Update model
82 | m = m - epsilon * g
83 |
84 | # Compute misfit and store
85 | x = target.misfit(m)
86 |
87 | if _numpy.isnan(x) or _numpy.isinf(x):
88 | # Reset model and misfit
89 | x = xs[-1]
90 | m = ms[-1]
91 | # And exit loop
92 | progressbar.close()
93 | print("Encountered infinite or NaN values, terminating")
94 | break
95 |
96 | if x > xs[-1] and strictly_monotonic:
97 | # Reset model and misfit
98 | x = xs[-1]
99 | m = ms[-1]
100 | # And exit loop
101 | progressbar.close()
102 | print("Value is not strictly decreasing, terminating")
103 | break
104 |
105 | progressbar.set_description(f"Misfit: {x:.1e}")
106 | # Place current model and misfit
107 | xs.append(x)
108 | ms.append(m)
109 | except KeyboardInterrupt:
110 | pass
111 |
112 | return m, x, _numpy.array(ms), _numpy.array(xs)
113 |
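
A minimal sketch of calling the optimizer on a simple target; `StandardNormal1D` is used here only because it appears elsewhere in the test suite:

```
import hmclab

target = hmclab.Distributions.StandardNormal1D()

m, x, ms, xs = hmclab.Optimizers.gradient_descent(
    target,
    epsilon=0.1,
    iterations=50,
    disable_progressbar=True,
)

print(m, x)  # final model and misfit value
```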
--------------------------------------------------------------------------------
/hmclab/__init__.py:
--------------------------------------------------------------------------------
1 | """HMC Tomography module.
2 | Copyright 2019-2020 Andrea Zunino, Andreas Fichtner, Lars Gebraad
3 | """
4 | from hmclab import (
5 | MassMatrices,
6 | Distributions,
7 | Samplers,
8 | Optimizers,
9 | Visualization,
10 | )
11 | from hmclab.Samples import Samples, combine_samples
12 |
13 | from ._version import get_versions
14 |
15 | __version__ = get_versions()["version"]
16 | del get_versions
17 |
18 | name = "hmclab"
19 | __all__ = [
20 | "MassMatrices",
21 | "Distributions",
22 | "Samplers",
23 | "Optimizers",
24 | "Visualization",
25 | "Samples",
26 | "combine_samples",
27 | ]
28 |
--------------------------------------------------------------------------------
/notebooks/Home.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# HMCLab notebooks\n",
8 | "\n",
9 | "This notebook collection contains tutorials and exmaple usages of the algorithms\n",
10 | "provided by HMCLab."
11 | ]
12 | },
13 | {
14 | "cell_type": "markdown",
15 | "metadata": {},
16 | "source": [
17 | "**Tutorials**\n",
18 | "\n",
19 | "0. [Getting started](./tutorials/0%20-%20Getting%20started.ipynb)\n",
20 | "1. [Tuning Hamiltonian Monte Carlo](./tutorials/1%20-%20Tuning%20Hamiltonian%20Monte%20Carlo.ipynb)\n",
21 | "2. [Separate priors per dimenions](./tutorials/2%20-%20Separate%20priors%20per%20dimension.ipynb)\n",
22 | "3. [Creating your own inverse problem](./tutorials/3%20-%20Creating%20your%20own%20inverse%20problem.ipynb)\n",
23 | "4. [Running parallel Markov chains](./tutorials/4%20-%20Running%20parallel%20Markov%20chains.ipynb)\n",
24 | "5. [Transforming parameters](./tutorials/5%20-%20Transforming%20parameters.ipynb)"
25 | ]
26 | },
27 | {
28 | "cell_type": "markdown",
29 | "metadata": {},
30 | "source": [
31 | "**Examples**\n",
32 | "- [Sampling Linear Equations](./examples/Sampling%20linear%20equations.ipynb)\n",
33 | "- [Sampling Sparse Equations](./examples/Sampling%20sparse%20linear%20equations.ipynb)\n",
34 | "- [Locating quakes](./examples/Locating%20quakes.ipynb)\n",
35 | "- [Elastic 2d FWI](./examples/Elastic%202d%20FWI.ipynb)\n"
36 | ]
37 | }
38 | ],
39 | "metadata": {
40 | "kernelspec": {
41 | "display_name": "Python 3.9.7 ('base')",
42 | "language": "python",
43 | "name": "python3"
44 | },
45 | "language_info": {
46 | "name": "python",
47 | "version": "3.9.7"
48 | },
49 | "orig_nbformat": 4,
50 | "vscode": {
51 | "interpreter": {
52 | "hash": "7c1bdad425484a1abab6401f6d03a1a21275d78af8feb7660d444a4146aa874c"
53 | }
54 | }
55 | },
56 | "nbformat": 4,
57 | "nbformat_minor": 2
58 | }
59 |
--------------------------------------------------------------------------------
/notebooks/Tests.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "This notebook allows you to test all parts of HMCLab Python. It requires you to install the development dependencies of hmclab using pip."
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": null,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "import pytest"
17 | ]
18 | },
19 | {
20 | "cell_type": "markdown",
21 | "metadata": {},
22 | "source": [
23 | "Testing just one component."
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": null,
29 | "metadata": {},
30 | "outputs": [],
31 | "source": [
32 | "pytest.main([\"../tests/test_sampling.py\", \"--show-capture=no\", \"-x\"]);"
33 | ]
34 | },
35 | {
36 | "cell_type": "markdown",
37 | "metadata": {},
38 | "source": [
39 | "Testing all parts."
40 | ]
41 | },
42 | {
43 | "cell_type": "code",
44 | "execution_count": null,
45 | "metadata": {},
46 | "outputs": [],
47 | "source": [
48 | "pytest.main([\"..\", \"--show-capture=no\", \"-x\"]);"
49 | ]
50 | }
51 | ],
52 | "metadata": {
53 | "kernelspec": {
54 | "display_name": "Python 3.8.13 ('hmclab-dev')",
55 | "language": "python",
56 | "name": "python3"
57 | },
58 | "language_info": {
59 | "codemirror_mode": {
60 | "name": "ipython",
61 | "version": 3
62 | },
63 | "file_extension": ".py",
64 | "mimetype": "text/x-python",
65 | "name": "python",
66 | "nbconvert_exporter": "python",
67 | "pygments_lexer": "ipython3",
68 | "version": "3.8.13"
69 | },
70 | "orig_nbformat": 4,
71 | "vscode": {
72 | "interpreter": {
73 | "hash": "81112416095e422dc5dd51b3786c7090d0e8fe528db53aa5039aa29f86745f2f"
74 | }
75 | }
76 | },
77 | "nbformat": 4,
78 | "nbformat_minor": 2
79 | }
80 |
--------------------------------------------------------------------------------
/notebooks/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/__init__.py
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/.gitignore
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/10.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/10.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/10.picks.csv:
--------------------------------------------------------------------------------
1 | ,channel,pick 1618401998.23,uncertainty,snr 2.5068560727245424
2 | 0,1112.0,,,0.783922793023
3 | 1,1113.0,,,2.40780467078
4 | 2,1114.0,573.311258559,-0.820608537269,1.2565473995
5 | 3,1115.0,574.028571429,0.133771344377,2.12573303446
6 | 4,1116.0,571.707142857,0.212074846461,2.06311519304
7 | 5,1117.0,568.977495867,0.134383947533,1.16683819615
8 | 6,1118.0,570.057142857,0.0892201382387,2.25936105758
9 | 7,1119.0,568.735714286,0.075760443586,1.54448401543
10 | 8,1120.0,566.814285714,0.103695887356,1.97240177558
11 | 9,1121.0,566.792857143,0.0917233462282,3.42355974581
12 | 10,1122.0,566.471428571,0.0869900021161,3.34181479595
13 | 11,1123.0,565.65,0.0711418264499,4.15227839206
14 | 12,1124.0,565.328571429,0.140377714722,6.24382673191
15 | 13,1125.0,563.269106733,0.23600863361,1.29739028783
16 | 14,1126.0,560.938982068,0.148935420072,1.32709762515
17 | 15,1127.0,562.548294941,0.0994050920089,1.28519858625
18 | 16,1128.0,561.735714286,0.102620222037,6.24382809876
19 | 17,1129.0,560.414285714,0.0862406523664,1.60864529743
20 | 18,1130.0,560.392857143,0.121146067939,5.36080830052
21 | 19,1131.0,560.971428571,0.0795658235367,2.61405099949
22 | 20,1132.0,560.65,0.0995353077538,3.43232867774
23 | 21,1133.0,557.371428571,0.129538252416,2.15041685326
24 | 22,1134.0,555.95,0.172009877881,3.42564476229
25 | 23,1135.0,554.328571429,0.128885879338,2.33625188327
26 | 24,1136.0,555.707142857,0.0880436557099,1.75032996373
27 | 25,1137.0,555.785714286,0.0592727972343,2.24740905989
28 | 26,1138.0,556.964285714,0.07741161826,4.16675513869
29 | 27,1139.0,556.842857143,0.0688997411523,3.21201464346
30 | 28,1140.0,556.721428571,0.0606026190714,8.96919072267
31 | 29,1141.0,556.9,0.0835641750118,19.4426558408
32 | 30,1142.0,557.378571429,0.151279556687,1.57079930807
33 | 31,1143.0,554.957142857,0.0665213191677,2.81306721868
34 | 32,1144.0,554.835714286,0.0706869811722,2.95591275452
35 | 33,1145.0,553.514285714,0.0784017368079,3.27138505036
36 | 34,1146.0,553.592857143,0.066688089452,2.52063755774
37 | 35,1147.0,552.071428571,0.0482769017695,3.44681388899
38 | 36,1148.0,552.85,0.0329517230791,2.66429550676
39 | 37,1149.0,552.232203682,0.0639534314091,1.49005280423
40 | 38,1150.0,551.057142857,0.0598422749763,1.87688256342
41 | 39,1151.0,551.935714286,0.0775349574046,1.63990243445
42 | 40,1152.0,552.214285714,0.0730218435639,2.43605708606
43 | 41,1153.0,553.592857143,0.0769807613966,3.19383670196
44 | 42,1154.0,553.571428571,0.107700524587,2.11288568852
45 | 43,1155.0,552.55,0.264075770269,2.1170925643
46 | 44,1156.0,554.428571429,0.0608753628487,2.90595532675
47 | 45,1157.0,551.15,0.0484991001746,2.69056940664
48 | 46,1158.0,549.946556509,0.13556098864,1.11888624638
49 | 47,1159.0,549.79885259,0.141841902234,1.04894986437
50 | 48,1160.0,549.935650948,0.148122815827,1.27056710526
51 | 49,1161.0,548.585714286,0.250265686,2.49285197106
52 | 50,1162.0,547.149505182,0.0586516243869,1.38800396076
53 | 51,1163.0,546.892857143,0.0806618953412,1.95198242593
54 | 52,1164.0,547.971428571,0.0685542510164,2.79488950219
55 | 53,1165.0,547.85,0.0933782056366,2.90196338964
56 | 54,1166.0,546.628571429,0.0568930244329,1.79527991519
57 | 55,1167.0,545.907142857,0.124173226892,2.75386811349
58 | 56,1168.0,544.785714286,0.0556793019678,2.15011830101
59 | 57,1169.0,544.214732195,0.137150377831,1.19834893599
60 | 58,1170.0,546.010307465,0.11941692048,1.35174552498
61 | 59,1171.0,544.664285714,0.0412703503661,1.52823662587
62 | 60,1172.0,544.542857143,0.0286618142615,2.59408959379
63 | 61,1173.0,544.421428571,0.0509294321321,1.75961677752
64 | 62,1174.0,542.742857143,0.0697787680187,4.52211511427
65 | 63,1175.0,544.364026731,0.463822083638,0.978126542416
66 | 64,1176.0,545.321428571,0.857865399257,3.7004813822
67 | 65,1177.0,542.142857143,0.0794437882832,1.63968463868
68 | 66,1178.0,542.545405259,0.0568722395961,1.20695176036
69 | 67,1179.0,541.707194291,0.0524977898017,1.1329644818
70 | 68,1180.0,541.778571429,0.114461251592,2.77823732104
71 | 69,1181.0,541.057142857,0.144892995893,1.72622160608
72 | 70,1182.0,539.635714286,0.181683806996,4.45513032149
73 | 71,1183.0,538.35960655,0.0619620561596,1.35556567222
74 | 72,1184.0,538.783664141,0.0491461848787,0.552983290017
75 | 73,1185.0,539.481428718,0.0363303135978,0.898183701023
76 | 74,1186.0,540.40739432,0.023514442317,1.13171357449
77 | 75,1187.0,540.135102607,0.194140573274,1.14310614359
78 | 76,1188.0,542.0,0.0853540573057,1.74383408234
79 | 77,1189.0,540.962206476,0.0548912030897,1.48397144877
80 | 78,1190.0,541.057917192,0.0837696426506,1.06815842899
81 | 79,1191.0,541.221810384,0.112648082212,0.521014340725
82 | 80,1192.0,541.442135718,0.141526521772,1.03395845722
83 | 81,1193.0,541.707142857,0.170404961333,1.53260769299
84 | 82,1194.0,542.669609459,0.129082441676,0.991972712813
85 | 83,1195.0,543.676139433,0.0877599220181,0.923225543008
86 | 84,1196.0,544.737864676,0.0464374023605,1.18186382485
87 | 85,1197.0,544.151631375,0.0645772376158,1.16484414934
88 | 86,1198.0,543.642857143,0.0692118789406,3.31546053963
89 | 87,1199.0,545.421428571,0.0526695122984,2.1964833631
90 | 88,1200.0,545.7,0.0445138270672,3.71075653712
91 | 89,1201.0,545.978571429,0.0355095567837,1.67023377461
92 | 90,1202.0,547.957142857,0.0887754806528,2.32350561982
93 | 91,1203.0,548.861769603,0.098844690253,1.31786426132
94 | 92,1204.0,550.564285714,0.0390807159951,19.7488438751
95 | 93,1205.0,551.342857143,0.0496889095647,2.72818297494
96 | 94,1206.0,553.021428571,0.0522171267363,3.71574391209
97 | 95,1207.0,552.8,0.0491022255602,2.30742865861
98 | 96,1208.0,553.378571429,0.0638863477156,2.7704798619
99 | 97,1209.0,553.857142857,0.202142965753,2.00791527079
100 | 98,1210.0,553.05029001,0.356095398404,1.24924466358
101 | 99,1211.0,553.374551631,0.176720057538,1.18674680087
102 | 100,1212.0,553.292857143,0.0737570499359,1.7762307467
103 | 101,1213.0,554.571428571,0.0514119676155,2.05495415091
104 | 102,1214.0,554.65,0.0798665829905,3.2049143678
105 | 103,1215.0,555.528571429,0.0747640756668,2.41010061825
106 | 104,1216.0,556.507142857,0.0898194968652,1.64906861678
107 | 105,1217.0,556.385714286,0.0743589723983,2.05854139286
108 | 106,1218.0,557.064285714,0.0818157128054,2.14619352251
109 | 107,1219.0,558.236208958,0.0539034136332,1.36305732807
110 | 108,1220.0,558.95288446,0.0838585161932,1.37064851487
111 | 109,1221.0,560.0,0.0830768944735,2.01638819746
112 | 110,1222.0,561.078571429,0.115374912729,4.50932099988
113 | 111,1223.0,560.737300851,0.0956946079075,1.497964135
114 | 112,1224.0,562.185714286,0.223900928342,1.72496383436
115 | 113,1225.0,561.776919716,0.155349330235,1.33862057315
116 | 114,1226.0,564.692857143,0.160218221062,2.07126094184
117 | 115,1227.0,567.971428571,0.0641118567004,1.61915795353
118 | 116,1228.0,571.245552001,,1.20126594842
119 | 117,1229.0,572.118328002,,1.07113629681
120 | 118,1230.0,,,1.84585100271
121 | 119,1231.0,,,1.36003458348
122 |
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/12.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/12.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/15.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/15.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/15.picks.csv:
--------------------------------------------------------------------------------
1 | ,channel,pick 1618405043.23,uncertainty,snr 3.226657442514936
2 | 0,1173.0,,,1.19625197348
3 | 1,1174.0,,,0.677665521534
4 | 2,1175.0,,,2.29415557937
5 | 3,1176.0,154.55,0.0373889147332,1.71516628108
6 | 4,1177.0,152.928571429,0.0399885994753,1.92520491652
7 | 5,1178.0,152.807142857,0.0390508651735,1.62791280982
8 | 6,1179.0,152.685714286,0.0492684158585,2.22151074048
9 | 7,1180.0,152.207714163,0.0501957368595,1.44768951307
10 | 8,1181.0,151.261565844,0.0396000032151,1.12172358221
11 | 9,1182.0,150.321428571,0.0299796850207,2.22452711826
12 | 10,1183.0,149.756269285,0.0321761504316,0.702558016291
13 | 11,1184.0,149.3,0.0343726158424,2.22440832549
14 | 12,1185.0,147.678571429,0.0298579905916,2.98009160983
15 | 13,1186.0,147.057142857,0.0294962588583,2.50044728397
16 | 14,1187.0,146.235714286,0.0489506718583,3.33801806352
17 | 15,1188.0,144.489061863,0.0531072594344,1.23335543954
18 | 16,1189.0,143.742857143,0.0392713329428,3.51917830078
19 | 17,1190.0,142.921428571,0.0379932589622,1.74230272332
20 | 18,1191.0,141.3,0.0283531505387,4.24436538476
21 | 19,1192.0,140.175532509,0.0371640634593,0.760598456235
22 | 20,1193.0,138.977068891,0.0459749763799,1.45655330193
23 | 21,1194.0,139.231142256,0.0461695112084,1.26238154119
24 | 22,1195.0,137.535714286,0.0346992445106,2.13012223283
25 | 23,1196.0,137.114285714,0.03570250172,2.13435300269
26 | 24,1197.0,137.192857143,0.0310235859414,2.1490478803
27 | 25,1198.0,136.471428571,0.0341946590842,2.9119010698
28 | 26,1199.0,135.85,0.0317028618513,1.97977722576
29 | 27,1200.0,134.828571429,0.0389077711983,5.99627022275
30 | 28,1201.0,134.007142857,0.0297768188028,2.59930182842
31 | 29,1202.0,133.685714286,0.0297134533907,3.85712674109
32 | 30,1203.0,132.864285714,0.0281739872741,32.3338198591
33 | 31,1204.0,131.642857143,0.027466917308,24.1354187632
34 | 32,1205.0,130.821428571,0.0291214764372,5.44150110849
35 | 33,1206.0,129.7,0.0259767318394,3.1018609789
36 | 34,1207.0,129.578571429,0.0264787155551,7.39295689348
37 | 35,1208.0,128.057142857,0.0269256344336,7.57842965359
38 | 36,1209.0,128.535714286,0.0291029118484,3.69269339557
39 | 37,1210.0,127.214285714,0.0247731140102,1.80897326618
40 | 38,1211.0,127.092857143,0.0279240811887,3.22661310243
41 | 39,1212.0,126.21671679,0.0290814721745,0.99457356861
42 | 40,1213.0,125.271428571,0.0302388631602,3.36244697276
43 | 41,1214.0,125.25,0.0417593110713,2.36152534898
44 | 42,1215.0,123.717298119,0.0327765707093,1.48322205501
45 | 43,1216.0,123.745692301,0.0367747329821,1.31551817195
46 | 44,1217.0,122.985714286,0.0275967562272,4.86246567497
47 | 45,1218.0,122.764285714,0.0341530587866,2.13445604021
48 | 46,1219.0,122.442857143,0.0708284800469,2.89280220125
49 | 47,1220.0,122.21163054,0.0502607714332,0.802038003875
50 | 48,1221.0,121.721428571,0.0296930628195,1.52467044424
51 | 49,1222.0,121.0,0.127327026588,1.56659323693
52 | 50,1223.0,121.649650449,0.0324468041577,1.47230497009
53 | 51,1224.0,121.607142857,0.0421574252738,1.69385243688
54 | 52,1225.0,121.046903937,0.0334087497524,1.01762554872
55 | 53,1226.0,120.085714286,0.024660074231,2.37897140857
56 | 54,1227.0,120.164285714,0.027001843255,3.52655130651
57 | 55,1228.0,120.842857143,0.0274942998678,1.71349068648
58 | 56,1229.0,121.221428571,0.033499922594,3.46001039481
59 | 57,1230.0,121.3,0.0749502576262,1.58826736352
60 | 58,1231.0,121.978571429,0.0414203730729,3.18284941234
61 | 59,1232.0,122.057142857,0.0302338387473,1.52934318042
62 | 60,1233.0,121.935714286,0.0402605607888,2.45923263976
63 | 61,1234.0,121.875629841,0.0353454964634,0.768329517108
64 | 62,1235.0,121.914285714,0.030430432138,2.06211922783
65 | 63,1236.0,123.092857143,0.0245260812931,2.92247121875
66 | 64,1237.0,122.271428571,0.0213973119885,4.10417918702
67 | 65,1238.0,122.05,0.079419330328,2.23311552229
68 | 66,1239.0,122.697848371,0.0537829168974,0.77679075803
69 | 67,1240.0,123.328571429,0.0281465034668,2.10944961195
70 | 68,1241.0,123.007142857,0.0286412286156,3.65218137535
71 | 69,1242.0,122.885714286,0.0230107345192,2.35131132097
72 | 70,1243.0,122.764285714,0.0612299778504,2.84457635528
73 | 71,1244.0,121.981756844,0.0453512036072,0.827953621732
74 | 72,1245.0,121.742857143,0.0294724293641,2.26065495864
75 | 73,1246.0,124.021428571,0.0355991145394,3.11160802522
76 | 74,1247.0,125.3,0.0661510370566,2.42829797493
77 | 75,1248.0,127.167456595,0.0502425261536,0.461503533837
78 | 76,1249.0,128.978571429,0.0343340152507,20.6516486851
79 | 77,1250.0,130.157142857,0.0396662416634,2.52060541864
80 | 78,1251.0,131.435714286,0.0549668986908,4.34018430519
81 | 79,1252.0,131.514285714,0.0519015782081,1.53970507791
82 | 80,1253.0,131.534041793,0.0433299258108,0.483817590758
83 | 81,1254.0,131.992857143,0.0347582734134,1.73808212085
84 | 82,1255.0,133.881631459,0.0522070063345,1.21695221701
85 | 83,1256.0,133.6,0.0314128754295,2.36836792037
86 | 84,1257.0,133.978571429,0.070531562385,2.31525142222
87 | 85,1258.0,137.157142857,0.0552503963861,2.33723279671
88 | 86,1259.0,137.435714286,0.0425189784359,2.37817145029
89 | 87,1260.0,138.514285714,0.171938401222,2.34990355639
90 | 88,1261.0,138.692857143,0.103013677779,12.2986200069
91 | 89,1262.0,141.442443958,0.0515222615707,1.39372060174
92 | 90,1263.0,144.2,0.103864198121,7.35837550017
93 | 91,1264.0,147.378571429,0.0547794666341,2.3632700257
94 | 92,1265.0,148.557142857,0.0621157106326,1.80823792781
95 | 93,1266.0,149.757678265,0.163183728343,0.718643302428
96 | 94,1267.0,150.735714286,0.264251746053,5.53297439182
97 | 95,1268.0,153.378589434,0.196671637317,1.33030558825
98 | 96,1269.0,152.485714286,0.244631913448,1.62229877708
99 | 97,1270.0,153.764285714,0.051887078348,6.41190262908
100 | 98,1271.0,156.042857143,0.0414163771352,3.53414606706
101 | 99,1272.0,157.021428571,0.0335536597622,2.32591395626
102 | 100,1273.0,158.3,0.0339944976781,6.66941320736
103 | 101,1274.0,158.778571429,0.0319563998432,2.66171820414
104 | 102,1275.0,159.057142857,0.0276442324982,8.63165196951
105 | 103,1276.0,159.935714286,0.0461898328083,3.16697051767
106 | 104,1277.0,160.594706174,0.0315414202811,1.15072335996
107 | 105,1278.0,161.542857143,0.0269760525052,2.65604106239
108 | 106,1279.0,161.821428571,0.0286710384659,5.32689240849
109 | 107,1280.0,162.7,0.030989608062,6.04032483201
110 | 108,1281.0,163.978571429,0.0256418559307,2.42126055665
111 | 109,1282.0,163.957142857,0.034647879805,2.21111493457
112 | 110,1283.0,163.517539649,0.032968905236,1.49076451451
113 | 111,1284.0,164.364285714,0.0377877217907,3.29857841933
114 | 112,1285.0,164.993044107,0.0367389190526,0.877932197496
115 | 113,1286.0,165.368225759,0.0356901163145,1.08010923785
116 | 114,1287.0,166.142857143,0.0346413135764,1.88584477708
117 | 115,1288.0,167.321428571,,2.63341991097
118 | 116,1289.0,,,1.62068359011
119 | 117,1290.0,,,0.860216197842
120 |
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/19.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/19.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/19.picks.csv:
--------------------------------------------------------------------------------
1 | ,channel,pick 1618406930.25,uncertainty,snr 8.316258264081466
2 | 0,216.0,,,1.3093769064
3 | 1,217.0,,,1.21386424095
4 | 2,218.0,,,0.902038755664
5 | 3,219.0,,,1.41002444031
6 | 4,220.0,,,1.24415284928
7 | 5,221.0,,,0.991274173767
8 | 6,222.0,,,1.62708593569
9 | 7,223.0,124.534045353,0.0297900737342,1.28025746851
10 | 8,224.0,124.467616782,0.0264857750966,1.48321653663
11 | 9,225.0,123.054879819,0.023800020369,0.863845458528
12 | 10,226.0,120.957142857,0.0211142656414,2.82569149408
13 | 11,227.0,119.835714286,0.0213106887903,5.60744959508
14 | 12,228.0,119.614285714,0.0219314830579,13.6753233203
15 | 13,229.0,117.792857143,0.0215026511578,6.3747373285
16 | 14,230.0,116.371428571,0.0193867600021,39.9981458597
17 | 15,231.0,114.55,0.0183537724983,47.6494721756
18 | 16,232.0,113.528571429,0.0176418475244,23.7730332583
19 | 17,233.0,113.307142857,0.0191016219745,12.3170007272
20 | 18,234.0,111.585714286,0.0202528520075,10.5236454461
21 | 19,235.0,111.164285714,0.0196442390113,13.2533353655
22 | 20,236.0,109.142857143,0.0468261179055,10.5545649344
23 | 21,237.0,111.421428571,0.0459327532387,2.35707018043
24 | 22,238.0,114.7,0.0213424388645,30.16627737
25 | 23,239.0,113.978571429,0.0211542307968,8.71538909494
26 | 24,240.0,116.157142857,0.0182397825779,17.9769178679
27 | 25,241.0,117.735714286,0.0185435464161,18.9348967868
28 | 26,242.0,118.314285714,0.0191008180775,5.89796904047
29 | 27,243.0,118.992857143,0.0227073724657,5.53757040105
30 | 28,244.0,119.807774348,0.0246635155367,1.39727575135
31 | 29,245.0,121.3,0.0437127044001,4.44063176065
32 | 30,246.0,121.064762749,0.0386530140355,1.10690184613
33 | 31,247.0,121.066629095,0.0297123555481,1.06348092016
34 | 32,248.0,121.407142857,0.0207716970606,2.31837961398
35 | 33,249.0,122.985714286,0.0283573199669,4.55077436199
36 | 34,250.0,122.964647923,0.0394867874451,1.21368989186
37 | 35,251.0,124.768400456,,1.48844031913
38 | 36,252.0,,,1.65835429366
39 |
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/2.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/2.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/2.picks.csv:
--------------------------------------------------------------------------------
1 | ,channel,pick 1618398483.48,uncertainty,snr 2.5658437512597487
2 | 0,189.0,,,1.16484169202
3 | 1,190.0,,,1.69281105844
4 | 2,191.0,122.7,0.0246267331757,4.15177530083
5 | 3,192.0,121.478571429,0.0288184787594,2.14927515108
6 | 4,193.0,120.457142857,0.0245920935927,1.50164059744
7 | 5,194.0,120.135714286,0.0246651463187,2.45275911588
8 | 6,195.0,119.114285714,0.0231619778214,3.39946635362
9 | 7,196.0,118.092857143,0.0234879201087,7.0643891107
10 | 8,197.0,117.571428571,0.0232005298372,5.94789201417
11 | 9,198.0,117.35,0.0266510489858,4.70160146297
12 | 10,199.0,116.328571429,0.0713142725849,1.80446693958
13 | 11,200.0,116.120205499,0.0745867113748,0.553606787948
14 | 12,201.0,116.036809566,0.0778591501647,1.15355199942
15 | 13,202.0,114.435714286,0.0275582534162,2.89256153985
16 | 14,203.0,114.214285714,0.0506987983875,2.59503205663
17 | 15,204.0,113.535714286,0.0402027909008,1.93290339077
18 | 16,205.0,114.914285714,0.0258173615587,3.68645705867
19 | 17,206.0,114.192857143,0.0627186999799,7.32580331355
20 | 18,207.0,115.213900933,0.0692322977904,1.36849774115
21 | 19,208.0,115.4,0.0345866398038,1.7672365323
22 | 20,209.0,116.178571429,0.0288181449375,1.76126797378
23 | 21,210.0,117.510673368,0.0265859219851,0.940615628857
24 | 22,211.0,118.557142857,0.0243536990327,1.87350816708
25 | 23,212.0,117.835714286,0.0235201547029,4.01390881601
26 | 24,213.0,119.414285714,0.0260021613775,4.28836180935
27 | 25,214.0,119.792857143,0.0274693369608,1.8964565549
28 | 26,215.0,121.371428571,0.0249362450078,5.37710201732
29 | 27,216.0,122.25,0.029781330357,2.76778624164
30 | 28,217.0,122.028571429,0.0332821594629,1.54938848011
31 | 29,218.0,122.616952446,0.0299700481679,0.946593167135
32 | 30,219.0,123.707142857,0.0266579368729,2.05106584778
33 | 31,220.0,123.40873904,0.030075008301,1.30715472212
34 | 32,221.0,124.916488982,0.0270585751317,1.4287208396
35 | 33,222.0,125.642857143,0.0251226264706,2.34793445349
36 | 34,223.0,126.821428571,0.0306854328746,1.52357647559
37 | 35,224.0,126.97027067,,1.16804462778
38 | 36,225.0,,,1.84397319476
39 | 37,226.0,,,1.11003431351
40 |
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/21.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/21.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/21.picks.csv:
--------------------------------------------------------------------------------
1 | ,channel,pick 1618407268.26,uncertainty,snr 3.0809164129701005
2 | 0,195.0,,,1.5610169906
3 | 1,196.0,111.5,0.0242767492828,3.72586617278
4 | 2,197.0,111.578571429,0.0250786138201,2.59999148282
5 | 3,198.0,112.057142857,0.0254822691761,2.0231889533
6 | 4,199.0,113.435714286,0.0230570530379,5.83626633602
7 | 5,200.0,112.814285714,0.0207410871488,6.27764630523
8 | 6,201.0,113.792857143,0.0207064576209,2.79758406534
9 | 7,202.0,114.571428571,0.0991242051928,3.19721093546
10 | 8,203.0,114.15,0.0741152099928,1.5945797674
11 | 9,204.0,115.428571429,0.0235068131178,2.52313766223
12 | 10,205.0,117.307142857,0.0242644620097,6.9254995586
13 | 11,206.0,117.985714286,0.026678130002,2.84316864732
14 | 12,207.0,118.264285714,0.026371661316,1.71973594133
15 | 13,208.0,118.142857143,0.0247815898211,2.16837830906
16 | 14,209.0,119.621428571,0.0251628096976,5.24963187836
17 | 15,210.0,120.1,0.0245494677972,2.44717843386
18 | 16,211.0,121.778571429,0.0256172789755,2.62531087139
19 | 17,212.0,122.557142857,0.0243048623737,3.01778295827
20 | 18,213.0,123.235714286,0.0239386680568,4.17216660166
21 | 19,214.0,124.214285714,0.0273464682086,2.3861380835
22 | 20,215.0,124.792857143,0.0278113138147,2.17177924081
23 | 21,216.0,127.222718702,,1.2006848956
24 | 22,217.0,,,1.79713340737
25 |
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/25.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/25.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/25.picks.csv:
--------------------------------------------------------------------------------
1 | ,channel,pick 1618410408.05,uncertainty,snr 132.39877812421153
2 | 0,78.0,,,5.23233027951
3 | 1,79.0,137.9,0.0337933970314,1.71342812885
4 | 2,80.0,137.778571429,0.0398063167544,3.42941824335
5 | 3,81.0,135.852186201,0.0464058249162,1.43848475069
6 | 4,82.0,134.985714286,0.0710739564377,1.60626400823
7 | 5,83.0,134.065362338,0.11088073737,1.18228256217
8 | 6,84.0,131.335714286,0.032236228863,1.72808669606
9 | 7,85.0,130.564935876,0.0401822383987,1.18635629954
10 | 8,86.0,129.542857143,0.0313588057594,7.00604278056
11 | 9,87.0,128.221428571,0.0330001549941,3.26786895531
12 | 10,88.0,127.4,0.0379786942352,2.32294573621
13 | 11,89.0,125.078571429,0.0392935771601,1.57285458166
14 | 12,90.0,124.157142857,0.0381328174097,2.02847695533
15 | 13,91.0,122.735714286,0.0318913721678,7.94694771878
16 | 14,92.0,122.414285714,0.0490955094759,1.65394889525
17 | 15,93.0,120.892857143,0.0376822846422,2.69004641848
18 | 16,94.0,119.171428571,0.0352049577547,2.62725412817
19 | 17,95.0,118.85,0.0360423553053,4.40553069162
20 | 18,96.0,116.928571429,0.0407002636287,40.3535152093
21 | 19,97.0,116.607142857,0.0351234122909,7.68535623275
22 | 20,98.0,114.785714286,0.0316198037167,29.0135838103
23 | 21,99.0,113.364285714,0.0286135837983,138.375333202
24 | 22,100.0,111.942857143,0.0273827536199,117.696229534
25 | 23,101.0,110.521428571,0.0262939375848,506.272066345
26 | 24,102.0,108.1,0.025982617712,110.26682402
27 | 25,103.0,105.678571429,0.0247781461052,41.2043283341
28 | 26,104.0,104.957142857,0.0727534797888,152.914583497
29 | 27,105.0,104.735714286,0.0409603899352,66.9560139181
30 | 28,106.0,103.714285714,0.0284599198927,323.361085584
31 | 29,107.0,104.092857143,0.0778196575898,256.416976551
32 | 30,108.0,100.814285714,0.0388928628545,77.5025693972
33 | 31,109.0,99.7928571429,0.0531882500267,2226.92335867
34 | 32,110.0,98.7714285714,0.100424702546,486.887663682
35 | 33,111.0,95.4928571429,-0.913049285596,169.069517724
36 | 34,112.0,93.0714285714,0.0366519359847,2137.47672538
37 | 35,113.0,96.35,0.0281403355216,139.774184634
38 | 36,114.0,97.8285714286,0.0356320090867,156.405240829
39 | 37,115.0,97.8071428571,0.0751093804809,10.3410264027
40 | 38,116.0,97.3857142857,0.0213408790321,31.3302099588
41 | 39,117.0,97.8642857143,0.0240249281665,5.25429771882
42 | 40,118.0,99.1428571429,0.0229864174358,102.161206137
43 | 41,119.0,100.021428571,0.0249750079633,80.1574686928
44 | 42,120.0,101.1,0.028629745617,20.0157105255
45 | 43,121.0,102.378571429,0.0343086761208,3.46637691111
46 | 44,122.0,103.057142857,0.0345672945636,12.7890882454
47 | 45,123.0,103.835714286,0.0329137142389,15.3054794439
48 | 46,124.0,104.614285714,0.0241297467111,6.49754014308
49 | 47,125.0,105.492857143,0.0250596766871,5.51575401954
50 | 48,126.0,106.371428571,0.0313674957398,2.07388334822
51 | 49,127.0,107.008195724,0.0402098252928,1.30408720979
52 | 50,128.0,108.378571429,0.047215020243,1.59809067828
53 | 51,129.0,109.757142857,0.0266632224342,3.55045244936
54 | 52,130.0,109.835714286,,3.36873017012
55 | 53,131.0,,,1.76948549753
56 | 54,132.0,,,1.18492606753
57 | 55,133.0,,,0.242652731673
58 | 56,134.0,,,1.21016234032
59 |
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/5.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/5.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/54.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/54.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/6.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/6.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/9.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/9.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/Data processing.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "id": "ba5e104e",
7 | "metadata": {},
8 | "outputs": [
9 | {
10 | "name": "stderr",
11 | "output_type": "stream",
12 | "text": [
13 | "/opt/miniforge3/envs/hmclab-dev/lib/python3.8/site-packages/pkg_resources/__init__.py:123: PkgResourcesDeprecationWarning: 1.0-beta-1-7-g3f59f5a is an invalid version and will not be supported in a future release\n",
14 | " warnings.warn(\n"
15 | ]
16 | }
17 | ],
18 | "source": [
19 | "import numpy\n",
20 | "import pandas\n",
21 | "import glob\n",
22 | "import sys\n",
23 | "import os\n",
24 | "\n",
25 | "sys.path.append(os.path.dirname(os.path.realpath(\".\")))\n",
26 | "\n",
27 | "from helpers import *"
28 | ]
29 | },
30 | {
31 | "cell_type": "code",
32 | "execution_count": 2,
33 | "id": "b5285225",
34 | "metadata": {},
35 | "outputs": [],
36 | "source": [
37 | "df = pandas.read_csv(\"xyz_polar.csv\", delimiter=\",\")\n",
38 | "channel = df[\"channels\"].values"
39 | ]
40 | },
41 | {
42 | "cell_type": "code",
43 | "execution_count": 3,
44 | "id": "55026d94",
45 | "metadata": {},
46 | "outputs": [],
47 | "source": [
48 | "csv_files = glob.glob(\"*.picks.csv\")\n",
49 | "picks_collection = [\n",
50 | " pandas.read_csv(\n",
51 | " csv_file,\n",
52 | " index_col=0,\n",
53 | " names=[\"channel\", \"pick\", \"uncertainty\", \"snr\"],\n",
54 | " skiprows=1,\n",
55 | " )\n",
56 | " for csv_file in csv_files\n",
57 | "]\n",
58 | "\n",
59 | "numpy.save(\"csv_files.npy\", csv_files)"
60 | ]
61 | },
62 | {
63 | "cell_type": "markdown",
64 | "id": "7e07cf2b",
65 | "metadata": {},
66 | "source": [
67 | "Only keep data points that contain values for all columns. i.e. no missing uncertainty or SNR"
68 | ]
69 | },
70 | {
71 | "cell_type": "code",
72 | "execution_count": 4,
73 | "id": "3bc7d876",
74 | "metadata": {},
75 | "outputs": [],
76 | "source": [
77 | "picks_without_nans_collection = [\n",
78 | " pick[~rows_contain_nans(pick)] for pick in picks_collection\n",
79 | "]"
80 | ]
81 | },
82 | {
83 | "cell_type": "code",
84 | "execution_count": 5,
85 | "id": "1fb9d710",
86 | "metadata": {},
87 | "outputs": [],
88 | "source": [
89 | "n_events = len(picks_without_nans_collection)\n",
90 | "observable_arrivals_per_event = len(channel)\n",
91 | "\n",
92 | "data_array = numpy.empty((observable_arrivals_per_event, n_events))\n",
93 | "data_array[:] = numpy.nan\n",
94 | "\n",
95 | "uncertainty_array = numpy.empty((observable_arrivals_per_event, n_events))\n",
96 | "uncertainty_array[:] = numpy.nan\n",
97 | "\n",
98 | "snr_array = numpy.empty((observable_arrivals_per_event, n_events))\n",
99 | "snr_array[:] = numpy.nan\n",
100 | "\n",
101 | "for i_pick, pick in enumerate(picks_without_nans_collection):\n",
102 | " data_array[match_arrays(channel, pick.channel.to_list()), i_pick] = (\n",
103 | " pick.pick - pick.pick.min()\n",
104 | " ) / 200\n",
105 | "\n",
106 | " uncertainty_array[\n",
107 | " match_arrays(channel, pick.channel.to_list()), i_pick\n",
108 | " ] = pick.uncertainty\n",
109 | "\n",
110 | " snr_array[match_arrays(channel, pick.channel.to_list()), i_pick] = pick.snr"
111 | ]
112 | },
113 | {
114 | "cell_type": "code",
115 | "execution_count": 6,
116 | "id": "5a0a00c7",
117 | "metadata": {},
118 | "outputs": [],
119 | "source": [
120 | "numpy.save(\"data_array.npy\", data_array)\n",
121 | "numpy.save(\"uncertainty_array.npy\", uncertainty_array)\n",
122 | "numpy.save(\"snr_array.npy\", snr_array)"
123 | ]
124 | }
125 | ],
126 | "metadata": {
127 | "kernelspec": {
128 | "display_name": "Python 3 (ipykernel)",
129 | "language": "python",
130 | "name": "python3"
131 | },
132 | "language_info": {
133 | "codemirror_mode": {
134 | "name": "ipython",
135 | "version": 3
136 | },
137 | "file_extension": ".py",
138 | "mimetype": "text/x-python",
139 | "name": "python",
140 | "nbconvert_exporter": "python",
141 | "pygments_lexer": "ipython3",
142 | "version": "3.8.13"
143 | }
144 | },
145 | "nbformat": 4,
146 | "nbformat_minor": 5
147 | }
148 |
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/aerial-snowcat-truck-people-grimsvotn.jpg:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/aerial-snowcat-truck-people-grimsvotn.jpg
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/data/allgps_channels.xlsx:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/data/allgps_channels.xlsx
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/helpers.py:
--------------------------------------------------------------------------------
1 | import numpy
2 | import tilemapbase
3 | import obspy.signal
4 | from matplotlib.patches import Ellipse
5 | import matplotlib.transforms as transforms
6 |
7 |
8 |
9 | def tilemapbase_project_array(x, y, tilemapbase_project):
10 | # For some stupid reason the package does not work on arrays.
11 | assert numpy.array(x).shape == numpy.array(y).shape
12 | x_p, y_p = numpy.empty_like(x), numpy.empty_like(y)
13 |
14 | for _i, (_x, _y) in enumerate(zip(x, y)):
15 | _x_p, _y_p = tilemapbase_project(_x, _y)
16 |
17 | x_p[_i] = _x_p
18 | y_p[_i] = _y_p
19 |
20 | return x_p, y_p
21 |
22 |
23 | def tilemapbase_create_extent(midpoint, degree_range):
24 | extent = tilemapbase.Extent.from_lonlat(
25 | midpoint[0] - degree_range * 2,
26 | midpoint[0] + degree_range * 2,
27 | midpoint[1] - degree_range,
28 | midpoint[1] + degree_range,
29 | )
30 | extent = extent.to_aspect(1.0)
31 | return extent
32 |
33 |
34 | def rows_contain_nans(df):
35 | return (
36 | numpy.sum(numpy.vstack([(numpy.isnan(df[r].values)) for r in df]).T, axis=1) > 0
37 | )
38 |
39 |
40 | def confidence_ellipse(x, y, ax, n_std=3.0, facecolor="none", **kwargs):
41 | """
42 | See also: https://matplotlib.org/devdocs/gallery/statistics/confidence_ellipse.html
43 |
44 |
45 | Create a plot of the covariance confidence ellipse of *x* and *y*.
46 |
47 | Parameters
48 | ----------
49 | x, y : array-like, shape (n, )
50 | Input data.
51 |
52 | ax : matplotlib.axes.Axes
53 | The axes object to draw the ellipse into.
54 |
55 | n_std : float
56 | The number of standard deviations used to determine the ellipse's radii.
57 |
58 | **kwargs
59 | Forwarded to `~matplotlib.patches.Ellipse`
60 |
61 | Returns
62 | -------
63 | matplotlib.patches.Ellipse
64 | """
65 | if x.size != y.size:
66 | raise ValueError("x and y must be the same size")
67 |
68 | cov = numpy.cov(x, y)
69 | pearson = cov[0, 1] / numpy.sqrt(cov[0, 0] * cov[1, 1])
70 | # Using a special case to obtain the eigenvalues of this
71 | # two-dimensional dataset.
72 | ell_radius_x = numpy.sqrt(1 + pearson)
73 | ell_radius_y = numpy.sqrt(1 - pearson)
74 | ellipse = Ellipse(
75 | (0, 0),
76 | width=ell_radius_x * 2,
77 | height=ell_radius_y * 2,
78 | facecolor=facecolor,
79 | **kwargs
80 | )
81 |
82 | # Calculating the standard deviation of x from
83 | # the square root of the variance and multiplying
84 | # by the given number of standard deviations.
85 | scale_x = numpy.sqrt(cov[0, 0]) * n_std
86 | mean_x = numpy.mean(x)
87 |
88 | # Calculating the standard deviation of y ...
89 | scale_y = numpy.sqrt(cov[1, 1]) * n_std
90 | mean_y = numpy.mean(y)
91 |
92 | transf = (
93 | transforms.Affine2D()
94 | .rotate_deg(45)
95 | .scale(scale_x, scale_y)
96 | .translate(mean_x, mean_y)
97 | )
98 |
99 | ellipse.set_transform(transf + ax.transData)
100 | return ax.add_patch(ellipse)
101 |
102 |
103 | def to_xyz(lon, lat, origin):
104 | _x, _y = numpy.hsplit(
105 | numpy.vstack(
106 | [obspy.signal.util.util_geo_km(*origin, *point) for point in zip(lon, lat)]
107 | ),
108 | 2,
109 | )
110 |
111 | _x.shape = lon.shape
112 | _y.shape = lon.shape
113 |
114 | return _x, _y
115 |
116 |
117 | def to_lonlat(x, y, origin):
118 | _lon, _lat = numpy.hsplit(
119 | numpy.vstack(
120 | [obspy.signal.util.util_lon_lat(*origin, *point) for point in zip(x, y)]
121 | ),
122 | 2,
123 | )
124 |
125 | _lon.shape = x.shape
126 | _lat.shape = y.shape
127 |
128 | return _lon, _lat
129 |
130 |
131 | def match_arrays(arr1, arr2):
132 | return numpy.argmax(arr1[:, None] == arr2, axis=0)
133 |
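
For reference, a tiny illustration of `match_arrays`, which returns the row index in the first array of each entry of the second; it assumes the function is in scope, e.g. via `from helpers import match_arrays` as in the accompanying notebooks:

```
import numpy

from helpers import match_arrays

channels = numpy.array([100.0, 101.0, 102.0, 103.0])
picked = [102.0, 100.0]

print(match_arrays(channels, picked))  # -> [2 0]
```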
--------------------------------------------------------------------------------
/notebooks/examples/locatingquakes/samples/.gitignore:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/examples/locatingquakes/samples/.gitignore
--------------------------------------------------------------------------------
/notebooks/examples/settings.ini:
--------------------------------------------------------------------------------
1 | [domain]
2 | nt = 1400
3 | nx_inner = 20
4 | nz_inner = 20
5 | nx_inner_boundary = 0
6 | nz_inner_boundary = 0
7 | dx = 1.249
8 | dz = 1.249
9 | dt = 0.0002
10 |
11 | [boundary]
12 | np_boundary = 10
13 | np_factor = 0.015
14 |
15 | [medium]
16 | scalar_rho = 1500.0
17 | scalar_vp = 2000.0
18 | scalar_vs = 800.0
19 |
20 | [sources]
21 | peak_frequency = 50.0
22 | n_sources = 2
23 | n_shots = 1
24 | source_timeshift = 0.005
25 | delay_cycles_per_shot = 6
26 | moment_angles = {90, 180}
27 | ix_sources = {2, 18}
28 | iz_sources = {2, 2}
29 | which_source_to_fire_in_which_shot = {{0, 1}}
30 |
31 | [receivers]
32 | nr = 4
33 | ix_receivers = {4, 8, 12, 16}
34 | iz_receivers = {18, 18, 18, 18}
35 |
36 | [inversion]
37 | snapshot_interval = 10
38 |
39 | [basis]
40 | npx = 1
41 | npz = 1
42 |
43 | [output]
44 | observed_data_folder = .
45 | stf_folder = .
46 |
47 |
--------------------------------------------------------------------------------
/notebooks/tutorials/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/tutorials/__init__.py
--------------------------------------------------------------------------------
/notebooks/tutorials/bin_samples/.gitignore:
--------------------------------------------------------------------------------
1 | # Ignore everything in this directory
2 | *
3 | # Except this file
4 | !.gitignore
--------------------------------------------------------------------------------
/notebooks/tutorials/reproducibility:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/larsgeb/hmclab/a3328daa6ffbaf663c23958cb187f957abb7b62d/notebooks/tutorials/reproducibility
--------------------------------------------------------------------------------
/pyproject.toml:
--------------------------------------------------------------------------------
1 | [build-system]
2 | requires = [
3 | "setuptools>=42",
4 | "wheel",
5 | "versioneer-518"
6 | ]
7 | build-backend = "setuptools.build_meta"
--------------------------------------------------------------------------------
/requirements-dev.txt:
--------------------------------------------------------------------------------
1 | .
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | .
--------------------------------------------------------------------------------
/setup.cfg:
--------------------------------------------------------------------------------
1 |
2 | # See the docstring in versioneer.py for instructions. Note that you must
3 | # re-run 'versioneer.py setup' after changing this section, and commit the
4 | # resulting files.
5 |
6 | [versioneer]
7 | VCS = git
8 | style = git-describe
9 | versionfile_source = hmclab/_version.py
10 | versionfile_build =
11 | tag_prefix =
12 | parentdir_prefix =
13 |
14 |
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
1 | import setuptools
2 | import versioneer
3 |
4 | with open("README.md", "r") as fh:
5 | long_description = fh.read()
6 |
7 | setuptools.setup(
8 | version=versioneer.get_version(),
9 | cmdclass=versioneer.get_cmdclass(),
10 | name="hmclab",
11 | author="Lars Gebraad, Andrea Zunino, Andreas Fichtner",
12 | author_email="lars.gebraad@erdw.ethz.ch",
13 | description="A numerical laboratory for Bayesian Seismology",
14 | long_description=long_description,
15 | long_description_content_type="text/markdown",
16 | url="https://github.com/larsgeb/hmclab",
17 | project_urls={
18 | "Bug Tracker": "https://github.com/larsgeb/hmclab/issues",
19 | },
20 | packages=setuptools.find_packages(),
21 | classifiers=[
22 | "Development Status :: 5 - Production/Stable",
23 | "Programming Language :: Python :: 3.9",
24 | "License :: OSI Approved :: BSD License",
25 | "Operating System :: OS Independent",
26 | ],
27 | python_requires=">=3.9",
28 | install_requires=[
29 | "dill==0.3.5.1",
30 | "numpy",
31 | "scipy",
32 | "termcolor",
33 | "matplotlib",
34 | "tqdm",
35 | "h5py",
36 | "pyyaml",
37 | "ipywidgets",
38 | "multiprocess",
39 | "tilemapbase",
40 | "pandas",
41 | "obspy",
42 | ],
43 | extras_require={
44 | "testing": [
45 | "pytest",
46 | "pytest-notebook",
47 | "pytest-harvest",
48 | "pytest-cov",
49 | "pytest-ordering",
50 | "nbformat",
51 | "black",
52 | ],
53 | "dev": [
54 | "pytest",
55 | "pytest-notebook",
56 | "pytest-harvest",
57 | "pytest-cov",
58 | "pytest-ordering",
59 | "nbformat",
60 | "black",
61 | "autoclasstoc",
62 | "codecov",
63 | "flake8",
64 | "furo",
65 | "ipywidgets",
66 | "nbconvert",
67 | "nbsphinx",
68 | "numpydoc",
69 | "pandoc",
70 | "pre-commit",
71 | "sphinx",
72 | "sphinx-git",
73 | "sphinxcontrib-bibtex",
74 | "versioneer",
75 | ],
76 | },
77 | )
78 |
--------------------------------------------------------------------------------
/tests/configurations/default_testing_configuration.ini:
--------------------------------------------------------------------------------
1 | [domain]
2 | nt = 3000; int
3 | nx_inner = 40; int
4 | nz_inner = 60; int
5 | nx_inner_boundary = 0; int, defines inner limits in which to compute kernels. Limits wavefield storage and computation burden.
6 | nz_inner_boundary = 10; int, defines inner limits in which to compute kernels. Limits wavefield storage and computation burden.
7 | dx = 1.249; float
8 | dz = 1.249; float
9 | dt = 0.00025; float
10 |
11 | [boundary]
12 | np_boundary = 25; int
13 | np_factor = 0.015; float
14 |
15 | [medium]
16 | scalar_rho = 1500.0; float
17 | scalar_vp = 2000.0; float
18 | scalar_vs = 800.0; float
19 |
20 | [sources]
21 | peak_frequency = 50.0; float
22 | n_sources = 4; int
23 | n_shots = 1; int
24 | source_timeshift = 0.005;
25 | delay_cycles_per_shot = 9; delay in cycles (i.e. 9 / peak_frequency)
26 | moment_angles = {47, 134, 165, 73} ;
27 | ix_sources = {5, 15, 25, 35};
28 | iz_sources = {5, 5, 5, 5};
29 | which_source_to_fire_in_which_shot = {{0, 1, 2, 3}};
30 |
31 | [receivers]
32 | nr = 9; int
33 | ix_receivers = {4, 8, 12, 16, 20, 24, 28, 32, 36};
34 | iz_receivers = {55, 55, 55, 55, 55, 55, 55, 55, 55};
35 |
36 | [inversion]
37 | snapshot_interval = 10; int, snapshots of forward wavefield to store.
38 |
39 | [basis]
40 | npx = 1
41 | npz = 1
42 |
43 | [output]
44 | observed_data_folder = ./tests
45 | stf_folder = ./tests
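
The values in this configuration carry inline annotations ("; int", "; float", and free-form notes) that the consuming code is expected to strip; in the tests below the file path is handed to ElasticFullWaveform2D / psvWave. Purely as an illustration of the layout (this is not the project's own parser, and the scalar() helper is hypothetical), a few scalar entries could be read like this:

    import configparser

    # Read the annotated ini file and strip the "; ..." notes by hand, since
    # configparser does not treat them as inline comments in this file.
    parser = configparser.ConfigParser()
    parser.read("tests/configurations/default_testing_configuration.ini")

    def scalar(section, key, cast=float):
        raw = parser.get(section, key)
        return cast(raw.split(";")[0].strip())

    nt = scalar("domain", "nt", int)          # 3000
    dt = scalar("domain", "dt")               # 0.00025
    f0 = scalar("sources", "peak_frequency")  # 50.0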
--------------------------------------------------------------------------------
/tests/conftest.py:
--------------------------------------------------------------------------------
1 | # Pytest configuration: registers the --plot option and the "plot" marker.
2 | import pytest
3 |
4 |
5 | def pytest_addoption(parser):
6 | parser.addoption(
7 | "--plot", action="store_true", default=False, help="run plot tests"
8 | )
9 |
10 |
11 | def pytest_configure(config):
12 | config.addinivalue_line("markers", "plot: mark test as plot to run")
13 |
14 |
15 | def pytest_collection_modifyitems(config, items):
16 | if config.getoption("--plot"):
17 | # --plot given in cli: do not skip plot tests
18 | return
19 | skip_plot = pytest.mark.skip(reason="need --plot option to run")
20 | for item in items:
21 | if "plot" in item.keywords:
22 | item.add_marker(skip_plot)
23 |
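
These hooks register a --plot command-line flag and a matching "plot" marker: marked tests are skipped unless pytest is invoked with --plot. A minimal sketch of how a test opts in (the test name here is hypothetical):

    import pytest

    @pytest.mark.plot  # skipped by default, executed with `pytest --plot`
    def test_gradient_scatter_plot():
        ...

The gradient-plot test in tests/test_mkl.py below uses exactly this marker.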
--------------------------------------------------------------------------------
/tests/test_break.py:
--------------------------------------------------------------------------------
1 | import hmclab as _hmclab
2 | import numpy as _numpy
3 | import os as _os
4 | import pytest as _pytest
5 | import time as _time
6 | import threading
7 | import _thread
8 | import uuid as _uuid
9 |
10 |
11 | def interruptor():
12 | # Simulate a CTRL+C event
13 | _time.sleep(0.3)
14 | _thread.interrupt_main()
15 |
16 |
17 | # Deliberately slow distribution, so we don't generate excessive amounts of samples
18 | class SlowStandardNormal(_hmclab.Distributions.StandardNormal1D):
19 | def misfit(self, m):
20 | _time.sleep(0.01)
21 | return super().misfit(m)
22 |
23 | def gradient(self, m):
24 | _time.sleep(0.01)
25 | return super().gradient(m)
26 |
27 |
28 | @_pytest.mark.parametrize("execution_number", range(10))
29 | def test_break(execution_number):
30 |
31 | # Create some arbitrary posterior
32 | prior = _hmclab.Distributions.Uniform([-1], [1])
33 | posterior = _hmclab.Distributions.BayesRule([prior, SlowStandardNormal()])
34 |
35 | unique_name = _uuid.uuid4().hex.upper()
36 | filename = f"temporary_file_{unique_name}.h5"
37 | if _os.path.exists(filename):
38 | _os.remove(filename) # pragma: no cover
39 |
40 | sampler = _hmclab.Samplers.HMC()
41 |
42 | # Start an interrupt timer
43 | x = threading.Thread(target=interruptor)
44 | x.start()
45 |
46 |     # Start sampling; the 0.01 s per-call delay far outlasts the 0.3 s interrupt timer
47 | sampler.sample(
48 | filename,
49 | posterior,
50 | proposals=10000,
51 | amount_of_steps=2,
52 | stepsize=0.03,
53 | disable_progressbar=True,
54 | )
55 |
56 | if not _os.path.exists(filename):
57 | _pytest.fail("Samples file wasn't created")
58 |
59 | # Assert that the last sample was written out correctly
60 | with _hmclab.Samples(filename=filename) as samples:
61 | assert not _numpy.all(samples[:, -1] == 0.0)
62 |
63 | _os.remove(filename)
64 |
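
For context, _thread.interrupt_main() raises a KeyboardInterrupt in the main thread, so the test simulates a user pressing CTRL+C roughly 0.3 s into sampling and then checks that the sampler still flushed its last samples to disk. A minimal sketch of the same pattern outside the test suite (the loop below is a stand-in, not hmclab code):

    import _thread
    import threading
    import time

    def interrupt_after(seconds):
        # Deliver a simulated CTRL+C to the main thread after `seconds`.
        time.sleep(seconds)
        _thread.interrupt_main()

    threading.Thread(target=interrupt_after, args=(0.3,), daemon=True).start()

    try:
        while True:  # stand-in for a long-running sampling loop
            time.sleep(0.01)
    except KeyboardInterrupt:
        print("Interrupted; this is where a sampler would write its final samples.")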
--------------------------------------------------------------------------------
/tests/test_copying.py:
--------------------------------------------------------------------------------
1 | """A collection of integrated tests.
2 | """
3 | from hmclab import Distributions
4 | import os as _os
5 | import copy as _copy
6 | import h5py as _h5py
7 | import uuid as _uuid
8 | import time as _time
9 |
10 |
11 | import numpy as _numpy
12 | import pytest as _pytest
13 |
14 | import hmclab as _hmclab
15 |
16 | from hmclab.Helpers.CustomExceptions import InvalidCaseError as _InvalidCaseError
17 |
18 | _ad = _hmclab.Distributions._AbstractDistribution
19 | _as = _hmclab.Samplers._AbstractSampler
20 |
21 | dimensions = [1, 2, 100]
22 | distribution_classes = [Distributions.Normal]
23 | sampler_classes = _as.__subclasses__()
24 | sampler_classes.remove(_hmclab.Samplers._AbstractVisualSampler)
25 | proposals = [10, 1000]
26 | autotuning = [True, False]
27 |
28 |
29 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
30 | def test_basic_copying(
31 | sampler_class: _as,
32 | ):
33 |
34 | sampler_instance = sampler_class()
35 |
36 | assert isinstance(sampler_instance, _as)
37 |
38 | sampler_instance_copy = _copy.deepcopy(sampler_instance)
39 |
40 | assert isinstance(sampler_instance_copy, _as)
41 |
42 | assert sampler_instance is not sampler_instance_copy
43 |
44 | assert type(sampler_instance) == type(sampler_instance_copy)
45 |
46 |
47 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
48 | @_pytest.mark.parametrize("distribution_class", distribution_classes)
49 | @_pytest.mark.parametrize("dimensions", dimensions)
50 | @_pytest.mark.parametrize("proposals", proposals)
51 | @_pytest.mark.parametrize("autotuning", autotuning)
52 | def test_samples_file(
53 | sampler_class: _as,
54 | distribution_class: _ad,
55 | dimensions: int,
56 | proposals: int,
57 | autotuning: bool,
58 | ):
59 |
60 | try:
61 | distribution: _ad = distribution_class.create_default(dimensions)
62 | except _InvalidCaseError:
63 | return _pytest.skip("Invalid case")
64 |
65 | sampler_instance = sampler_class()
66 |
67 | unique_name = _uuid.uuid4().hex.upper()
68 | filename = f"temporary_file_{unique_name}.npy"
69 |
70 | # Remove file before attempting to sample
71 | if _os.path.exists(filename):
72 | _os.remove(filename) # pragma: no cover
73 |
74 | sampler_instance.sample(
75 | filename,
76 | distribution,
77 | proposals=proposals,
78 | max_time=0.1,
79 | autotuning=autotuning,
80 | disable_progressbar=True,
81 | )
82 |
83 | # Check if the file was created. If it wasn't, fail
84 | if not _os.path.exists(filename):
85 | _pytest.fail("Samples file wasn't created")
86 |
87 | samples_written_expected = int(
88 | _numpy.floor(
89 | sampler_instance.current_proposal / sampler_instance.online_thinning
90 | )
91 | + 1
92 | )
93 |
94 | with _hmclab.Samples(filename) as samples:
95 | # Assert that the HDF array has the right dimensions
96 | assert samples.numpy.shape == (
97 | distribution.dimensions + 1,
98 | samples_written_expected,
99 | )
100 |
101 | # Assert that the actual written samples have the right dimensions
102 | assert samples[:, :].shape == (
103 | distribution.dimensions + 1,
104 | samples_written_expected,
105 | )
106 |
107 | # sampler_instance_copy = _copy.deepcopy(sampler_instance)
108 |
109 | # assert isinstance(sampler_instance_copy, _as)
110 |
111 | # assert sampler_instance is not sampler_instance_copy
112 |
113 | # assert type(sampler_instance) == type(sampler_instance_copy)
114 |
115 | _os.remove(f"{filename}.pkl")
116 | _os.remove(filename)
117 |
--------------------------------------------------------------------------------
/tests/test_diagnostic_mode.py:
--------------------------------------------------------------------------------
1 | """A collection of integrated tests.
2 | """
3 | from hmclab.Distributions import Normal
4 | import os as _os
5 | import hmclab as _hmclab
6 | import uuid as _uuid
7 |
8 |
9 | _ad = _hmclab.Distributions._AbstractDistribution
10 | _as = _hmclab.Samplers._AbstractSampler
11 |
12 |
13 | def test_basic_sampling():
14 |
15 | distribution = Normal.create_default(dimensions=10)
16 |
17 | sampler_instance = _hmclab.Samplers.HMC()
18 |
19 | unique_name = _uuid.uuid4().hex.upper()
20 | filename = f"temporary_file_{unique_name}.h5"
21 |
22 | # Remove file before attempting to sample
23 | if _os.path.exists(filename):
24 | _os.remove(filename) # pragma: no cover
25 |
26 | sampler_instance.sample(
27 | filename,
28 | distribution,
29 | diagnostic_mode=True,
30 | proposals=10000,
31 | online_thinning=1,
32 | max_time=0.1,
33 | autotuning=True,
34 | disable_progressbar=True,
35 | )
36 |
37 | print()
38 |
39 | timers = sampler_instance.get_diagnostics()
40 | for timer in timers:
41 | print(timer)
42 |
43 | # Remove the file
44 | _os.remove(filename)
45 |
--------------------------------------------------------------------------------
/tests/test_distributions.py:
--------------------------------------------------------------------------------
1 | """A collection of tests for likelihood functions.
2 | """
3 | import matplotlib.pyplot as _plt
4 | import numpy as _numpy
5 | import pytest as _pytest
6 |
7 | from hmclab import Distributions as _Distributions
8 | from hmclab.Helpers.CustomExceptions import InvalidCaseError as _InvalidCaseError
9 |
10 | dimensions = [1, 2, 5, 50]
11 | subclasses = _Distributions._AbstractDistribution.__subclasses__()
12 | deltas = [1e-10, 1e-2, -1e-10, -1e-2]
13 |
14 |
15 | @_pytest.mark.parametrize("pclass", subclasses)
16 | @_pytest.mark.parametrize("dimensions", dimensions)
17 | def test_creation(pclass: _Distributions._AbstractDistribution, dimensions: int):
18 | # Create the object
19 | try:
20 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
21 | dimensions
22 | )
23 | except _InvalidCaseError:
24 | return _pytest.skip("Invalid case")
25 |
26 |     # Check if it is a subtype of the abstract distribution class
27 |     assert issubclass(type(distribution), _Distributions._AbstractDistribution)
28 |
29 |     # Check if it has the right number of dimensions
30 | assert distribution.dimensions == dimensions
31 |
32 |
33 | @_pytest.mark.parametrize("pclass", subclasses)
34 | @_pytest.mark.parametrize("dimensions", dimensions)
35 | def test_generation(pclass: _Distributions._AbstractDistribution, dimensions: int):
36 | try:
37 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
38 | dimensions
39 | )
40 | except _InvalidCaseError:
41 | return _pytest.skip("Invalid case")
42 |
43 | try:
44 | samples = distribution.generate(100)
45 | except NotImplementedError:
46 |         return _pytest.skip("Not implemented case")
47 |
48 | assert samples.shape == (distribution.dimensions, 100)
49 |
50 |
51 | @_pytest.mark.parametrize("pclass", subclasses)
52 | @_pytest.mark.parametrize("dimensions", dimensions)
53 | def test_normalization(pclass: _Distributions._AbstractDistribution, dimensions: int):
54 | try:
55 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
56 | dimensions
57 | )
58 | except _InvalidCaseError:
59 | return _pytest.skip("Invalid case")
60 |
61 | try:
62 | distribution.normalize()
63 | except AttributeError:
64 | return _pytest.skip("Not normalizable, skipping")
65 |
66 |
67 | @_pytest.mark.parametrize("pclass", subclasses)
68 | @_pytest.mark.parametrize("dimensions", dimensions)
69 | def test_misfit(pclass: _Distributions._AbstractDistribution, dimensions: int):
70 | try:
71 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
72 | dimensions
73 | )
74 | except _InvalidCaseError:
75 | return _pytest.skip("Invalid case")
76 |
77 | location = _numpy.ones((dimensions, 1)) + _numpy.random.rand(1)
78 | misfit = distribution.misfit(location)
79 |
80 | assert (
81 | type(misfit) == float
82 | or type(misfit) == _numpy.float64
83 | or type(misfit) == _numpy.float32
84 | )
85 |
86 | return
87 |
88 |
89 | @_pytest.mark.parametrize("pclass", subclasses)
90 | @_pytest.mark.parametrize("dimensions", dimensions)
91 | def test_misfit_bounds(pclass: _Distributions._AbstractDistribution, dimensions: int):
92 | try:
93 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
94 | dimensions
95 | )
96 | except _InvalidCaseError:
97 | return _pytest.skip("Invalid case")
98 |
99 | lower_bounds = _numpy.ones((dimensions, 1))
100 | distribution.update_bounds(lower=lower_bounds)
101 |
102 | # Compute misfit above lower bounds
103 |
104 | location = _numpy.ones((dimensions, 1)) + _numpy.random.rand(1) + 0.1
105 | misfit = distribution.misfit(location)
106 |
107 | assert (
108 | type(misfit) == float
109 | or type(misfit) == _numpy.float64
110 | or type(misfit) == _numpy.float32
111 | )
112 |
113 | # Compute misfit below lower bounds
114 |
115 | location = _numpy.ones((dimensions, 1)) - _numpy.random.rand(1) - 0.1
116 | misfit = distribution.misfit(location)
117 |
118 | assert misfit == _numpy.inf
119 |
120 | # Create upper bounds
121 |
122 | upper_bounds = 3 * _numpy.ones((dimensions, 1))
123 | distribution.update_bounds(upper=upper_bounds)
124 |
125 | # Compute misfit between the two limits
126 |
127 | location = _numpy.ones((dimensions, 1)) + _numpy.random.rand(1) + 0.1
128 | misfit = distribution.misfit(location)
129 |
130 | assert (
131 | type(misfit) == float
132 | or type(misfit) == _numpy.float64
133 | or type(misfit) == _numpy.float32
134 | )
135 |
136 | # Compute misfit above the upper limit
137 |
138 | location = 3 * _numpy.ones((dimensions, 1)) + _numpy.random.rand(1) + 0.1
139 | misfit = distribution.misfit(location)
140 |
141 |     assert misfit == _numpy.inf, "Misfit above the upper bound should be infinite"
142 | return
143 |
144 |
145 | @_pytest.mark.parametrize("pclass", subclasses)
146 | @_pytest.mark.parametrize("dimensions", dimensions)
147 | def test_misfit_bounds_impossible(
148 | pclass: _Distributions._AbstractDistribution, dimensions: int
149 | ):
150 | try:
151 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
152 | dimensions
153 | )
154 | except _InvalidCaseError:
155 | return _pytest.skip("Invalid case")
156 |
157 | lower_bounds = _numpy.ones((dimensions, 1))
158 | upper_bounds = 3 * _numpy.ones((dimensions, 1))
159 |
160 | # Try to switch the bounds s.t. lower > upper
161 | try:
162 | distribution.update_bounds(lower=upper_bounds, upper=lower_bounds)
163 | except ValueError as e:
164 | # Assert that the exception is raised by the bounds, else re-raise
165 | if e.args[0] != "Bounds vectors are incompatible.":
166 | raise e
167 |
168 |
169 | @_pytest.mark.parametrize("pclass", subclasses)
170 | @_pytest.mark.parametrize("dimensions", dimensions)
171 | @_pytest.mark.parametrize("delta", deltas)
172 | def test_gradient(
173 | pclass: _Distributions._AbstractDistribution,
174 | dimensions: int,
175 | delta: float,
176 | results_bag,
177 | ):
178 | results_bag.test_type = "gradient"
179 | results_bag.class_name = pclass.__name__
180 |
181 | try:
182 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
183 | dimensions
184 | )
185 | except _InvalidCaseError:
186 | _pytest.skip("Invalid case") # pragma: no cover
187 |
188 | location = _numpy.ones((dimensions, 1)) + _numpy.random.rand(1)
189 | gradient = distribution.gradient(location)
190 |
191 | assert gradient.dtype == _numpy.dtype("float32") or gradient.dtype == _numpy.dtype(
192 | "float64"
193 | )
194 | assert gradient.shape == location.shape
195 |
196 | # Gradient test
197 | dot_product = (gradient.T @ location).item(0)
198 | misfit_1 = distribution.misfit(location)
199 | misfit_2 = distribution.misfit(location + delta * location)
200 | if (misfit_2 - misfit_1) != 0:
201 | relative_error = (misfit_2 - misfit_1 - dot_product * delta) / (
202 | misfit_2 - misfit_1
203 | )
204 | try:
205 | assert abs(relative_error) < 1e-2
206 | except AssertionError:
207 | _pytest.xfail("Error bigger than 10% in gradient test, not failing pytest.")
208 |
209 | results_bag.relative_error = relative_error
210 | else:
211 | assert _numpy.allclose(gradient, 0.0)
212 |
213 | results_bag.relative_error = 0
214 |
215 | return
216 |
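
The gradient test above is a first-order (directional-derivative) check: for a perturbation delta * m it compares the observed misfit change chi(m + delta * m) - chi(m) against the linear prediction delta * (gradient(m) . m), and checks that the relative discrepancy stays below 1 %, marking larger errors as expected failures rather than hard failures. A standalone sketch of the same idea, assuming only a misfit/gradient pair (the quadratic example is illustrative, not an hmclab distribution):

    import numpy as np

    def directional_derivative_check(misfit, gradient, m, delta=1e-6):
        """First-order Taylor test: chi(m + delta*m) - chi(m) ~ delta * (g.T @ m)."""
        g = gradient(m)
        predicted = delta * float(g.T @ m)
        actual = misfit(m + delta * m) - misfit(m)
        if actual == 0.0:
            return np.allclose(g, 0.0)
        return abs((actual - predicted) / actual) < 1e-2

    # chi(m) = 0.5 * ||m||^2 has gradient m, so the check should pass easily.
    m = np.ones((5, 1))
    assert directional_derivative_check(lambda x: 0.5 * float(x.T @ x), lambda x: x, m)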
--------------------------------------------------------------------------------
/tests/test_elasticFWI.py:
--------------------------------------------------------------------------------
1 | import os as _os
2 |
3 | import numpy as _numpy
4 | import pytest as _pytest
5 | import uuid as _uuid
6 |
7 |
8 | import hmclab as _hmclab
9 | from hmclab.Helpers.CustomExceptions import InvalidCaseError
10 | import sys
11 |
12 | installed = "psvWave" in sys.modules
13 |
14 |
15 | @_pytest.mark.skipif(
16 | not installed, reason="Skipping test for which required packages are not installed."
17 | )
18 | def test_elasticFWI_creation():
19 | likelihood = _hmclab.Distributions.ElasticFullWaveform2D.create_default(
20 | 4800,
21 | "tests/configurations/default_testing_configuration.ini",
22 | )
23 |
24 |     # This should fail with an InvalidCaseError
25 | try:
26 | likelihood.generate()
27 | except InvalidCaseError:
28 | pass
29 |
30 | _hmclab.Distributions.ElasticFullWaveform2D(likelihood, temperature=2)
31 |
32 | # This should fail with a ValueError
33 | try:
34 | _hmclab.Distributions.ElasticFullWaveform2D(42)
35 | except ValueError as e:
36 | print(e)
37 |
38 | # This should fail with a ValueError
39 | try:
40 | _hmclab.Distributions.ElasticFullWaveform2D(
41 | "tests/configurations/default_testing_configuration.ini",
42 | )
43 | except ValueError as e:
44 | print(e)
45 |
46 | ux, uz = likelihood.fdModel.get_observed_data()
47 | _hmclab.Distributions.ElasticFullWaveform2D(
48 | "tests/configurations/default_testing_configuration.ini",
49 | ux_obs=ux,
50 | uz_obs=uz,
51 | )
52 |
53 |
54 | @_pytest.mark.skipif(
55 | not installed, reason="Skipping test for which required packages are not installed."
56 | )
57 | def test_elasticFWI_gradient():
58 | likelihood = _hmclab.Distributions.ElasticFullWaveform2D.create_default(
59 | 4800,
60 | "tests/configurations/default_testing_configuration.ini",
61 | )
62 |
63 | likelihood.temperature = 1.0
64 |
65 | print(f"Free parameters: {likelihood.dimensions}")
66 |
67 | starting_model = likelihood.get_model_vector()
68 |
69 | X1 = likelihood.misfit(starting_model)
70 | print(f"Misfit 1: {X1:.2f}")
71 | g = likelihood.gradient(starting_model)
72 | X2 = likelihood.misfit(starting_model - 0.1 * g)
73 | print(f"Misfit 2: {X2:.2f}")
74 |
75 |     # The repeated call triggers the 'if self.forward_up_to_date' shortcut branch.
76 | X2 = likelihood.misfit(starting_model - 0.1 * g)
77 |
78 |
79 | @_pytest.mark.skipif(
80 | not installed, reason="Skipping test for which required packages are not installed."
81 | )
82 | def test_elasticFWI_sampling():
83 | likelihood = _hmclab.Distributions.ElasticFullWaveform2D.create_default(
84 | 4800,
85 | "tests/configurations/default_testing_configuration.ini",
86 | )
87 | likelihood.temperature = 100.0
88 |
89 | template = _numpy.ones((int(likelihood.dimensions / 3), 1))
90 |
91 | lower_vp = template * 1800
92 | lower_vs = template * 600
93 | lower_rho = template * 1300
94 | lower_bounds = _numpy.vstack((lower_vp, lower_vs, lower_rho))
95 |
96 | upper_vp = template * 2200
97 | upper_vs = template * 1000
98 | upper_rho = template * 1700
99 | upper_bounds = _numpy.vstack((upper_vp, upper_vs, upper_rho))
100 |
101 | prior = _hmclab.Distributions.Uniform(lower_bounds, upper_bounds)
102 |
103 | posterior = _hmclab.Distributions.BayesRule([prior, likelihood])
104 |
105 | unique_name = _uuid.uuid4().hex.upper()
106 | filename = f"temporary_file_{unique_name}.h5"
107 |
108 | # Remove file before attempting to sample
109 | if _os.path.exists(filename):
110 | _os.remove(filename) # pragma: no cover
111 |
112 | _hmclab.Samplers.HMC().sample(
113 | filename,
114 | posterior,
115 | proposals=10,
116 | amount_of_steps=2,
117 | initial_model=(upper_bounds + lower_bounds) / 2.0,
118 | stepsize=0.03,
119 | )
120 |
121 | # Remove the file
122 | _os.remove(filename)
123 |
--------------------------------------------------------------------------------
/tests/test_failed_sampling.py:
--------------------------------------------------------------------------------
1 | """A collection of integrated tests.
2 | """
3 | import os as _os
4 |
5 | import numpy as _numpy
6 | import pytest as _pytest
7 | import uuid as _uuid
8 |
9 | import hmclab as _hmclab
10 | from hmclab.Helpers.CustomExceptions import InvalidCaseError as _InvalidCaseError
11 |
12 | _ad = _hmclab.Distributions._AbstractDistribution
13 | _as = _hmclab.Samplers._AbstractSampler
14 |
15 | dimensions = [1, 2, 10]
16 | distribution_classes = _ad.__subclasses__()
17 | sampler_classes = _as.__subclasses__()
18 | sampler_classes.remove(_hmclab.Samplers._AbstractVisualSampler)
19 | proposals = [10, 1000] # , 731, 1500]
20 |
21 |
22 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
23 | @_pytest.mark.parametrize("distribution_class", distribution_classes)
24 | @_pytest.mark.parametrize("dimensions", dimensions)
25 | @_pytest.mark.parametrize("proposals", proposals)
26 | def test_basic_sampling(
27 | sampler_class: _as,
28 | distribution_class: _ad,
29 | dimensions: int,
30 | proposals: int,
31 | ):
32 | try:
33 | distribution: _ad = distribution_class.create_default(dimensions)
34 | except _InvalidCaseError:
35 | return _pytest.skip("Invalid case")
36 |
37 | sampler = sampler_class()
38 |
39 | assert isinstance(sampler, _as)
40 |
41 | unique_name = _uuid.uuid4().hex.upper()
42 | filename = f"temporary_file_{unique_name}.h5"
43 |
44 | # Remove file before attempting to sample
45 | if _os.path.exists(filename):
46 | _os.remove(filename) # pragma: no cover
47 |
48 | try:
49 | sampler.sample()
50 | except Exception as e:
51 | print(e)
52 |
53 | try:
54 | initial_model = distribution.generate()
55 |     except Exception:
56 | initial_model = _numpy.ones((distribution.dimensions, 1))
57 |
58 | try:
59 | sampler.sample(
60 | filename,
61 | distribution,
62 | proposals=proposals,
63 | initial_model=initial_model,
64 | max_time=0.1,
65 | mass_matrix=_hmclab.MassMatrices.Unit(434),
66 | disable_progressbar=True,
67 | )
68 | except Exception as e:
69 | print(e)
70 |
71 | sampler.sample(
72 | filename,
73 | distribution,
74 | proposals=proposals,
75 | initial_model=initial_model,
76 | online_thinning=10,
77 | max_time=0.1,
78 | overwrite_existing_file=True,
79 | disable_progressbar=True,
80 | )
81 |
82 | # Check if the file was created. If it wasn't, fail
83 | if not _os.path.exists(filename):
84 | _pytest.fail("Samples file wasn't created")
85 |
86 | # Remove the file
87 | _os.remove(filename)
88 |
--------------------------------------------------------------------------------
/tests/test_himmelblau.py:
--------------------------------------------------------------------------------
1 | import os as _os
2 |
3 | import numpy as _numpy
4 | import pytest as _pytest
5 | import uuid as _uuid
6 |
7 |
8 | import hmclab as _hmclab
9 |
10 | sampler_classes = _hmclab.Samplers._AbstractSampler.__subclasses__()
11 | sampler_classes.remove(_hmclab.Samplers._AbstractVisualSampler)
12 |
13 |
14 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
15 | def test_sampling_interrupt_himmelblau(sampler_class):
16 | dist = _hmclab.Distributions.Himmelblau(temperature=100)
17 |
18 | unique_name = _uuid.uuid4().hex.upper()
19 | filename = f"temporary_file_{unique_name}.h5"
20 |
21 | # Remove file before attempting to sample
22 | if _os.path.exists(filename):
23 | _os.remove(filename) # pragma: no cover
24 |
25 | sampler = sampler_class()
26 |
27 | sampler.sample(
28 | filename,
29 | dist,
30 | proposals=1000000,
31 | online_thinning=100,
32 | max_time=0.1,
33 | disable_progressbar=True,
34 | )
35 |
36 | samples_written_expected = int(
37 | _numpy.floor(sampler.current_proposal / sampler.online_thinning) + 1
38 | )
39 |
40 | with _hmclab.Samples(filename) as samples:
41 | assert samples[:, :].shape == (3, samples_written_expected)
42 |
43 | _os.remove(filename)
44 |
--------------------------------------------------------------------------------
/tests/test_integrators.py:
--------------------------------------------------------------------------------
1 | import os as _os
2 |
3 | import numpy as _numpy
4 | import uuid as _uuid
5 |
6 |
7 | import hmclab as _hmclab
8 |
9 |
10 | def test_leapfrog():
11 | dist = _hmclab.Distributions.Himmelblau(temperature=100)
12 |
13 | unique_name = _uuid.uuid4().hex.upper()
14 | filename = f"temporary_file_{unique_name}.h5"
15 |
16 | # Remove file before attempting to sample
17 | if _os.path.exists(filename):
18 | _os.remove(filename) # pragma: no cover
19 |
20 | sampler = _hmclab.Samplers.HMC()
21 |
22 | sampler.sample(
23 | filename,
24 | dist,
25 | proposals=1000,
26 | stepsize=1.0,
27 | integrator="lf",
28 | max_time=0.1,
29 | disable_progressbar=True,
30 | )
31 |
32 | samples_written_expected = int(
33 | _numpy.floor(sampler.current_proposal / sampler.online_thinning) + 1
34 | )
35 |
36 | print(f"Samples written to disk: {samples_written_expected}")
37 |
38 | with _hmclab.Samples(filename) as samples:
39 | assert samples[:, :].shape == (3, samples_written_expected)
40 |
41 | _os.remove(filename)
42 |
43 |
44 | def test_four_stage():
45 | dist = _hmclab.Distributions.Himmelblau(temperature=100)
46 |
47 | unique_name = _uuid.uuid4().hex.upper()
48 | filename = f"temporary_file_{unique_name}.h5"
49 |
50 | # Remove file before attempting to sample
51 | if _os.path.exists(filename):
52 | _os.remove(filename) # pragma: no cover
53 |
54 | sampler = _hmclab.Samplers.HMC()
55 |
56 | sampler.sample(
57 | filename,
58 | dist,
59 | proposals=1000,
60 | stepsize=3.0,
61 | integrator="4s",
62 | max_time=0.1,
63 | disable_progressbar=True,
64 | )
65 |
66 | samples_written_expected = int(
67 | _numpy.floor(sampler.current_proposal / sampler.online_thinning) + 1
68 | )
69 |
70 | print(f"Samples written to disk: {samples_written_expected}")
71 |
72 | with _hmclab.Samples(filename) as samples:
73 | assert samples[:, :].shape == (3, samples_written_expected)
74 |
75 | _os.remove(filename)
76 |
77 |
78 | def test_three_stage():
79 | dist = _hmclab.Distributions.Himmelblau(temperature=100)
80 |
81 | unique_name = _uuid.uuid4().hex.upper()
82 | filename = f"temporary_file_{unique_name}.h5"
83 |
84 | # Remove file before attempting to sample
85 | if _os.path.exists(filename):
86 | _os.remove(filename) # pragma: no cover
87 |
88 | sampler = _hmclab.Samplers.HMC()
89 |
90 | sampler.sample(
91 | filename,
92 | dist,
93 | proposals=1000,
94 | stepsize=3.0,
95 | integrator="3s",
96 | max_time=0.1,
97 | disable_progressbar=True,
98 | )
99 |
100 | samples_written_expected = int(
101 | _numpy.floor(sampler.current_proposal / sampler.online_thinning) + 1
102 | )
103 |
104 | print(f"Samples written to disk: {samples_written_expected}")
105 |
106 | with _hmclab.Samples(filename) as samples:
107 | assert samples[:, :].shape == (3, samples_written_expected)
108 |
109 | _os.remove(filename)
110 |
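
The integrator argument selects the symplectic scheme used to integrate the Hamiltonian trajectories: "lf" is standard leapfrog, while "3s" and "4s" are three- and four-stage splitting schemes that tolerate larger step sizes, which is consistent with the stepsize=3.0 used in those tests. As a reminder of the basic building block, one leapfrog step for H(m, p) = chi(m) + 0.5 * p^T M^-1 p looks roughly as follows (a sketch, not hmclab's internal implementation):

    import numpy as np

    def leapfrog_step(m, p, gradient, stepsize, mass_matrix_inv):
        """One leapfrog step: half momentum kick, full position drift, half kick."""
        p = p - 0.5 * stepsize * gradient(m)      # half kick
        m = m + stepsize * (mass_matrix_inv @ p)  # drift
        p = p - 0.5 * stepsize * gradient(m)      # half kick
        return m, p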
--------------------------------------------------------------------------------
/tests/test_mass_matrices.py:
--------------------------------------------------------------------------------
1 | """A collection of tests for mass matrices.
2 | """
3 | import numpy as _numpy
4 | import pytest as _pytest
5 | import os as _os
6 | import uuid as _uuid
7 |
8 |
9 | from hmclab import MassMatrices as _MassMatrices
10 | from hmclab import Samplers as _Samplers
11 | from hmclab import Distributions as _Distributions
12 | from hmclab.Distributions import Normal as _Normal
13 |
14 |
15 | dimensions = [1, 10, 100]
16 | subclasses = _MassMatrices._AbstractMassMatrix.__subclasses__()
17 |
18 |
19 | @_pytest.mark.parametrize("mmclass", subclasses)
20 | @_pytest.mark.parametrize("dimensions", dimensions)
21 | def test_creation(mmclass: _MassMatrices._AbstractMassMatrix, dimensions: int):
22 | # Create the object
23 | mass_matrix: _MassMatrices._AbstractMassMatrix = mmclass.create_default(dimensions)
24 |
25 | # Check if a subtype of mass matrices
26 | assert issubclass(type(mass_matrix), _MassMatrices._AbstractMassMatrix)
27 |
28 |     # Check if it has the right number of dimensions
29 | assert mass_matrix.dimensions == dimensions
30 |
31 | assert mass_matrix.matrix.shape == (dimensions, dimensions)
32 |
33 | return
34 |
35 |
36 | @_pytest.mark.parametrize("mmclass", subclasses)
37 | @_pytest.mark.parametrize("dimensions", dimensions)
38 | @_pytest.mark.parametrize("rng", [None, _numpy.random.default_rng()])
39 | def test_generate(
40 | mmclass: _MassMatrices._AbstractMassMatrix,
41 | dimensions: int,
42 | rng: _numpy.random.Generator,
43 | ):
44 | # Create the object
45 | mass_matrix: _MassMatrices._AbstractMassMatrix = mmclass.create_default(
46 | dimensions, rng=rng
47 | )
48 |
49 | # Generate momentum
50 | momentum = mass_matrix.generate_momentum()
51 |
52 | # Assert column vector shape
53 | assert momentum.shape == (dimensions, 1)
54 |
55 | # Assert float type
56 | assert momentum.dtype is _numpy.dtype("float")
57 |
58 | return
59 |
60 |
61 | @_pytest.mark.parametrize("mmclass", subclasses)
62 | @_pytest.mark.parametrize("dimensions", dimensions)
63 | def test_kinetic_energy(mmclass: _MassMatrices._AbstractMassMatrix, dimensions: int):
64 | # Create the object
65 | mass_matrix: _MassMatrices._AbstractMassMatrix = mmclass.create_default(dimensions)
66 |
67 | # Generate momentum
68 | momentum = mass_matrix.generate_momentum()
69 | kinetic_energy = mass_matrix.kinetic_energy(momentum)
70 |
71 | # Assert float type
72 | assert type(kinetic_energy) == float or type(kinetic_energy) == _numpy.float64
73 |
74 | # Try to compute kinetic energy which SHOULD fail
75 | momentum = _numpy.vstack((momentum, _numpy.ones((1, 1))))
76 | with _pytest.raises(ValueError):
77 | kinetic_energy = mass_matrix.kinetic_energy(momentum)
78 |
79 | return
80 |
81 |
82 | @_pytest.mark.parametrize("mmclass", subclasses)
83 | @_pytest.mark.parametrize("dimensions", dimensions)
84 | @_pytest.mark.parametrize("stepsize_delta", [1e-10, 1e-5, 1e-2, -1e-10, -1e-5, -1e-2])
85 | def test_kinetic_energy_gradient(
86 | mmclass: _MassMatrices._AbstractMassMatrix, dimensions: int, stepsize_delta: float
87 | ):
88 | # Create the object
89 | mass_matrix: _MassMatrices._AbstractMassMatrix = mmclass.create_default(dimensions)
90 |
91 | # Generate momentum
92 | momentum = mass_matrix.generate_momentum()
93 | kinetic_energy_gradient = mass_matrix.kinetic_energy_gradient(momentum)
94 |
95 | # Assert column vector shape
96 | assert kinetic_energy_gradient.shape == (dimensions, 1)
97 |
98 | # Assert float type
99 | assert kinetic_energy_gradient.dtype is _numpy.dtype("float")
100 |
101 | # Gradient test
102 | dot_product = (kinetic_energy_gradient.T @ momentum).item(0)
103 |
104 | kinetic_1 = mass_matrix.kinetic_energy(momentum)
105 | kinetic_2 = mass_matrix.kinetic_energy(momentum + stepsize_delta * momentum)
106 | if (kinetic_2 - kinetic_1) != 0:
107 | relative_error = (kinetic_2 - kinetic_1 - dot_product * stepsize_delta) / (
108 | kinetic_2 - kinetic_1
109 | )
110 | assert relative_error < 1e-2
111 | else:
112 | assert _numpy.allclose(kinetic_energy_gradient, 0.0) # pragma: no cover
113 |
114 | # Try to compute kinetic energy gradient which SHOULD fail
115 | momentum = _numpy.vstack((momentum, _numpy.ones((1, 1))))
116 | with _pytest.raises(ValueError):
117 | kinetic_energy_gradient = mass_matrix.kinetic_energy_gradient(momentum)
118 |
119 | return
120 |
121 |
122 | @_pytest.mark.parametrize("dimensions", dimensions)
123 | def test_basic_sampling(
124 | dimensions: int,
125 | ):
126 | means = _numpy.zeros((dimensions, 1))
127 | covariance = _numpy.eye(dimensions)
128 | distribution = _Normal(means, covariance)
129 |
130 | sampler_instance = _Samplers.HMC()
131 |
132 | unique_name = _uuid.uuid4().hex.upper()
133 | filename = f"temporary_file_{unique_name}.h5"
134 |
135 | # Remove file before attempting to sample
136 | if _os.path.exists(filename):
137 |         _os.remove(filename)  # pragma: no cover
138 |
139 | proposals = 1000
140 |
141 | sampler_instance.sample(
142 | filename,
143 | distribution,
144 | proposals=proposals,
145 | online_thinning=10,
146 | max_time=0.1,
147 | autotuning=False,
148 | disable_progressbar=True,
149 | )
150 | if sampler_instance.amount_of_writes > 0:
151 | # 10 percent burn_in
152 | burn_in = int(0.1 * sampler_instance.amount_of_writes)
153 | sampler_instance.load_results(burn_in=burn_in)
154 |
155 | # Check if the file was created. If it wasn't, fail
156 | if not _os.path.exists(filename):
157 | _pytest.fail("Samples file wasn't created") # pragma: no cover
158 |
159 | # Remove the file
160 | _os.remove(filename)
161 |
162 |
163 | def test_full_massmatrix():
164 | """Test all parts of the full mass matrix that aren't hit yet."""
165 | non_symmetric_matrix = _numpy.tri(10)
166 |
167 | with _pytest.raises(AssertionError):
168 | _MassMatrices.Full(non_symmetric_matrix)
169 |
--------------------------------------------------------------------------------
/tests/test_mkl.py:
--------------------------------------------------------------------------------
1 | """A collection of tests for likelihood functions.
2 | """
3 | import matplotlib.pyplot as _plt
4 | import numpy as _numpy
5 | import pytest as _pytest
6 |
7 | from hmclab.Distributions.LinearMatrix import (
8 | _LinearMatrix_sparse_forward_simple_covariance,
9 | )
10 |
11 | dimensions = [100, 10000]
12 | deltas = [1e-10, 1e-4, 1e-2, -1e-10, -1e-4, -1e-2]
13 | dtype = [_numpy.dtype("float64"), _numpy.dtype("float32")]
14 |
15 | use_mkl = [True, False]
16 |
17 | skip = False
18 | try:
19 |     from ctypes import cdll
20 |     mkl = cdll.LoadLibrary("libmkl_rt.so")
21 | except OSError:
22 |     skip = True
23 |
24 |
25 | @_pytest.mark.skipif(skip, reason="MKL not installed")
26 | @_pytest.mark.parametrize("dimensions", dimensions)
27 | @_pytest.mark.parametrize("use_mkl", use_mkl)
28 | @_pytest.mark.parametrize("dtype", dtype)
29 | def test_creation(dimensions: int, use_mkl: bool, dtype):
30 | # Create the object
31 | distribution = _LinearMatrix_sparse_forward_simple_covariance.create_default(
32 | dimensions, use_mkl=use_mkl, dtype=dtype
33 | )
34 |
35 | assert distribution.use_mkl == use_mkl
36 |
37 |     # Check if it has the right number of dimensions
38 | assert distribution.dimensions == dimensions
39 |
40 | return
41 |
42 |
43 | @_pytest.mark.skipif(skip, reason="MKL not installed")
44 | @_pytest.mark.parametrize("dimensions", dimensions)
45 | @_pytest.mark.parametrize("use_mkl", use_mkl)
46 | @_pytest.mark.parametrize("dtype", dtype)
47 | def test_misfit(dimensions: int, use_mkl: bool, dtype):
48 | distribution = _LinearMatrix_sparse_forward_simple_covariance.create_default(
49 | dimensions, use_mkl=use_mkl, dtype=dtype
50 | )
51 |
52 | assert distribution.use_mkl == use_mkl
53 |
54 | location = _numpy.ones((dimensions, 1)) + _numpy.random.rand(1)
55 |
56 | misfit = distribution.misfit(location)
57 |
58 | assert type(misfit) == dtype or type(misfit) == _numpy.dtype("float64")
59 |
60 | return
61 |
62 |
63 | @_pytest.mark.skipif(skip, reason="MKL not installed")
64 | @_pytest.mark.parametrize("dimensions", dimensions)
65 | @_pytest.mark.parametrize("use_mkl", use_mkl)
66 | @_pytest.mark.parametrize("dtype", dtype)
67 | def test_misfit_bounds(dimensions: int, use_mkl: bool, dtype):
68 | distribution = _LinearMatrix_sparse_forward_simple_covariance.create_default(
69 | dimensions, use_mkl=use_mkl, dtype=dtype
70 | )
71 |
72 | assert distribution.use_mkl == use_mkl
73 |
74 | lower_bounds = _numpy.ones((dimensions, 1))
75 | distribution.update_bounds(lower=lower_bounds)
76 |
77 | # Compute misfit above lower bounds
78 |
79 | location = _numpy.ones((dimensions, 1)) + _numpy.random.rand(1) + 0.1
80 | misfit = distribution.misfit(location)
81 |
82 | assert type(misfit) == dtype or type(misfit) == _numpy.dtype("float64")
83 |
84 | # Compute misfit below lower bounds
85 |
86 | location = _numpy.ones((dimensions, 1)) - _numpy.random.rand(1) - 0.1
87 | misfit = distribution.misfit(location)
88 |
89 | assert misfit == _numpy.inf
90 |
91 | # Create upper bounds
92 |
93 | upper_bounds = 3 * _numpy.ones((dimensions, 1))
94 | distribution.update_bounds(upper=upper_bounds)
95 |
96 | # Compute misfit between the two limits
97 |
98 | location = _numpy.ones((dimensions, 1)) + _numpy.random.rand(1) + 0.1
99 | misfit = distribution.misfit(location)
100 |
101 | assert type(misfit) == dtype or type(misfit) == _numpy.dtype("float64")
102 |
103 | # Compute misfit above the upper limit
104 |
105 | location = 3 * _numpy.ones((dimensions, 1)) + _numpy.random.rand(1) + 0.1
106 | misfit = distribution.misfit(location)
107 |
108 |     assert misfit == _numpy.inf, "Misfit above the upper bound should be infinite"
109 | return
110 |
111 |
112 | @_pytest.mark.skipif(skip, reason="MKL not installed")
113 | @_pytest.mark.parametrize("dimensions", dimensions)
114 | @_pytest.mark.parametrize("use_mkl", use_mkl)
115 | @_pytest.mark.parametrize("dtype", dtype)
116 | def test_misfit_bounds_impossible(dimensions: int, use_mkl: bool, dtype):
117 | distribution = _LinearMatrix_sparse_forward_simple_covariance.create_default(
118 | dimensions, use_mkl=use_mkl, dtype=dtype
119 | )
120 |
121 | assert distribution.use_mkl == use_mkl
122 |
123 | lower_bounds = _numpy.ones((dimensions, 1))
124 | upper_bounds = 3 * _numpy.ones((dimensions, 1))
125 |
126 | # Try to switch the bounds s.t. lower > upper
127 | try:
128 | distribution.update_bounds(lower=upper_bounds, upper=lower_bounds)
129 | except ValueError as e:
130 | # Assert that the exception is raised by the bounds
131 | assert e.args[0] == "Bounds vectors are incompatible."
132 |
133 | # Expected fail
134 | _pytest.xfail("Impossible test case, failure is required")
135 |
136 |
137 | @_pytest.mark.skipif(skip, reason="MKL not installed")
138 | @_pytest.mark.parametrize("dimensions", dimensions)
139 | @_pytest.mark.parametrize("delta", deltas)
140 | @_pytest.mark.parametrize("use_mkl", use_mkl)
141 | @_pytest.mark.parametrize("dtype", dtype)
142 | def test_gradient(dimensions: int, delta: float, results_bag, use_mkl: bool, dtype):
143 | results_bag.test_type = "gradient"
144 | results_bag.class_name = "Using mkl" if use_mkl else "Not using mkl"
145 |
146 | distribution = _LinearMatrix_sparse_forward_simple_covariance.create_default(
147 | dimensions, use_mkl=use_mkl, dtype=dtype
148 | )
149 |
150 | assert distribution.use_mkl == use_mkl
151 |
152 | location = (_numpy.ones((dimensions, 1)) + _numpy.random.rand(1)).astype(dtype)
153 | gradient = distribution.gradient(location)
154 |
155 | assert gradient.dtype == _numpy.dtype("float32") or gradient.dtype == _numpy.dtype(
156 | "float64"
157 | )
158 | assert gradient.shape == location.shape
159 |
160 | # Gradient test
161 | dot_product = (gradient.T @ location).item(0)
162 | misfit_1 = distribution.misfit(location)
163 | misfit_2 = distribution.misfit(location + delta * location)
164 | if (misfit_2 - misfit_1) != 0:
165 | relative_error = (misfit_2 - misfit_1 - dot_product * delta) / (
166 | misfit_2 - misfit_1
167 | )
168 | try:
169 | assert abs(relative_error) < 1e-2
170 | except AssertionError:
171 | _pytest.xfail("Error bigger than 10% in gradient test, not failing pytest.")
172 |
173 | results_bag.relative_error = relative_error
174 | elif _numpy.allclose(location + delta * location, location):
175 | # This means that the delta in location is so small that we are up to machine
176 | # precision in the same point
177 | results_bag.relative_error = 0
178 | else:
179 | assert _numpy.allclose(gradient, 0.0)
180 | results_bag.relative_error = 0
181 |
182 | return
183 |
184 |
185 | @_pytest.mark.skipif(skip, reason="MKL not installed")
186 | @_pytest.mark.plot
187 | def test_gradient_plots(module_results_df):
188 | """
189 | Shows that the `module_results_df` fixture already contains what you need
190 | """
191 | # drop the 'pytest_obj' column
192 | module_results_df.drop("pytest_obj", axis=1, inplace=True)
193 |
194 | for name, df in module_results_df[
195 | module_results_df.test_type == "gradient"
196 | ].groupby("class_name"):
197 | for dimensions, df_dim in df.groupby("dimensions"):
198 | if not _numpy.all(_numpy.isnan(df_dim.relative_error)):
199 | _plt.scatter(
200 | df_dim.delta,
201 | _numpy.abs(df_dim.relative_error),
202 | alpha=0.5,
203 | label=dimensions,
204 | )
205 | ax = _plt.gca()
206 | _plt.grid(True)
207 | _plt.xlim([-2e-2, 2e-2])
208 | _plt.ylim([-1e-7, 1e0])
209 | ax.set_xscale("symlog", linthreshx=1e-11)
210 | ax.set_yscale("symlog", linthreshy=1e-8)
211 | _plt.legend()
212 | _plt.title(name)
213 | ax.set_xticks([-1e-3, -1e-6, -1e-9, 0, 1e-9, 1e-6, 1e-3])
214 |
215 | _plt.show()
216 |
--------------------------------------------------------------------------------
/tests/test_notebooks.py:
--------------------------------------------------------------------------------
1 | import glob
2 | import nbformat
3 | from black import format_str, FileMode
4 | import pytest
5 | from pytest_notebook.nb_regression import NBRegressionFixture
6 | import warnings
7 |
8 | # Setup the fixture for testing notebooks using pytest_notebook
9 | fixture = NBRegressionFixture(
10 | exec_timeout=600,
11 | diff_ignore=("/cells/*/outputs/", "/cells/*/execution_count", "/metadata"),
12 | )
13 | fixture.diff_color_words = False
14 |
15 | # Find all notebook files
16 | notebooks = glob.glob("notebooks/tutorials/*.ipynb")
17 |
18 | @pytest.mark.parametrize("notebook_fh", notebooks)
19 | def test_notebook(notebook_fh):
20 |
21 | # Clean notebook
22 | notebook = nbformat.read(notebook_fh, as_version=nbformat.NO_CONVERT)
23 |
24 | notebook.metadata = {}
25 | notebook.cells = [cell for cell in notebook.cells if len(cell["source"]) > 0]
26 |
27 | # Clear cells
28 | for cell in notebook.cells:
29 | cell["metadata"] = {}
30 | if cell["cell_type"] == "code":
31 | cell["execution_count"] = None
32 | cell["outputs"] = []
33 |
34 | # Format the cell using black, removing trailing newlines
35 | cell.source = format_str(
36 | cell.source.replace("%matplotlib notebook", ""), mode=FileMode()
37 | ).rstrip()
38 |
39 | # Write to file
40 | nbformat.write(notebook, notebook_fh)
41 |
42 | # Test notebooks
43 | with warnings.catch_warnings():
44 | warnings.filterwarnings("ignore", category=DeprecationWarning)
45 | result = fixture.check(notebook_fh)
46 |
47 |     if notebook_fh != "notebooks/tutorials/1 - Tuning Hamiltonian Monte Carlo.ipynb":
48 | # Write out final version to original file if all tests succeeded
49 | nbformat.write(nb=result.nb_final, fp=notebook_fh)
50 |
--------------------------------------------------------------------------------
/tests/test_optimization.py:
--------------------------------------------------------------------------------
1 | """A collection of optimization tests.
2 | """
3 | from re import A
4 | import pytest as _pytest, numpy as _numpy
5 |
6 | import hmclab as _hmclab
7 | from hmclab.Helpers.CustomExceptions import InvalidCaseError as _InvalidCaseError
8 | from hmclab.Optimizers import gradient_descent as _gradient_descent
9 |
10 | _ad = _hmclab.Distributions._AbstractDistribution
11 |
12 | distribution_classes = _ad.__subclasses__()
13 | optimizer_methods = [_gradient_descent]
14 |
15 |
16 | @_pytest.mark.parametrize("optimizer_method", optimizer_methods)
17 | @_pytest.mark.parametrize("distribution_class", distribution_classes)
18 | @_pytest.mark.parametrize("dimensions", [1, 2, 5, 100])
19 | @_pytest.mark.parametrize("iterations", [1, 100])
20 | @_pytest.mark.parametrize("epsilon", [0.1])
21 | @_pytest.mark.parametrize("strictly_monotonic", [True, False])
22 | def test_basic_optimization(
23 | optimizer_method,
24 | distribution_class: _ad,
25 | dimensions: int,
26 | iterations: int,
27 | epsilon: float,
28 | strictly_monotonic: bool,
29 | ):
30 | """Test optimization algorithms in general"""
31 |
32 | try:
33 | distribution: _ad = distribution_class.create_default(dimensions)
34 | except _InvalidCaseError:
35 | return _pytest.skip("Invalid case")
36 |
37 | try:
38 | initial_model = distribution.generate()
39 |     except Exception:
40 | initial_model = _numpy.ones((distribution.dimensions, 1))
41 |
42 | m, x, ms, xs = optimizer_method(
43 | target=distribution,
44 | initial_model=initial_model,
45 | iterations=iterations,
46 | epsilon=epsilon,
47 | strictly_monotonic=strictly_monotonic,
48 | disable_progressbar=True,
49 | )
50 |
51 | assert len(ms) == len(xs)
52 |
53 | if strictly_monotonic:
54 | assert xs[0] >= xs[-1]
55 |
56 |
57 | @_pytest.mark.parametrize("distribution_class", distribution_classes)
58 | @_pytest.mark.parametrize("dimensions", [1, 2, 5, 100])
59 | @_pytest.mark.parametrize("iterations", [1, 100])
60 | @_pytest.mark.parametrize("epsilon", [0.1])
61 | @_pytest.mark.parametrize("regularization", [1e-3, 1e0, 1e3])
62 | @_pytest.mark.parametrize("strictly_monotonic", [True, False])
63 | def test_gradient_descent(
64 | distribution_class: _ad,
65 | dimensions: int,
66 | iterations: int,
67 | regularization: float,
68 | epsilon: float,
69 | strictly_monotonic: bool,
70 | ):
71 | """Test all settings of the gradient descent algorithm"""
72 |
73 | try:
74 | distribution: _ad = distribution_class.create_default(dimensions)
75 | except _InvalidCaseError:
76 | return _pytest.skip("Invalid case")
77 |
78 | try:
79 | initial_model = distribution.generate()
80 |     except Exception:
81 | initial_model = _numpy.ones((distribution.dimensions, 1))
82 |
83 | m, x, ms, xs = _gradient_descent(
84 | target=distribution,
85 | initial_model=initial_model,
86 | iterations=iterations,
87 | epsilon=epsilon,
88 | regularization=regularization,
89 | strictly_monotonic=strictly_monotonic,
90 | disable_progressbar=True,
91 | )
92 |
93 | assert m.shape == (distribution.dimensions, 1)
94 | for i_m in ms:
95 | assert m.shape == i_m.shape
96 | assert isinstance(x, (float, _numpy.floating))
97 | assert len(ms) == len(xs)
98 | if strictly_monotonic:
99 | assert xs[0] >= xs[-1]
100 |
--------------------------------------------------------------------------------
/tests/test_pickling.py:
--------------------------------------------------------------------------------
1 | import pytest as _pytest
2 | import dill as _dill
3 | from hmclab import Distributions as _Distributions
4 | from hmclab.Helpers.CustomExceptions import InvalidCaseError as _InvalidCaseError
5 | import numpy as _numpy
6 | import pytest as _pytest
7 | import hmclab as _hmclab
8 |
9 | _ad = _hmclab.Distributions._AbstractDistribution
10 | _as = _hmclab.Samplers._AbstractSampler
11 |
12 |
13 | dimensions = [1, 2, 5, 50]
14 | distribution_classes = _ad.__subclasses__()
15 | sampler_classes = _as.__subclasses__()
16 | sampler_classes.remove(_hmclab.Samplers._AbstractVisualSampler)
17 |
18 |
19 | @_pytest.mark.parametrize("pclass", distribution_classes)
20 | @_pytest.mark.parametrize("dimensions", dimensions)
21 | def test_creation(pclass: _Distributions._AbstractDistribution, dimensions: int):
22 | # Create the object
23 | try:
24 | distribution: _Distributions._AbstractDistribution = pclass.create_default(
25 | dimensions
26 | )
27 | except _InvalidCaseError:
28 | return _pytest.skip("Invalid case")
29 |
30 |     # Check if it is a subtype of the abstract distribution class
31 |     assert issubclass(type(distribution), _Distributions._AbstractDistribution)
32 |
33 |     # Check if it has the right number of dimensions
34 | assert distribution.dimensions == dimensions
35 |
36 | try:
37 | assert _dill.pickles(distribution)
38 | except AssertionError as e:
39 | print(_dill.detect.badtypes(distribution, depth=1))
40 | raise e
41 |
42 |
43 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
44 | def test_basic_sampling(
45 | sampler_class: _as,
46 | ):
47 | sampler_instance = sampler_class()
48 |
49 | assert isinstance(sampler_instance, _as)
50 |
51 | try:
52 | assert _dill.pickles(sampler_instance)
53 | except AssertionError as e:
54 | print(_dill.detect.badtypes(sampler_instance, depth=1))
55 | raise e
56 |
--------------------------------------------------------------------------------
/tests/test_samples.py:
--------------------------------------------------------------------------------
1 | """A collection of integrated tests.
2 | """
3 | from hmclab.Samples import combine_samples as _combine_samples
4 | import os as _os
5 |
6 | import numpy as _numpy
7 | import pytest as _pytest
8 | import uuid as _uuid
9 |
10 |
11 | import hmclab as _hmclab
12 | from hmclab.Helpers.CustomExceptions import InvalidCaseError as _InvalidCaseError
13 |
14 | _ad = _hmclab.Distributions._AbstractDistribution
15 | _as = _hmclab.Samplers._AbstractSampler
16 |
17 | dimensions = [1, 2, 50]
18 | distribution_classes = _ad.__subclasses__()
19 | sampler_classes = [_hmclab.Samplers.RWMH] # Doesn't impact the test
20 | proposals = [5, 100]
21 | extensions = ["h5", "npy"]
22 |
23 |
24 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
25 | @_pytest.mark.parametrize("distribution_class", distribution_classes)
26 | @_pytest.mark.parametrize("dimensions", dimensions)
27 | @_pytest.mark.parametrize("proposals", proposals)
28 | @_pytest.mark.parametrize("extension", extensions)
29 | def test_samples_detail(
30 | sampler_class: _as,
31 | distribution_class: _ad,
32 | dimensions: int,
33 | proposals: int,
34 | extension: str,
35 | ):
36 | try:
37 | distribution: _ad = distribution_class.create_default(dimensions)
38 | except _InvalidCaseError:
39 | return _pytest.skip("Invalid case")
40 |
41 | sampler_instance = sampler_class()
42 |
43 | assert isinstance(sampler_instance, _as)
44 |
45 | unique_name = _uuid.uuid4().hex.upper()
46 | filename = f"temporary_file_{unique_name}.{extension}"
47 |
48 | # Remove file before attempting to sample
49 | if _os.path.exists(filename):
50 | _os.remove(filename) # pragma: no cover
51 |
52 | try:
53 | initial_model = distribution.generate()
54 |     except Exception:
55 | initial_model = _numpy.ones((distribution.dimensions, 1))
56 |
57 | sampler_instance.sample(
58 | filename,
59 | distribution,
60 | proposals=proposals,
61 | initial_model=initial_model,
62 | max_time=0.1,
63 | disable_progressbar=True,
64 | )
65 |
66 | # Check if the file was created. If it wasn't, fail
67 | if not _os.path.exists(filename):
68 | _pytest.fail("Samples file wasn't created") # pragma: no cover
69 |
70 | with _hmclab.Samples(filename) as samples:
71 | samples.print_details()
72 |
73 | # Remove the file
74 | _os.remove(filename)
75 | if extension == "npy":
76 | _os.remove(f"{filename}.pkl")
77 |
78 |
79 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
80 | @_pytest.mark.parametrize("distribution_class", distribution_classes)
81 | @_pytest.mark.parametrize("dimensions", dimensions)
82 | @_pytest.mark.parametrize("proposals", proposals)
83 | @_pytest.mark.parametrize("extension", extensions)
84 | def test_samples_concat(
85 | sampler_class: _as,
86 | distribution_class: _ad,
87 | dimensions: int,
88 | proposals: int,
89 | extension: str,
90 | ):
91 | try:
92 | distribution: _ad = distribution_class.create_default(dimensions)
93 | except _InvalidCaseError:
94 | return _pytest.skip("Invalid case")
95 |
96 | sampler_instance = sampler_class()
97 |
98 | assert isinstance(sampler_instance, _as)
99 |
100 | filename_1 = f"temporary_file_1.{extension}"
101 | filename_2 = f"temporary_file_2.{extension}"
102 | filenames = [filename_1, filename_2]
103 |
104 | # Remove file before attempting to sample
105 | for filename in filenames:
106 | if _os.path.exists(filename):
107 | _os.remove(filename) # pragma: no cover
108 |
109 | try:
110 | initial_model = distribution.generate()
111 |     except Exception:
112 | initial_model = _numpy.ones((distribution.dimensions, 1))
113 |
114 |     # Sample into each file so that they can be combined afterwards
115 |     for filename in filenames:
116 |         sampler_instance.sample(
117 |             filename,
118 |             distribution,
119 |             initial_model=initial_model,
120 |             proposals=proposals,
121 |             max_time=0.1,
122 |             disable_progressbar=True,
123 |         )
124 |         # Check if the file was created. If it wasn't, fail
125 |         if not _os.path.exists(filename):
126 |             _pytest.fail("Samples file wasn't created")  # pragma: no cover
127 | combined_samples = _combine_samples(filenames)
128 |
129 | # The sample files also contain the misfit, so + 1
130 | assert combined_samples.shape[0] == distribution.dimensions + 1
131 |
132 | for filename in filenames:
133 | # Remove the file
134 | _os.remove(filename)
135 | if extension == "npy":
136 | _os.remove(f"{filename}.pkl")
137 |
138 |
139 | def test_samples_exception_cases():
140 | filename = "non_existent_file.h5"
141 |
142 | try:
143 | with _hmclab.Samples(filename) as _:
144 | pass # pragma: no cover
145 | except FileNotFoundError:
146 | pass
147 |
--------------------------------------------------------------------------------
/tests/test_sampling_reproducibility.py:
--------------------------------------------------------------------------------
1 | """A collection of integrated tests.
2 | """
3 | from hmclab import Distributions as _Distributions
4 | import os as _os
5 |
6 | import numpy as _numpy
7 | import pytest as _pytest
8 |
9 | import hmclab as _hmclab
10 | from hmclab.Helpers.CustomExceptions import InvalidCaseError as _InvalidCaseError
11 |
12 | _ad = _hmclab.Distributions._AbstractDistribution
13 | _as = _hmclab.Samplers._AbstractSampler
14 |
15 | dimensions = [1, 2, 3]
16 | distribution_classes = _Distributions._AbstractDistribution.__subclasses__()
17 | sampler_classes = _as.__subclasses__()
18 | sampler_classes.remove(_hmclab.Samplers._AbstractVisualSampler)
19 |
20 | proposals = [3, 10]
21 | autotuning = [True, False]
22 |
23 |
24 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
25 | @_pytest.mark.parametrize("distribution_class", distribution_classes)
26 | @_pytest.mark.parametrize("dimensions", dimensions)
27 | @_pytest.mark.parametrize("proposals", proposals)
28 | @_pytest.mark.parametrize("autotuning", autotuning)
29 | def test_samples_file(
30 | sampler_class: _as,
31 | distribution_class: _ad,
32 | dimensions: int,
33 | proposals: int,
34 | autotuning: bool,
35 | ):
36 | try:
37 | distribution: _ad = distribution_class.create_default(dimensions)
38 | except _InvalidCaseError:
39 | return _pytest.skip("Invalid case")
40 |
41 | sampler_instance_1 = sampler_class(seed=1)
42 |
43 | filename_1 = "temporary_file_1.h5"
44 |
45 | sampler_instance_2 = sampler_class(seed=1)
46 |
47 | filename_2 = "temporary_file_2.h5"
48 |
49 | # Remove file before attempting to sample
50 | if _os.path.exists(filename_1):
51 | _os.remove(filename_1)
52 | if _os.path.exists(filename_2):
53 | _os.remove(filename_2)
54 |
55 | try:
56 | initial_model = distribution.generate()
57 |     except Exception:
58 | initial_model = _numpy.ones((distribution.dimensions, 1))
59 |
60 | sampler_instance_1.sample(
61 | filename_1,
62 | distribution,
63 | proposals=proposals,
64 | initial_model=initial_model,
65 | max_time=0.1,
66 | autotuning=autotuning,
67 | disable_progressbar=True,
68 | )
69 | sampler_instance_2.sample(
70 | filename_2,
71 | distribution,
72 | proposals=proposals,
73 | initial_model=initial_model,
74 | max_time=0.1,
75 | autotuning=autotuning,
76 | disable_progressbar=True,
77 | )
78 |
79 | # Check if the file was created. If it wasn't, fail
80 | if not _os.path.exists(filename_1) or not _os.path.exists(filename_2):
81 | _pytest.fail("Samples file wasn't created")
82 |
83 | samples_written_expected_1 = int(
84 | _numpy.floor(
85 | sampler_instance_1.current_proposal / sampler_instance_1.online_thinning
86 | )
87 | + 1
88 | )
89 |
90 | samples_written_expected_2 = int(
91 | _numpy.floor(
92 | sampler_instance_2.current_proposal / sampler_instance_2.online_thinning
93 | )
94 | + 1
95 | )
96 |
97 | with _hmclab.Samples(filename_1) as samples_1, _hmclab.Samples(
98 | filename_2
99 | ) as samples_2:
100 | # Assert that the HDF array has the right dimensions
101 | assert samples_1.numpy.shape == (
102 | distribution.dimensions + 1,
103 | samples_written_expected_1,
104 | )
105 | assert samples_2.numpy.shape == (
106 | distribution.dimensions + 1,
107 | samples_written_expected_2,
108 | )
109 |
110 | min_written_samples = min(
111 | samples_written_expected_1, samples_written_expected_2
112 | )
113 |
114 |         var_a = samples_1[:, :min_written_samples]
115 |         var_b = samples_2[:, :min_written_samples]
116 |
117 |         # Assert that both chains produced identical samples up to the
118 |         # length of the shorter chain
119 |         assert _numpy.all(var_a == var_b)
120 |
121 | # Remove the file
122 | _os.remove(filename_1)
123 | _os.remove(filename_2)
124 |
--------------------------------------------------------------------------------
/tests/test_version.py:
--------------------------------------------------------------------------------
1 | import hmclab as _hmclab
2 |
3 |
4 | def test_version():
5 | """Check if the Versioneer code works."""
6 |
7 | versions = _hmclab._version.get_versions()
8 |
9 | for k, i in versions.items():
10 | print(k, i)
11 |
12 | print(_hmclab.__version__)
13 |
--------------------------------------------------------------------------------
/tests/test_visual_samplers.py:
--------------------------------------------------------------------------------
1 | """A collection of integrated tests.
2 | """
3 | from typing import List as _List
4 | import os as _os
5 | import uuid as _uuid
6 |
7 |
8 | import numpy as _numpy
9 | import matplotlib.pyplot as _plt
10 | import pytest as _pytest
12 |
13 |
14 | import hmclab as _hmclab
15 |
16 | _ad = _hmclab.Distributions._AbstractDistribution
17 | _as = _hmclab.Samplers._AbstractVisualSampler
18 |
19 | dimensions = [2, 4]
20 | sampler_classes = _as.__subclasses__()
21 | proposals = [100]
22 | autotuning = [True]
23 | plot_update_interval = [1, 7]
24 | dims_to_plot = [[0, 1], [4, 9]]
25 | animate_proposals = [True, False]
26 | animation_domain = [None, [-1, 1, -1, 1], [1, 0, -1, -1]]
27 |
28 |
29 | @_pytest.fixture(autouse=True)
30 | def run_before_and_after_tests():
31 | """Fixture to execute asserts before and after a test is run"""
32 | # Setup: fill with any logic you want
33 |
34 | yield # this is where the testing happens
35 |
36 | _plt.close()
37 |
38 |
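   | # Exercise every visual sampler over a grid of plotting options. Some
   | # combinations (e.g. dims_to_plot outside the sampled dimensions) are expected
   | # to raise an AssertionError, which is tolerated below.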
39 | @_pytest.mark.parametrize("sampler_class", sampler_classes)
40 | @_pytest.mark.parametrize("dimensions", dimensions)
41 | @_pytest.mark.parametrize("proposals", proposals)
42 | @_pytest.mark.parametrize("autotuning", autotuning)
43 | @_pytest.mark.parametrize("plot_update_interval", plot_update_interval)
44 | @_pytest.mark.parametrize("dims_to_plot", dims_to_plot)
45 | @_pytest.mark.parametrize("animate_proposals", animate_proposals)
46 | @_pytest.mark.parametrize("animation_domain", animation_domain)
47 | def test_basic_sampling(
48 | sampler_class: _as,
49 | dimensions: int,
50 | proposals: int,
51 | autotuning: bool,
52 | plot_update_interval: int,
53 | dims_to_plot: _List,
54 | animate_proposals: bool,
55 |     animation_domain: _numpy.ndarray,
56 | ):
57 | distribution = _hmclab.Distributions.Normal.create_default(dimensions)
58 |
59 | sampler_instance = sampler_class(
60 | plot_update_interval=plot_update_interval,
61 | dims_to_plot=dims_to_plot,
62 | animate_proposals=animate_proposals,
63 | animation_domain=animation_domain,
64 | )
65 |
66 | assert isinstance(sampler_instance, _as)
67 |
68 | unique_name = _uuid.uuid4().hex.upper()
69 | filename = f"temporary_file_{unique_name}.h5"
70 |
71 | # Remove file before attempting to sample
72 | if _os.path.exists(filename):
73 | _os.remove(filename) # pragma: no cover
74 |
75 | try:
76 | sampler_instance.sample(
77 | filename,
78 | distribution,
79 | proposals=proposals,
80 | online_thinning=10,
81 | max_time=0.1,
82 | autotuning=autotuning,
83 | disable_progressbar=True,
84 | )
85 | if sampler_instance.amount_of_writes > 0:
86 | # 10 percent burn_in
87 | burn_in = int(0.1 * sampler_instance.amount_of_writes)
88 | sampler_instance.load_results(burn_in=burn_in)
89 | except AssertionError:
90 | pass
91 |
92 |     # Clean up the samples file if it was created
93 | if _os.path.exists(filename):
94 | _os.remove(filename) # pragma: no cover
95 |
--------------------------------------------------------------------------------
/tests/test_visualizations.py:
--------------------------------------------------------------------------------
1 | """A collection of integrated tests.
2 | """
3 | from hmclab.Distributions import Normal
4 | import os as _os
5 | import pytest as _pytest
6 | import hmclab as _hmclab
7 | import matplotlib.pyplot as _plt
8 | import uuid as _uuid
9 |
10 |
11 | _ad = _hmclab.Distributions._AbstractDistribution
12 | _as = _hmclab.Samplers._AbstractSampler
13 |
14 | dimensions = [1, 2, 10]
15 | proposals = [1000]
16 |
17 |
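   | # Sample a default normal distribution with HMC, then run the resulting samples
   | # through the marginal, marginal_grid and visualize_2_dimensions helpers.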
18 | @_pytest.mark.parametrize("dimensions", dimensions)
19 | @_pytest.mark.parametrize("proposals", proposals)
20 | def test_basic_sampling(
21 | dimensions: int,
22 | proposals: int,
23 | ):
24 |
25 | distribution = Normal.create_default(dimensions=dimensions)
26 |
27 | sampler_instance = _hmclab.Samplers.HMC()
28 |
29 | unique_name = _uuid.uuid4().hex.upper()
30 | filename = f"temporary_file_{unique_name}.h5"
31 |
32 | # Remove file before attempting to sample
33 | if _os.path.exists(filename):
34 | _os.remove(filename) # pragma: no cover
35 |
36 | sampler_instance.sample(
37 | filename,
38 | distribution,
39 | proposals=proposals,
40 | online_thinning=1,
41 | max_time=0.1,
42 | autotuning=True,
43 | disable_progressbar=True,
44 | )
45 |
46 | # Check if the file was created. If it wasn't, fail
47 | if not _os.path.exists(filename):
48 | _pytest.fail("Samples file wasn't created")
49 |
50 | with _hmclab.Samples(filename) as samples:
51 | _hmclab.Visualization.marginal(samples, 0, 10, False, "r")
52 | _plt.close()
53 |
54 | try:
55 | _hmclab.Visualization.marginal_grid(
56 | samples,
57 | [0, 1],
58 | 25,
59 | False,
60 | _plt.get_cmap("seismic"),
61 | )
62 | _plt.close()
63 |
64 |         # The previous call is only allowed to succeed for dimensions > 1;
65 |         # otherwise, fail.
66 | if dimensions == 1:
67 | _pytest.fail(
68 | "This test should've failed. Was able to create a 2d plot with 1d "
69 | "data."
70 | )
71 | except AssertionError:
72 | if dimensions != 1:
73 | _pytest.fail(
74 | "This test should not have raise an AssertionError failed. Was not "
75 | "able to create a 2d plot with at least 2d data."
76 | )
77 |
78 | _hmclab.Visualization.visualize_2_dimensions(samples, 0, 1, 25, False)
79 | _plt.close()
80 |
81 | # Remove the file
82 | _os.remove(filename)
83 |
--------------------------------------------------------------------------------