├── .deepsource.toml ├── .gitattributes ├── CITATION.cff ├── LICENSE ├── MANIFEST.in ├── README.md ├── docs ├── _static │ └── custom.css ├── environment.yml └── source │ ├── api │ ├── cdiutils.io.rst │ ├── cdiutils.multibcdi.rst │ ├── cdiutils.pipeline.rst │ ├── cdiutils.plot.rst │ ├── cdiutils.process.rst │ ├── cdiutils.rst │ ├── generated │ │ ├── cdiutils.converter.rst │ │ ├── cdiutils.geometry.rst │ │ ├── cdiutils.io.cristal.rst │ │ ├── cdiutils.io.cxi.rst │ │ ├── cdiutils.io.id01.rst │ │ ├── cdiutils.io.id27.rst │ │ ├── cdiutils.io.load_data.rst │ │ ├── cdiutils.io.loader.rst │ │ ├── cdiutils.io.nanomax.rst │ │ ├── cdiutils.io.p10.rst │ │ ├── cdiutils.io.rst │ │ ├── cdiutils.io.sixs.rst │ │ ├── cdiutils.io.vtk.rst │ │ ├── cdiutils.pipeline.base.rst │ │ ├── cdiutils.pipeline.bcdi.rst │ │ ├── cdiutils.pipeline.parameters.rst │ │ ├── cdiutils.pipeline.pipeline_plotter.rst │ │ ├── cdiutils.pipeline.rst │ │ ├── cdiutils.process.facet_analysis.rst │ │ ├── cdiutils.process.phaser.rst │ │ ├── cdiutils.process.postprocessor.rst │ │ ├── cdiutils.process.rst │ │ ├── cdiutils.process.support_processor.rst │ │ └── cdiutils.utils.rst │ ├── index.rst │ └── modules.rst │ ├── conf.py │ └── index.rst ├── examples ├── bcdi_reconstruction_analysis.ipynb ├── explore_cxi_file.ipynb └── pole_figure.ipynb ├── images ├── XTOP_24_cdiutils_poster_200_dpi.png ├── XU_and_CXI.png ├── arrows.png ├── cdiutils_S311_amplitude_distribution_plot.png ├── cdiutils_S311_different_strain_methods.png ├── cdiutils_S311_summary_slice_plot.png ├── contour.png ├── cross_section_quiver.png ├── electron_density.png ├── multi_cross_sections.png ├── multi_slice_plots_phase.png ├── phase.png ├── reciprocal_space_q_lab.png ├── strain.png └── strain_histograms.png ├── pyproject.toml ├── readthedocs.yml ├── requirements.txt └── src └── cdiutils ├── __init__.py ├── analysis ├── __init__.py ├── stats.py └── stereography.py ├── converter.py ├── facetanalysis ├── facet_correlation.py ├── facet_utils.py └── 
get_facet_data.py ├── geometry.py ├── io ├── __init__.py ├── cristal.py ├── cxi.py ├── cxi_explorer.py ├── id01.py ├── id27.py ├── loader.py ├── nanomax.py ├── p10.py ├── sixs.py └── vtk.py ├── multibcdi ├── 3D_phase.py ├── __init__.py └── preprocess.py ├── pipeline ├── __init__.py ├── base.py ├── bcdi.py ├── parameters.py ├── pipeline_plotter.py └── pynx-id01-cdi_template.slurm ├── plot ├── __init__.py ├── colormap.py ├── formatting.py ├── interactive.py ├── quiver.py ├── slice.py └── volume.py ├── process ├── __init__.py ├── facet_analysis.py ├── phaser.py ├── postprocessor.py └── support_processor.py ├── scripts ├── __init__.py ├── prepare_bcdi_notebooks.py └── prepare_detector_calibration.py ├── templates ├── bcdi_pipeline.ipynb ├── detector_calibration.ipynb └── step_by_step_bcdi_analysis.ipynb └── utils.py /.deepsource.toml: -------------------------------------------------------------------------------- 1 | version = 1 2 | 3 | [[analyzers]] 4 | name = "shell" 5 | enabled = true 6 | 7 | [[analyzers]] 8 | name = "python" 9 | enabled = true 10 | 11 | [analyzers.meta] 12 | runtime_version = "3.x.x" -------------------------------------------------------------------------------- /.gitattributes: -------------------------------------------------------------------------------- 1 | *.py linguist-language=python 2 | *.ipynb linguist-documentation -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | cff-version: 1.2.0 2 | message: "If you use this package for scientific work, please consider including a citation as below." 
3 | type: software 4 | authors: 5 | - family-names: Atlan 6 | given-names: Clement 7 | orcid: https://orcid.org/0000-0001-8531-7864 8 | 9 | doi: 10.5281/zenodo.7656853 10 | version: 0.2.0 11 | date-released: 2025-02 12 | title: "Cdiutils: A python package for Bragg Coherent Diffraction Imaging processing, analysis and visualisation workflows" 13 | 14 | abstract: "A python package to help Bragg Coherent Diffraction Imaging (BCDI) practitioners in their analysis and visualisation workflows. 15 | 16 | The package is designed to handle the three primary stages of a BCDI data processing workflow: 17 | 18 | * **Preprocessing** (data centering and cropping) 19 | * **Phase retrieval**: utilises PyNX for phasing (refer to [PyNX documentation](http://ftp.esrf.fr/pub/scisoft/PyNX/doc/)). 20 | * **Post processing** (orthogonalisation, phase manipulation, strain computation etc.) 21 | 22 | It is assumed that the phase retrieval is conducted using the PyNX package. The `BcdiPipeline` class runs all three stages and can manage connections to different machines, especially for GPU-based phase retrieval."
23 | repository-code: https://github.com/clatlan/cdiutils 24 | url: https://github.com/clatlan/cdiutils 25 | license: MIT -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2024 Clément Atlan 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include examples/*.ipynb -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # cdiutils 2 | 3 | [![DOI](https://zenodo.org/badge/360442527.svg)](https://zenodo.org/badge/latestdoi/360442527) 4 | 5 | [![License](https://img.shields.io/github/license/clatlan/cdiutils)](https://github.com/clatlan/cdiutils/blob/main/LICENSE) 6 | 7 | My python package to help X-ray Bragg Coherent Diffraction Imaging (BCDI) practitioners in their analysis and visualisation workflows. I developed the package during my PhD. 8 | 9 | The package is designed to handle the three primary stages of a BCDI data processing workflow: 10 | 11 | * **Pre-processing** (data centering and cropping) 12 | * **Phase retrieval**: utilises PyNX for accurate phasing (refer to [PyNX documentation](https://pynx.esrf.fr/en/latest/)). 13 | * **Post-processing** (orthogonalisation, phase manipulation, strain computation etc.) 14 | 15 | It is assumed that the phase retrieval is conducted using the PyNX package. The `BcdiPipeline` class runs all three stages and can manage connections to different machines, especially for GPU-based phase retrieval. 16 | 17 | Some features of this package include: 18 | 19 | * **Flexibility in Hardware:** While the phase retrieval stage may leverage GPUs, pre- and post-processing can be executed without GPU support. 20 | * **Utility Functions:** The package provides utility functions to analyse processed data and generate plots suitable for potential publications. 
21 | 22 | For a visual wrap-up, see the associated poster presented at [XTOP24](https://xtop2024.sciencesconf.org/): 23 | ![xtop_poster](https://github.com/clatlan/cdiutils/blob/master/images/XTOP_24_cdiutils_poster_200_dpi.png) 24 | 25 | 26 | ## Installation 27 | 28 | You can install the package using the following command: 29 | 30 | ```bash 31 | pip install git+https://github.com/clatlan/cdiutils.git 32 | ``` 33 | 34 | To update your environment with the latest commits: 35 | 36 | ```bash 37 | pip install -I --no-deps git+https://github.com/clatlan/cdiutils.git 38 | ``` 39 | 40 | _Note: Check out the dev branch for the latest features and bug fixes. The dev branch is not guaranteed to be stable._ 41 | 42 | Upgrade your environment with a new version of the package: 43 | 44 | ```bash 45 | pip install -U git+https://github.com/clatlan/cdiutils.git 46 | ``` 47 | 48 | ## Getting started 49 | 50 | Once the package is installed, you can try it right away using the notebook template directly accessible with the command: 51 | 52 | ```bash 53 | prepare_bcdi_notebook [path_to_destination] 54 | ``` 55 | 56 | This will generate a notebook template at the given destination. 57 | 58 | ## Processing BCDI data 59 | 60 | Once data are processed, the `BcdiPipeline` instance saves the data in .npz, .cxi and .vti files following the CXI file format convention (see [https://www.cxidb.org/cxi.html]()). 
It also plots summary and debug figures such as: 61 | 62 | * **Summary Slice Plot** 63 | ![summary](https://github.com/clatlan/cdiutils/blob/master/images/cdiutils_S311_summary_slice_plot.png) 64 | * **Isosurface determination** 65 | ![isosurface](https://github.com/clatlan/cdiutils/blob/master/images/cdiutils_S311_amplitude_distribution_plot.png) 66 | * **Different strain computation methods** 67 | ![strain](https://github.com/clatlan/cdiutils/blob/master/images/cdiutils_S311_different_strain_methods.png) 68 | 69 | ## BCDI reconstruction analysis 70 | If you want to analyse and compare your reconstructions, check out the example notebook [bcdi_reconstruction_analysis.ipynb](https://github.com/clatlan/cdiutils/blob/master/examples/bcdi_reconstruction_analysis.ipynb) in the `examples` folder. This notebook provides a comprehensive overview of the analysis process, including: 71 | * **Slice plots of any quantity you like (here phase) across different conditions:** 72 | ![](https://github.com/clatlan/cdiutils/blob/master/images/multi_slice_plots_phase.png) 73 | 74 | 75 | * **Reciprocal space plots in the orthogonal frame (lab frame)** 76 | ![](https://github.com/clatlan/cdiutils/blob/master/images/reciprocal_space_q_lab.png) 77 | 78 | * **Histogram plots of any quantity you like across different conditions:** 79 | ![](https://github.com/clatlan/cdiutils/blob/master/images/strain_histograms.png) 80 | 81 | 82 | ## Cross section quiver 83 | The cross section quiver is a nice tool for visualising the strain and displacement fields and their relationship in BCDI data. 84 | 85 | * The cross section quiver allows you to plot cross sections of the strain and displacement fields on the same plot. 86 | ![Cross Section Quiver](https://github.com/clatlan/cdiutils/blob/master/images/cross_section_quiver.png) 87 | * For different conditions 88 | ![Quivers](https://github.com/clatlan/cdiutils/blob/master/images/multi_cross_sections.png) 89 | 90 | ## Contributing 91 | 92 | Contributions are welcome! 
Please feel free to submit a Pull Request. 93 | 94 | ## Support 95 | 96 | If you encounter any issues or have questions, please open an issue on the [GitHub repository](https://github.com/clatlan/cdiutils/issues). 97 | 98 | -------------------------------------------------------------------------------- /docs/_static/custom.css: -------------------------------------------------------------------------------- 1 | /*default is 60em*/ 2 | .bd-main .bd-content .bd-article-container { 3 | max-width: 120em; 4 | } 5 | 6 | /* default is 88rem*/ 7 | .bd-page-width { 8 | max-width: 120em; 9 | } 10 | 11 | /* Larger header/navbar for Fox logo*/ 12 | .navbar-brand img{ 13 | height: auto; 14 | } 15 | 16 | .navbar { 17 | height: 6em; 18 | } 19 | -------------------------------------------------------------------------------- /docs/environment.yml: -------------------------------------------------------------------------------- 1 | name: readthedocs 2 | channels: 3 | - conda-forge 4 | dependencies: 5 | - python==3.5 6 | - sphinx>=1.4 7 | - pandoc 8 | - nbconvert 9 | - ipykernel 10 | - nbsphinx 11 | - nbsphinx-link 12 | - sphinx-rtd-theme 13 | - pydata-sphinx-theme 14 | - sphinx-autodoc-typehints 15 | -------------------------------------------------------------------------------- /docs/source/api/cdiutils.io.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io package 2 | =================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | cdiutils.io.cristal module 8 | -------------------------- 9 | 10 | .. automodule:: cdiutils.io.cristal 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | cdiutils.io.cxi module 16 | ---------------------- 17 | 18 | .. automodule:: cdiutils.io.cxi 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | cdiutils.io.id01 module 24 | ----------------------- 25 | 26 | .. 
automodule:: cdiutils.io.id01 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | cdiutils.io.id27 module 32 | ----------------------- 33 | 34 | .. automodule:: cdiutils.io.id27 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | cdiutils.io.load\_data module 40 | ----------------------------- 41 | 42 | .. automodule:: cdiutils.io.load_data 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | cdiutils.io.loader module 48 | ------------------------- 49 | 50 | .. automodule:: cdiutils.io.loader 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | cdiutils.io.nanomax module 56 | -------------------------- 57 | 58 | .. automodule:: cdiutils.io.nanomax 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | cdiutils.io.p10 module 64 | ---------------------- 65 | 66 | .. automodule:: cdiutils.io.p10 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | cdiutils.io.sixs module 72 | ----------------------- 73 | 74 | .. automodule:: cdiutils.io.sixs 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | cdiutils.io.vtk module 80 | ---------------------- 81 | 82 | .. automodule:: cdiutils.io.vtk 83 | :members: 84 | :undoc-members: 85 | :show-inheritance: 86 | 87 | Module contents 88 | --------------- 89 | 90 | .. automodule:: cdiutils.io 91 | :members: 92 | :undoc-members: 93 | :show-inheritance: 94 | -------------------------------------------------------------------------------- /docs/source/api/cdiutils.multibcdi.rst: -------------------------------------------------------------------------------- 1 | cdiutils.multibcdi package 2 | ========================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | cdiutils.multibcdi.3D\_phase module 8 | ----------------------------------- 9 | 10 | .. 
automodule:: cdiutils.multibcdi.3D_phase 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | cdiutils.multibcdi.preprocess module 16 | ------------------------------------ 17 | 18 | .. automodule:: cdiutils.multibcdi.preprocess 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | Module contents 24 | --------------- 25 | 26 | .. automodule:: cdiutils.multibcdi 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | -------------------------------------------------------------------------------- /docs/source/api/cdiutils.pipeline.rst: -------------------------------------------------------------------------------- 1 | cdiutils.pipeline package 2 | ========================= 3 | 4 | Submodules 5 | ---------- 6 | 7 | cdiutils.pipeline.base module 8 | ----------------------------- 9 | 10 | .. automodule:: cdiutils.pipeline.base 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | cdiutils.pipeline.bcdi module 16 | ----------------------------- 17 | 18 | .. automodule:: cdiutils.pipeline.bcdi 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | cdiutils.pipeline.parameters module 24 | ----------------------------------- 25 | 26 | .. automodule:: cdiutils.pipeline.parameters 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | cdiutils.pipeline.pipeline\_plotter module 32 | ------------------------------------------ 33 | 34 | .. automodule:: cdiutils.pipeline.pipeline_plotter 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | Module contents 40 | --------------- 41 | 42 | .. 
automodule:: cdiutils.pipeline 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | -------------------------------------------------------------------------------- /docs/source/api/cdiutils.plot.rst: -------------------------------------------------------------------------------- 1 | cdiutils.plot package 2 | ===================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | cdiutils.plot.colormap module 8 | ----------------------------- 9 | 10 | .. automodule:: cdiutils.plot.colormap 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | cdiutils.plot.formatting module 16 | ------------------------------- 17 | 18 | .. automodule:: cdiutils.plot.formatting 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | cdiutils.plot.interactive module 24 | -------------------------------- 25 | 26 | .. automodule:: cdiutils.plot.interactive 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | cdiutils.plot.quiver module 32 | --------------------------- 33 | 34 | .. automodule:: cdiutils.plot.quiver 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | cdiutils.plot.slice module 40 | -------------------------- 41 | 42 | .. automodule:: cdiutils.plot.slice 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | cdiutils.plot.volume module 48 | --------------------------- 49 | 50 | .. automodule:: cdiutils.plot.volume 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | Module contents 56 | --------------- 57 | 58 | .. 
automodule:: cdiutils.plot 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | -------------------------------------------------------------------------------- /docs/source/api/cdiutils.process.rst: -------------------------------------------------------------------------------- 1 | cdiutils.process package 2 | ======================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | cdiutils.process.facet\_analysis module 8 | --------------------------------------- 9 | 10 | .. automodule:: cdiutils.process.facet_analysis 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | cdiutils.process.phaser module 16 | ------------------------------ 17 | 18 | .. automodule:: cdiutils.process.phaser 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | cdiutils.process.postprocessor module 24 | ------------------------------------- 25 | 26 | .. automodule:: cdiutils.process.postprocessor 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | cdiutils.process.support\_processor module 32 | ------------------------------------------ 33 | 34 | .. automodule:: cdiutils.process.support_processor 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | Module contents 40 | --------------- 41 | 42 | .. automodule:: cdiutils.process 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | -------------------------------------------------------------------------------- /docs/source/api/cdiutils.rst: -------------------------------------------------------------------------------- 1 | cdiutils package 2 | ================ 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | :maxdepth: 4 9 | 10 | cdiutils.io 11 | cdiutils.multibcdi 12 | cdiutils.pipeline 13 | cdiutils.plot 14 | cdiutils.process 15 | 16 | Submodules 17 | ---------- 18 | 19 | cdiutils.converter module 20 | ------------------------- 21 | 22 | .. 
automodule:: cdiutils.converter 23 | :members: 24 | :undoc-members: 25 | :show-inheritance: 26 | 27 | cdiutils.geometry module 28 | ------------------------ 29 | 30 | .. automodule:: cdiutils.geometry 31 | :members: 32 | :undoc-members: 33 | :show-inheritance: 34 | 35 | cdiutils.utils module 36 | --------------------- 37 | 38 | .. automodule:: cdiutils.utils 39 | :members: 40 | :undoc-members: 41 | :show-inheritance: 42 | 43 | Module contents 44 | --------------- 45 | 46 | .. automodule:: cdiutils 47 | :members: 48 | :undoc-members: 49 | :show-inheritance: 50 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.converter.rst: -------------------------------------------------------------------------------- 1 | cdiutils.converter 2 | ================== 3 | 4 | .. automodule:: cdiutils.converter 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | Interpolator3D 12 | SpaceConverter 13 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.geometry.rst: -------------------------------------------------------------------------------- 1 | cdiutils.geometry 2 | ================= 3 | 4 | .. automodule:: cdiutils.geometry 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | Geometry 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.cristal.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.cristal 2 | =================== 3 | 4 | .. automodule:: cdiutils.io.cristal 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. 
autosummary:: 10 | 11 | CristalLoader 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.cxi.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.cxi 2 | =============== 3 | 4 | .. automodule:: cdiutils.io.cxi 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. autosummary:: 10 | 11 | load_cxi 12 | save_as_cxi 13 | 14 | .. rubric:: Classes 15 | 16 | .. autosummary:: 17 | 18 | CXIFile 19 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.id01.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.id01 2 | ================ 3 | 4 | .. automodule:: cdiutils.io.id01 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. autosummary:: 10 | 11 | safe 12 | 13 | .. rubric:: Classes 14 | 15 | .. autosummary:: 16 | 17 | ID01Loader 18 | SpecLoader 19 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.id27.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.id27 2 | ================ 3 | 4 | .. automodule:: cdiutils.io.id27 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | ID27Loader 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.load_data.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.load\_data 2 | ====================== 3 | 4 | .. automodule:: cdiutils.io.load_data 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. 
autosummary:: 10 | 11 | get_cmap_dict_from_json 12 | get_data_from_npyz 13 | load_amp_phase_strain 14 | load_data_from_cxi 15 | load_post_bcdi_data 16 | load_raw_scan 17 | load_specfile 18 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.loader.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.loader 2 | ================== 3 | 4 | .. automodule:: cdiutils.io.loader 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. autosummary:: 10 | 11 | h5_safe_load 12 | 13 | .. rubric:: Classes 14 | 15 | .. autosummary:: 16 | 17 | H5TypeLoader 18 | Loader 19 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.nanomax.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.nanomax 2 | =================== 3 | 4 | .. automodule:: cdiutils.io.nanomax 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | NanoMAXLoader 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.p10.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.p10 2 | =============== 3 | 4 | .. automodule:: cdiutils.io.p10 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | P10Loader 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io 2 | =========== 3 | 4 | .. automodule:: cdiutils.io 5 | 6 | 7 | .. rubric:: Modules 8 | 9 | .. 
autosummary:: 10 | :toctree: 11 | :recursive: 12 | 13 | cristal 14 | cxi 15 | id01 16 | id27 17 | load_data 18 | loader 19 | nanomax 20 | p10 21 | sixs 22 | vtk 23 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.sixs.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.sixs 2 | ================ 3 | 4 | .. automodule:: cdiutils.io.sixs 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | SIXSLoader 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.io.vtk.rst: -------------------------------------------------------------------------------- 1 | cdiutils.io.vtk 2 | =============== 3 | 4 | .. automodule:: cdiutils.io.vtk 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. autosummary:: 10 | 11 | load_vtk 12 | save_as_vti 13 | 14 | .. rubric:: Exceptions 15 | 16 | .. autosummary:: 17 | 18 | VtkImportError 19 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.pipeline.base.rst: -------------------------------------------------------------------------------- 1 | cdiutils.pipeline.base 2 | ====================== 3 | 4 | .. automodule:: cdiutils.pipeline.base 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. autosummary:: 10 | 11 | job 12 | 13 | .. rubric:: Classes 14 | 15 | .. autosummary:: 16 | 17 | LoggerWriter 18 | Pipeline 19 | 20 | .. rubric:: Exceptions 21 | 22 | .. autosummary:: 23 | 24 | JobCancelledError 25 | JobFailedError 26 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.pipeline.bcdi.rst: -------------------------------------------------------------------------------- 1 | cdiutils.pipeline.bcdi 2 | ====================== 3 | 4 | .. automodule:: cdiutils.pipeline.bcdi 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. 
autosummary:: 10 | 11 | BcdiPipeline 12 | 13 | .. rubric:: Exceptions 14 | 15 | .. autosummary:: 16 | 17 | PyNXScriptError 18 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.pipeline.parameters.rst: -------------------------------------------------------------------------------- 1 | cdiutils.pipeline.parameters 2 | ============================ 3 | 4 | .. automodule:: cdiutils.pipeline.parameters 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. autosummary:: 10 | 11 | check_params 12 | convert_np_arrays 13 | fill_pynx_params 14 | get_params_from_variables 15 | isparameter 16 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.pipeline.pipeline_plotter.rst: -------------------------------------------------------------------------------- 1 | cdiutils.pipeline.pipeline\_plotter 2 | =================================== 3 | 4 | .. automodule:: cdiutils.pipeline.pipeline_plotter 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | PipelinePlotter 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.pipeline.rst: -------------------------------------------------------------------------------- 1 | cdiutils.pipeline 2 | ================= 3 | 4 | .. automodule:: cdiutils.pipeline 5 | 6 | 7 | .. rubric:: Modules 8 | 9 | .. autosummary:: 10 | :toctree: 11 | :recursive: 12 | 13 | base 14 | bcdi 15 | parameters 16 | pipeline_plotter 17 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.process.facet_analysis.rst: -------------------------------------------------------------------------------- 1 | cdiutils.process.facet\_analysis 2 | ================================ 3 | 4 | .. automodule:: cdiutils.process.facet_analysis 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. 
autosummary:: 10 | 11 | FacetAnalysisProcessor 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.process.phaser.rst: -------------------------------------------------------------------------------- 1 | cdiutils.process.phaser 2 | ======================= 3 | 4 | .. automodule:: cdiutils.process.phaser 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | PhasingResultAnalyser 12 | PyNXPhaser 13 | 14 | .. rubric:: Exceptions 15 | 16 | .. autosummary:: 17 | 18 | PynNXImportError 19 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.process.postprocessor.rst: -------------------------------------------------------------------------------- 1 | cdiutils.process.postprocessor 2 | ============================== 3 | 4 | .. automodule:: cdiutils.process.postprocessor 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. autosummary:: 10 | 11 | PostProcessor 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.process.rst: -------------------------------------------------------------------------------- 1 | cdiutils.process 2 | ================ 3 | 4 | .. automodule:: cdiutils.process 5 | 6 | 7 | .. rubric:: Modules 8 | 9 | .. autosummary:: 10 | :toctree: 11 | :recursive: 12 | 13 | facet_analysis 14 | phaser 15 | postprocessor 16 | support_processor 17 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.process.support_processor.rst: -------------------------------------------------------------------------------- 1 | cdiutils.process.support\_processor 2 | =================================== 3 | 4 | .. automodule:: cdiutils.process.support_processor 5 | 6 | 7 | .. rubric:: Classes 8 | 9 | .. 
autosummary:: 10 | 11 | SupportProcessor 12 | -------------------------------------------------------------------------------- /docs/source/api/generated/cdiutils.utils.rst: -------------------------------------------------------------------------------- 1 | cdiutils.utils 2 | ============== 3 | 4 | .. automodule:: cdiutils.utils 5 | 6 | 7 | .. rubric:: Functions 8 | 9 | .. autosummary:: 10 | 11 | adjust_to_valid_shape 12 | angle 13 | bin_along_axis 14 | center 15 | compute_corrected_angles 16 | compute_distance_from_com 17 | crop_at_center 18 | distance_voxel 19 | energy_to_wavelength 20 | ensure_pynx_shape 21 | error_metrics 22 | extract_reduced_shape 23 | find_hull 24 | find_isosurface 25 | find_max_pos 26 | find_suitable_array_shape 27 | get_centred_slices 28 | get_oversampling_ratios 29 | get_prime_factors 30 | hot_pixel_filter 31 | hybrid_gradient 32 | is_valid_shape 33 | kde_from_histogram 34 | make_support 35 | nan_center_of_mass 36 | nan_to_zero 37 | normalise 38 | normalise_complex_array 39 | num_to_nan 40 | oversampling_from_diffraction 41 | retrieve_original_index 42 | rotation 43 | rotation_x 44 | rotation_y 45 | rotation_z 46 | shape_for_safe_centred_cropping 47 | size_up_support 48 | symmetric_pad 49 | to_bool 50 | unit_vector 51 | v1_to_v2_rotation_matrix 52 | valid_args_only 53 | wavelength_to_energy 54 | zero_to_nan 55 | 56 | .. rubric:: Classes 57 | 58 | .. autosummary:: 59 | 60 | CroppingHandler 61 | -------------------------------------------------------------------------------- /docs/source/api/index.rst: -------------------------------------------------------------------------------- 1 | API Documentation 2 | ================= 3 | 4 | .. 
autosummary:: 5 | :toctree: generated 6 | :recursive: 7 | 8 | cdiutils.utils 9 | cdiutils.converter 10 | cdiutils.geometry 11 | cdiutils.io 12 | cdiutils.process 13 | cdiutils.pipeline -------------------------------------------------------------------------------- /docs/source/api/modules.rst: -------------------------------------------------------------------------------- 1 | cdiutils 2 | ======== 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | cdiutils 8 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # For the full list of built-in configuration values, see the documentation: 4 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 5 | 6 | # -- Project information ----------------------------------------------------- 7 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information 8 | 9 | project = 'CdiUtils' 10 | copyright = '2025, Clément Atlan' 11 | author = 'Clément Atlan' 12 | release = '0.2.0' 13 | 14 | # -- General configuration --------------------------------------------------- 15 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration 16 | 17 | # Add any Sphinx extension module names here, as strings. They can be 18 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 19 | # ones. 
20 | extensions = [ 21 | "sphinx.ext.autodoc", # Extracts docstrings 22 | "sphinx.ext.napoleon", # Supports Google-style & NumPy-style docstrings 23 | "sphinx.ext.viewcode", # Adds links to source code 24 | "sphinx.ext.autosummary", # Generates summary tables 25 | "sphinx_autodoc_typehints", # Shows type hints in docs 26 | 27 | 'sphinx.ext.doctest', 28 | # 'sphinx.ext.intersphinx', 29 | 'sphinx.ext.coverage', 30 | 'sphinx.ext.mathjax', 31 | 'sphinx.ext.githubpages', 32 | 'sphinx.ext.todo', 33 | # 'sphinx.ext.graphviz', 34 | # 'sphinx.ext.inheritance_diagram', 35 | 'nbsphinx', 36 | 'nbsphinx_link', 37 | # 'sphinxarg.ext' 38 | ] 39 | 40 | # Automatically generate stub files 41 | autosummary_generate = True 42 | autodoc_member_order = "bysource" # Keeps methods in order of appearance 43 | 44 | 45 | templates_path = ['_templates'] 46 | exclude_patterns = [] 47 | 48 | # -- Options for HTML output ------------------------------------------------- 49 | # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output 50 | 51 | html_theme = 'alabaster' 52 | html_static_path = ['_static'] 53 | 54 | 55 | # The name of the Pygments (syntax highlighting) style to use. 56 | pygments_style = 'sphinx' 57 | todo_include_todos = True 58 | 59 | autoclass_content = 'both' 60 | 61 | html_theme = "pydata_sphinx_theme" 62 | 63 | html_theme_options = { 64 | "show_nav_level": 2, 65 | "navigation_depth": 2, 66 | "navbar_align": "left", 67 | # "primary_sidebar_end": ["indices.html", "sidebar-ethical-ads.html"] 68 | } 69 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. CdiUtils documentation master file, created by 2 | sphinx-quickstart on Fri Feb 14 15:01:29 2025. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 
5 | 6 | CdiUtils documentation 7 | ====================== 8 | 9 | Add your content using ``reStructuredText`` syntax. See the 10 | `reStructuredText <https://www.sphinx-doc.org/en/master/usage/restructuredtext/index.html>`_ 11 | documentation for details. 12 | 13 | 14 | .. toctree:: 15 | :maxdepth: 2 16 | :caption: Contents: 17 | 18 | -------------------------------------------------------------------------------- /images/XTOP_24_cdiutils_poster_200_dpi.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/XTOP_24_cdiutils_poster_200_dpi.png -------------------------------------------------------------------------------- /images/XU_and_CXI.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/XU_and_CXI.png -------------------------------------------------------------------------------- /images/arrows.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/arrows.png -------------------------------------------------------------------------------- /images/cdiutils_S311_amplitude_distribution_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/cdiutils_S311_amplitude_distribution_plot.png -------------------------------------------------------------------------------- /images/cdiutils_S311_different_strain_methods.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/cdiutils_S311_different_strain_methods.png --------------------------------------------------------------------------------
/images/cdiutils_S311_summary_slice_plot.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/cdiutils_S311_summary_slice_plot.png -------------------------------------------------------------------------------- /images/contour.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/contour.png -------------------------------------------------------------------------------- /images/cross_section_quiver.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/cross_section_quiver.png -------------------------------------------------------------------------------- /images/electron_density.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/electron_density.png -------------------------------------------------------------------------------- /images/multi_cross_sections.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/multi_cross_sections.png -------------------------------------------------------------------------------- /images/multi_slice_plots_phase.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/multi_slice_plots_phase.png -------------------------------------------------------------------------------- /images/phase.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/phase.png -------------------------------------------------------------------------------- /images/reciprocal_space_q_lab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/reciprocal_space_q_lab.png -------------------------------------------------------------------------------- /images/strain.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/strain.png -------------------------------------------------------------------------------- /images/strain_histograms.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/images/strain_histograms.png -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools>=61", "wheel"] 3 | build-backend = "setuptools.build_meta" 4 | 5 | [project] 6 | name = "cdiutils" 7 | version = "0.2.0" 8 | description = "A Python package for (Bragg) Coherent X-ray Diffraction Imaging processing, analysis and visualisation workflows." 
9 | 10 | authors = [ 11 | { name = "Clement Atlan", email = "clement.atlan@esrf.fr" }, 12 | { name = "Corentin Chatelier" }, 13 | { name = "David Simonne" }, 14 | { name = "Matthieu Deschamps" }, 15 | { name = "Kyle Olson" }, 16 | ] 17 | maintainers = [ 18 | {name = "Clement Atlan", email = "clement.atlan@esrf.fr"} 19 | ] 20 | requires-python = ">=3.10" 21 | readme = "README.md" 22 | keywords = [ 23 | "reconstruction", 24 | "coherent X-ray imaging", 25 | "Bragg", 26 | "pipeline", 27 | "synchrotron radiation", 28 | "quantitative analysis", 29 | ] 30 | license = {file = "LICENSE"} 31 | 32 | dependencies = [ 33 | "colorcet>=3.0.0", 34 | "h5py>=3.6.0", 35 | "hdf5plugin>=3.2.0", 36 | "ipykernel", 37 | "matplotlib>=3.8", 38 | "numpy", 39 | "pandas>=1.4.2", 40 | "scikit-image>=0.19.2", 41 | "scikit-learn>=1.1.3", 42 | "scipy>=1.8.0", 43 | "seaborn>=0.12.1", 44 | "silx", 45 | "tabulate", 46 | "xrayutilities>=1.7.3", 47 | "ipyvolume", 48 | "ipython_genutils", 49 | "bokeh", 50 | "panel", 51 | "tornado", 52 | ] 53 | 54 | [project.optional-dependencies] 55 | vtk = ["vtk>=9.0.1"] 56 | 57 | [project.urls] 58 | Homepage = "https://github.com/clatlan/cdiutils" 59 | 60 | 61 | [tool.setuptools] 62 | package-dir = {"" = "src"} 63 | 64 | [tool.setuptools.packages.find] 65 | where = ["src"] 66 | 67 | [tool.setuptools.package-data] 68 | "cdiutils" = [ 69 | "pipeline/pynx-id01-cdi_template.slurm", 70 | "templates/bcdi_pipeline.ipynb", 71 | "templates/step_by_step_bcdi_analysis.ipynb", 72 | "templates/detector_calibration.ipynb" 73 | ] 74 | 75 | 76 | [tool.black] 77 | line-length = 79 78 | 79 | [tool.ruff] 80 | line-length = 79 81 | extend-include = ["*.ipynb"] 82 | 83 | 84 | [project.scripts] 85 | prepare_bcdi_notebooks = "cdiutils.scripts.prepare_bcdi_notebooks:main" 86 | prepare_detector_calibration = "cdiutils.scripts.prepare_detector_calibration:main" 87 | 88 | 89 | -------------------------------------------------------------------------------- /readthedocs.yml: 
-------------------------------------------------------------------------------- 1 | # .readthedocs.yaml 2 | # Read the Docs configuration file 3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details 4 | 5 | # Required 6 | version: 2 7 | 8 | # Set the OS, Python version and other tools you might need 9 | build: 10 | os: "ubuntu-24.04" 11 | tools: 12 | python: "mambaforge-latest" 13 | 14 | # Build documentation in the "docs/" directory with Sphinx 15 | sphinx: 16 | configuration: docs/source/conf.py 17 | 18 | # Optionally build your docs in additional formats such as PDF and ePub 19 | # formats: 20 | # - pdf 21 | # - epub 22 | 23 | conda: 24 | environment: docs/environment.yml 25 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | colorcet>=3.0.0 2 | h5py>=3.6.0 3 | hdf5plugin>=3.2.0 4 | ipykernel 5 | matplotlib>=3.8 6 | numpy 7 | pandas>=1.4.2 8 | scikit-image>=0.19.2 9 | scikit-learn>=1.1.3 10 | scipy>=1.8.0 11 | seaborn>=0.12.1 12 | silx-base 13 | tabulate 14 | xrayutilities>=1.7.3 15 | ipyvolume 16 | ipython_genutils 17 | bokeh 18 | panel 19 | tornado 20 | -------------------------------------------------------------------------------- /src/cdiutils/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | cdiutils - A Python package for (Bragg) Coherent X-ray Diffraction 3 | Imaging processing, analysis and visualisation workflows. 
4 | """ 5 | 6 | __version__ = "0.2.0" 7 | __author__ = "Clément Atlan" 8 | __email__ = "clement.atlan@esrf.fr" 9 | __license__ = "MIT" 10 | 11 | 12 | import importlib 13 | 14 | from .utils import ( 15 | energy_to_wavelength, 16 | wavelength_to_energy, 17 | make_support, 18 | get_centred_slices, 19 | hot_pixel_filter, 20 | CroppingHandler 21 | ) 22 | 23 | __submodules__ = { 24 | "utils", 25 | "analysis", 26 | "geometry", 27 | "converter", 28 | "io", 29 | "process", 30 | "pipeline", 31 | "plot" 32 | } 33 | 34 | __class_submodules__ = { 35 | "Geometry": "geometry", 36 | "SpaceConverter": "converter", 37 | "BcdiPipeline": "pipeline", 38 | "Loader": "io", 39 | "CXIFile": "io" 40 | } 41 | 42 | __all__ = [ 43 | "energy_to_wavelength", "wavelength_to_energy", "make_support", 44 | "get_centred_slices", "CroppingHandler", "hot_pixel_filter" 45 | ] 46 | __all__ += list(__submodules__) + list(__class_submodules__) 47 | 48 | 49 | def __getattr__(name): 50 | # Lazy load submodules 51 | if name in __submodules__: 52 | return importlib.import_module(f"{__name__}.{name}") 53 | 54 | # Lazy load specific classes 55 | if name in __class_submodules__: 56 | submodule = importlib.import_module( 57 | f"{__name__}.{__class_submodules__[name]}" 58 | ) 59 | return getattr(submodule, name) 60 | 61 | raise AttributeError(f"module {__name__} has no attribute {name}.") 62 | -------------------------------------------------------------------------------- /src/cdiutils/analysis/__init__.py: -------------------------------------------------------------------------------- 1 | from .stats import get_histogram, plot_histogram, find_isosurface 2 | from .stereography import pole_figure 3 | 4 | __all__ = [ 5 | "get_histogram", 6 | "plot_histogram", 7 | "find_isosurface", 8 | "pole_figure" 9 | ] 10 | -------------------------------------------------------------------------------- /src/cdiutils/analysis/stats.py: -------------------------------------------------------------------------------- 1 | import 
matplotlib.pyplot as plt 2 | from matplotlib.typing import ColorType 3 | 4 | import numpy as np 5 | from scipy.ndimage import binary_erosion 6 | from scipy.stats import gaussian_kde 7 | from seaborn import kdeplot 8 | 9 | from cdiutils.utils import normalise 10 | from cdiutils.plot.formatting import save_fig 11 | 12 | 13 | def kde_from_histogram( 14 | counts: np.ndarray, 15 | bin_edges: np.ndarray, 16 | ) -> tuple[np.ndarray, np.ndarray]: 17 | """ 18 | Compute the Kernel Density Estimate (KDE) from histogram counts and 19 | bin edges provided by numpy.histogram function. 20 | 21 | Args: 22 | counts (np.ndarray): the number of elements in each bin. 23 | bin_edges (np.ndarray): the limits of each bin. 24 | 25 | Returns: 26 | tuple[np.ndarray, np.ndarray]: x values used to compute the KDE 27 | estimate, the y value (KDE estimate) 28 | """ 29 | # Check if the histogram is density or not by checking the sum of 30 | # the counts 31 | bin_widths = np.diff(bin_edges) 32 | is_density = np.isclose(np.sum(counts * bin_widths), 1.0) 33 | 34 | if is_density: 35 | # When density=True, use the bin edges to reconstruct the data 36 | # for KDE 37 | data = [] 38 | for count, left_edge, right_edge in zip( 39 | counts, bin_edges[:-1], bin_edges[1:] 40 | ): 41 | data.extend( 42 | np.random.uniform( 43 | left_edge, 44 | right_edge, 45 | int(count * len(counts) * (right_edge - left_edge)) 46 | ) 47 | ) 48 | data = np.array(data) 49 | 50 | kde = gaussian_kde(data) 51 | x = np.linspace(min(bin_edges), max(bin_edges)) 52 | y = kde(x) 53 | 54 | else: 55 | # Reconstruct the data from histogram counts and bin edges 56 | bin_centers = (bin_edges[:-1] + bin_edges[1:]) / 2 57 | bin_width = bin_edges[1] - bin_edges[0] 58 | reconstructed_data = np.repeat(bin_centers, counts) 59 | 60 | # Calculate KDE using the reconstructed data 61 | kde = gaussian_kde(reconstructed_data) 62 | # Evaluate KDE 63 | x = np.linspace(bin_edges.min(), bin_edges.max()) 64 | y = kde.pdf(x) 65 | 66 | # Scale the KDE values 
to match the original counts 67 | y *= len(reconstructed_data) * bin_width 68 | 69 | return x, y 70 | 71 | 72 | def find_isosurface( 73 | amplitude: np.ndarray, 74 | nbins: int = 100, 75 | sigma_criterion: float = 3, 76 | plot: bool = False, 77 | show: bool = False, 78 | save: str = None 79 | ) -> tuple[float, plt.Axes] | float: 80 | """ 81 | Estimate the isosurface value from the amplitude distribution. 82 | 83 | This function computes the isosurface value based on the amplitude 84 | distribution of a 3D volume. The isosurface is calculated as: 85 | `mu - sigma_criterion * sigma`, where `mu` is the mean and `sigma` 86 | is the standard deviation of the distribution. 87 | 88 | Args: 89 | amplitude (np.ndarray): The 3D amplitude volume. 90 | nbins (int, optional): The number of bins to use for the histogram. 91 | Defaults to 100. 92 | sigma_criterion (float, optional): The factor used to compute 93 | the isosurface. Defaults to 3. 94 | plot (bool, optional): Whether to generate a plot of the 95 | histogram and density estimate. Defaults to False. 96 | show (bool, optional): Whether to display the plot. Defaults to 97 | False. 98 | save (str, optional): File path to save the plot if generated. 99 | Defaults to None. 100 | 101 | Returns: 102 | tuple[float, plt.Axes] | float: The isosurface value. If `plot` 103 | or `show` is True, also returns the matplotlib figure object. 
104 | """ 105 | # normalise and flatten the amplitude 106 | flattened_amplitude = normalise(amplitude).ravel() 107 | 108 | counts, bins = np.histogram(flattened_amplitude, bins=nbins) 109 | 110 | # remove the background 111 | background_value = bins[ 112 | np.where(counts == counts.max())[0] + 1 + nbins//20 113 | ] 114 | filtered_amplitude = flattened_amplitude[ 115 | flattened_amplitude > background_value 116 | ] 117 | 118 | # redo the histogram with the filtered amplitude 119 | counts, bins = np.histogram(filtered_amplitude, bins=nbins, density=True) 120 | bin_centres = (bins[:-1] + bins[1:]) / 2 121 | bin_size = bin_centres[1] - bin_centres[0] 122 | 123 | # fit the amplitude distribution 124 | kernel = gaussian_kde(filtered_amplitude) 125 | x = np.linspace(0, 1, 1000) 126 | fitted_counts = kernel(x) 127 | 128 | max_index = np.argmax(fitted_counts) 129 | right_gaussian_part = np.where(x >= x[max_index], fitted_counts, 0) 130 | 131 | # find the closest indexes 132 | right_HM_index = np.argmin( 133 | np.abs(right_gaussian_part - fitted_counts.max() / 2) 134 | ) 135 | left_HM_index = max_index - (right_HM_index - max_index) 136 | 137 | fwhm = x[right_HM_index] - x[left_HM_index] 138 | sigma_estimate = fwhm / (2 * np.sqrt(2 * np.log(2))) 139 | isosurface = x[max_index] - sigma_criterion * sigma_estimate 140 | 141 | if plot or show: 142 | figsize = (6, 4) # (5.812, 3.592) # golden ratio 143 | fig, ax = plt.subplots(1, 1, layout="tight", figsize=figsize) 144 | ax.bar( 145 | bin_centres, 146 | counts, 147 | width=bin_size, 148 | color="dodgerblue", 149 | alpha=0.9, 150 | edgecolor=(0, 0, 0, 0.25), 151 | label=r"amplitude distribution" 152 | ) 153 | kdeplot( 154 | filtered_amplitude, 155 | ax=ax, 156 | alpha=0.3, 157 | fill=True, 158 | color="navy", 159 | label=r"density estimate" 160 | ) 161 | ax.axvspan( 162 | x[left_HM_index], 163 | x[right_HM_index], 164 | edgecolor="k", 165 | facecolor="green", 166 | alpha=0.2, 167 | label="FWHM" 168 | ) 169 | ax.plot( 170 | 
[isosurface, isosurface], 171 | [0, fitted_counts[(np.abs(x - isosurface)).argmin()]], 172 | solid_capstyle="round", 173 | color="lightcoral", 174 | lw=5, 175 | label=fr"isosurface estimated at {isosurface:0.3f}" 176 | ) 177 | 178 | ax.set_xlabel(r"normalised amplitude") 179 | ax.set_ylabel("counts") 180 | ax.legend(frameon=False) 181 | fig.suptitle(r"Reconstructed amplitude distribution") 182 | fig.tight_layout() 183 | if save is not None: 184 | save_fig(fig, save, transparent=False) 185 | if show: 186 | plt.show() 187 | return float(isosurface), fig 188 | return float(isosurface) 189 | 190 | 191 | def get_histogram( 192 | data: np.ndarray, 193 | support: np.ndarray = None, 194 | bins: int = 50, 195 | density: bool = False, 196 | region: str = "overall" 197 | ) -> dict: 198 | """ 199 | Calculate histogram and optionally kernel density estimate (KDE) of 200 | the data. 201 | Optionally applies a support mask to the data before and calculates 202 | the surface and bulk histograms. 203 | Args: 204 | data (np.ndarray): the data to be analysed 205 | support (np.ndarray): the support mask to be applied to the data 206 | before histogram calculation. If None, no mask is applied. 207 | Defaults to None. 208 | bins (int, optional): number of bins for the histogram. 209 | Defaults to 50. 210 | density (bool, optional): whether to normalise the histogram 211 | to form a probability density function. Defaults to False. 212 | region (str, optional): region of the data to be analysed. Can 213 | be "overall", "surface", "bulk" or "all". Defaults to 214 | "overall". 215 | 216 | Returns: 217 | dict: a dictionary containing the histograms for the specified 218 | region(s). If kde is True, also includes the KDEs. 219 | """ 220 | if support is None and region != "overall": 221 | raise ValueError( 222 | "Support mask is required for surface or bulk region analysis." 
223 | ) 224 | if region not in ["overall", "surface", "bulk", "all"]: 225 | raise ValueError( 226 | "Invalid region specified. Choose from 'overall', " 227 | "'surface', 'bulk', or 'all'." 228 | ) 229 | histograms = {} 230 | means = {} 231 | stds = {} 232 | 233 | if support is not None: 234 | overall_data = data[support > 0] 235 | 236 | # to handle any remaining NaN values, we need to specify the range 237 | histograms["overall"] = np.histogram( 238 | overall_data, bins=bins, density=density, 239 | range=(np.nanmin(overall_data), np.nanmax(overall_data)) 240 | ) 241 | means["overall"] = np.nanmean(overall_data) 242 | stds["overall"] = np.nanstd(overall_data) 243 | 244 | if region != "overall": 245 | bulk = binary_erosion(support) 246 | bulk_data = data[bulk > 0] 247 | 248 | if region == "bulk" or region == "all": 249 | histograms["bulk"] = np.histogram( 250 | bulk_data, bins=bins, density=density, 251 | range=(np.nanmin(bulk_data), np.nanmax(bulk_data)) 252 | ) 253 | means["bulk"] = np.nanmean(bulk_data) 254 | stds["bulk"] = np.nanstd(bulk_data) 255 | 256 | if region == "surface" or region == "all": 257 | surface = support - bulk 258 | surface_data = data[surface > 0] 259 | histograms["surface"] = np.histogram( 260 | surface_data, bins=bins, density=density, 261 | range=(np.nanmin(surface_data), np.nanmax(surface_data)) 262 | ) 263 | means["surface"] = np.nanmean(surface_data) 264 | stds["surface"] = np.nanstd(surface_data) 265 | 266 | kdes = {k: kde_from_histogram(*v) for k, v in histograms.items()} 267 | 268 | return histograms, kdes, means, stds 269 | 270 | 271 | def plot_histogram( 272 | ax: plt.Axes, 273 | counts: np.ndarray, 274 | bin_edges: np.ndarray, 275 | kde_x: np.ndarray = None, 276 | kde_y: np.ndarray = None, 277 | color: ColorType = "lightcoral", 278 | fwhm: bool = True, 279 | bar_args: dict = None, 280 | kde_args: dict = None 281 | ) -> float: 282 | """ 283 | Plot the bars of a histogram as well as the kernel density 284 | estimate. 
285 | 286 | Args: 287 | ax (plt.Axes): the matplotlib ax to plot the histogram on. 288 | counts (np.ndarray): the count in each bin from 289 | np.histogram(). 290 | bin_edges (np.ndarray): the bin edge values from 291 | np.histogram(). 292 | kde_x (np.ndarray, optional): the x values used to 293 | calculate the kernel density estimate values. 294 | kde_y (np.ndarray, optional): the (y) values of the kernel 295 | density estimate. 296 | color (ColorType, optional): the colour of the bar and line. 297 | Defaults to "lightcoral". 298 | fwhm (bool, optional): whether to calculate and plot the full 299 | width at half maximum (FWHM) of the kernel density estimate. 300 | Defaults to True. 301 | bar_args (dict, optional): additional arguments for the 302 | matptlotlib bar function. 303 | kde_args (dict, optional): additional arguments for the 304 | matplotlib fill_between function. Can include boolean "fill" 305 | and float "fill_alpha" to control whether to fill the kde 306 | area and the alpha value of the fill. Defaults to None. 307 | 308 | Returns: 309 | float: the fwhm if required else None. 
310 | """ 311 | _bar_args = { 312 | "color": color, 313 | "alpha": 0.4, 314 | "edgecolor": color, 315 | "linewidth": 0.5, 316 | "label": "" 317 | } 318 | _bar_args.update(bar_args or {}) 319 | 320 | _kde_args = { 321 | "color": color, 322 | "label": "Kernel density estimate" 323 | } 324 | fill_kde, fill_alpha = False, False 325 | if kde_args is not None: 326 | if "fill" in kde_args: 327 | fill_kde = kde_args.pop("fill") 328 | if "fill_alpha" in kde_args: 329 | fill_alpha = kde_args.pop("fill_alpha") 330 | _kde_args.update(kde_args) 331 | 332 | # Resample the histogram to calculate the kernel density estimate 333 | bin_centres = (bin_edges[:-1] + bin_edges[1:]) / 2 334 | bin_width = bin_edges[1] - bin_edges[0] 335 | 336 | # Plot the histogram bars 337 | ax.bar(bin_centres, counts, bin_width, **_bar_args) 338 | 339 | # Find the x-axis limits 340 | xmax = np.max(np.abs(bin_centres)) 341 | xmin = -xmax 342 | ax.set_xlim(xmin, xmax) 343 | 344 | if kde_x is not None and kde_y is not None: 345 | # Plot the kernel density estimate 346 | ax.plot(kde_x, kde_y, **_kde_args) 347 | 348 | if fill_kde: 349 | ax.fill_between( 350 | kde_x, kde_y, 0, 351 | color=color, alpha=fill_alpha 352 | ) 353 | 354 | # Calculate the FWHM 355 | if fwhm: 356 | halfmax = kde_y.max() / 2 357 | maxpos = kde_y.argmax() 358 | leftpos = (np.abs(kde_y[:maxpos] - halfmax)).argmin() 359 | rightpos = (np.abs(kde_y[maxpos:] - halfmax)).argmin() + maxpos 360 | fwhm_value = kde_x[rightpos] - kde_x[leftpos] 361 | 362 | fwhm_line, = ax.plot( 363 | [], [], 364 | label=f"FWHM = {fwhm_value:.4f}%", 365 | color=color, ls="--", linewidth=1 366 | ) 367 | 368 | def update_fwhm_line(event_ax): 369 | xmin, xmax = event_ax.get_xlim() 370 | fwhm_line.set_data( 371 | [kde_x[leftpos], kde_x[rightpos]], [halfmax, halfmax] 372 | ) 373 | fwhm_line.set_transform(event_ax.transData) 374 | 375 | update_fwhm_line(ax) 376 | ax.callbacks.connect('xlim_changed', update_fwhm_line) 377 | ax.callbacks.connect('ylim_changed', 
update_fwhm_line) 378 | 379 | return fwhm_value 380 | return None 381 | -------------------------------------------------------------------------------- /src/cdiutils/analysis/stereography.py: -------------------------------------------------------------------------------- 1 | """ 2 | A function for plotting stereographic projections of reciprocal space 3 | data. 4 | """ 5 | 6 | import matplotlib.pyplot as plt 7 | from matplotlib.colors import LogNorm 8 | import numpy as np 9 | from scipy.interpolate import griddata 10 | 11 | from cdiutils.plot import ( 12 | save_fig, 13 | add_colorbar, 14 | add_labels, 15 | plot_volume_slices, 16 | ) 17 | 18 | 19 | def pole_figure( 20 | intensity: np.ndarray, 21 | grid: list, 22 | axis: str = "2", 23 | radius: float = None, 24 | dr: float = None, 25 | resolution: int = 250, 26 | figsize: tuple = (4, 4), 27 | title: str = None, 28 | verbose: bool = False, 29 | save: str = None, 30 | **plot_params, 31 | ) -> tuple: 32 | """ 33 | Generate a crystallographic pole figure using stereographic 34 | projection of 3D diffraction intensity. 35 | 36 | A stereographic projection maps points on a sphere to a plane by 37 | projecting from the opposite pole. In crystallography, this is used 38 | to visualise the distribution of crystal directions. 39 | 40 | Standard convention: 41 | - When axis in ("0", "1", "2"): The upper hemisphere is projected 42 | onto the equatorial plane with projection lines extending towards 43 | the south pole (observer position) 44 | - When axis in ("-0", "-1", "-2"): The lower hemisphere is projected 45 | onto the equatorial plane with projection lines extending towards 46 | the north pole (observer position) 47 | 48 | Args: 49 | intensity (np.ndarray): 3D array of intensity values. 50 | grid (list): list of 1D arrays representing the orthogonal grid. 
51 | ex: [x_coords, y_coords, z_coords] 52 | axis (str, optional): Projection axis and hemisphere selection: 53 | Positive values ("0","1","2") select the upper hemisphere 54 | (highest coordinates), projecting from the opposite pole. 55 | Negative values ("-0","-1","-2") select the lower hemisphere 56 | (lowest coordinates), projecting from the opposite pole. 57 | 58 | The absolute value indicates which axis (xu convention): 59 | |axis|=0: project onto yz-plane (normal to x-axis) 60 | |axis|=1: project onto xz-plane (normal to y-axis) 61 | |axis|=2: project onto xy-plane (normal to z-axis) 62 | 63 | Defaults to "2", giving the standard upper hemisphere 64 | projection onto the xy-plane. 65 | 66 | radius (float, optional): Radius of the spherical shell to 67 | select data from, centered at origin. 68 | If None, uses 0.25 * the maximum radial distance in the data 69 | Defaults to None. 70 | dr (float, optional): Thickness of the spherical shell. 71 | If None, uses 0.01 * radius. Defaults to None. 72 | resolution (int, optional): Resolution of the output 2D grid 73 | (number of points per dimension). Defaults to 250. 74 | figsize(tuple, optional): Size of the figure. Defaults to 75 | (4, 4). 76 | title (str, optional): Title for the plot. Defaults to None. 77 | verbose (bool, optional): Whether to print and plot additional 78 | information. Defaults to False. 79 | save (str, optional): File path to save the plot. Defaults to 80 | None. 81 | **plot_params (dict, optional): Additional parameters for the 82 | plotting function. 83 | 84 | Raises: 85 | ValueError: If axis is invalid or no data points found in shell 86 | 87 | Returns: 88 | tuple: (grid_x, grid_y, projected_intensity): The projected 2D 89 | grid coordinates and intensity values. Also returns 90 | (fig, ax): The figure and axis objects. 
91 | 92 | Example: 93 | >>> import numpy as np 94 | >>> from cdiutils.analysis import pole_figure 95 | >>> intensity = np.random.random((100, 100, 100)) 96 | >>> grid = [np.linspace(-1, 1, 100)] * 3 97 | 98 | >>> (grid_x, grid_y, projected_intensity), (fig, ax) = pole_figure( 99 | intensity, grid, axis="2" 100 | ) 101 | >>> plt.show() 102 | """ 103 | # parse the axis parameter - handle as a string first 104 | if not isinstance(axis, str): 105 | axis = str(axis) 106 | 107 | # check if we're selecting upper or lower hemisphere 108 | select_upper_hemisphere = not axis.startswith("-") 109 | 110 | # convert to absolute value for determining projection axis 111 | if axis.startswith("-"): 112 | projection_axis = int(axis[1:]) 113 | else: 114 | projection_axis = int(axis) 115 | 116 | if projection_axis >= 3: 117 | raise ValueError("Axis must be in ('-2', '-1', '-0', '0', '1', '2')") 118 | 119 | observer_position = "South" if select_upper_hemisphere else "North" 120 | hemisphere = "upper" if select_upper_hemisphere else "lower" 121 | 122 | if verbose: 123 | print( 124 | f"Projection axis: {projection_axis}, selecting {hemisphere} " 125 | f"hemisphere with observer at {observer_position} Pole" 126 | ) 127 | 128 | # calculate center points for the full grid 129 | centres = [np.mean(g) for g in grid] 130 | 131 | # Determine which part of the data to keep based on hemisphere selection 132 | slices = [slice(None), slice(None), slice(None)] 133 | if select_upper_hemisphere: 134 | # lower hemisphere will be zeroed out (relative to projection axis) 135 | slices[projection_axis] = slice( 136 | None, intensity.shape[projection_axis] // 2 137 | ) 138 | else: 139 | # upper hemisphere will be zeroed out 140 | slices[projection_axis] = slice( 141 | intensity.shape[projection_axis] // 2, None 142 | ) 143 | 144 | # select the hemisphere data and zero out the other hemisphere 145 | hemisphere_intensity = intensity.copy() 146 | hemisphere_intensity[tuple(slices)] = 0 147 | 148 | # make the 
meshgrid from the grid 149 | coordinate_meshgrids = np.meshgrid(*grid, indexing='ij') 150 | 151 | # make a spherical shell mask 152 | radii = np.sqrt(sum( 153 | (coordinate_meshgrids[i] - centres[i]) ** 2 154 | for i in range(3) 155 | )) 156 | 157 | # set default radius and thickness if not provided 158 | if radius is None: 159 | radius = 0.25 * np.max(radii) # A quarter of the max radius 160 | if dr is None: 161 | dr = 0.01 * radius 162 | 163 | if verbose: 164 | print( 165 | f"Selected radius: {radius:.3f} and spherical " 166 | f"shell thickness: {dr:.5f}" 167 | ) 168 | 169 | shell_mask = np.logical_and( 170 | radii > (radius - dr/2), 171 | radii < (radius + dr/2) 172 | ) 173 | 174 | # plot the filtered data if requested 175 | if verbose: 176 | _, debug_axes = plt.subplots(2, 2, layout="tight", figsize=figsize) 177 | params = { 178 | "norm": LogNorm(), 179 | "cmap": "turbo", 180 | "convention": "xu", 181 | "voxel_size": [np.diff(g).mean() for g in grid], 182 | "data_centre": [g.mean() for g in grid], 183 | "show": False 184 | } 185 | params["cmap"] = plt.get_cmap(params["cmap"]).copy() 186 | params["cmap"].set_bad(params["cmap"](0)) 187 | params["cmap"].set_under(params["cmap"](0)) 188 | 189 | for col, to_plot in enumerate( 190 | ( 191 | hemisphere_intensity, 192 | np.ma.masked_where(~shell_mask, hemisphere_intensity) 193 | ) 194 | ): 195 | _, old_axes = plot_volume_slices(to_plot, **params) 196 | add_labels(old_axes, convention="xu") 197 | axis_indexes = [0, 1, 2] 198 | axis_indexes.remove(projection_axis) 199 | old_axes = old_axes[axis_indexes] 200 | for new_ax, old_ax in zip(debug_axes[:, col].flat, old_axes.flat): 201 | im = old_ax.get_images()[0] 202 | new_ax.imshow( 203 | im.get_array(), cmap=im.get_cmap(), norm=LogNorm(), 204 | extent=im.get_extent(), origin=im.origin 205 | ) 206 | new_ax.axis(old_ax.axis()) 207 | new_ax.set_xlabel(old_ax.get_xlabel()) 208 | new_ax.set_ylabel(old_ax.get_ylabel()) 209 | debug_axes[0, col].set_title( 210 | f"Intensity of the 
{hemisphere} hemisphere" if col == 0 211 | else "Shell-masked intensity" 212 | ) 213 | 214 | # extract the masked data 215 | shell_intensity = hemisphere_intensity[shell_mask] 216 | 217 | # check if we have valid data to work with 218 | if shell_intensity.size == 0: 219 | raise ValueError( 220 | "No data points found in the specified radius shell. " 221 | "Try adjusting radius or dr." 222 | ) 223 | 224 | # calculate stereographic projection coordinates 225 | # determine which coordinates to use for the projection 226 | equatorial_plane_axes = [i for i in range(3) if i != projection_axis] 227 | 228 | # Get the coordinates for points in the mask 229 | masked_coordinates = [ 230 | coordinate_meshgrids[i][shell_mask] - centres[i] 231 | for i in range(3) 232 | ] 233 | 234 | # Stereographic projection formula depends on hemisphere selection. 235 | # for upper hemisphere, project from south pole (opposite pole) 236 | # for lower hemisphere, project from north pole (opposite pole) 237 | if select_upper_hemisphere: 238 | denominator = radius + masked_coordinates[projection_axis] 239 | else: 240 | denominator = radius - masked_coordinates[projection_axis] 241 | 242 | # avoid division by zero 243 | safe_denominator = np.where( 244 | np.abs(denominator) < 1e-10, 1e-10, denominator 245 | ) 246 | 247 | # Calculate projections for the two equatorial plane axes0. 248 | # The negative sign ensures correct orientation. 249 | # Scaling by 90 maps to stereographic degrees (0° at center, 90° at rim). 
250 | projected_coordinates = [ 251 | -masked_coordinates[equatorial_plane_axes[0]] / safe_denominator * 90, 252 | -masked_coordinates[equatorial_plane_axes[1]] / safe_denominator * 90 253 | ] 254 | 255 | # make the target 2D grid for interpolation 256 | grid_x, grid_y = np.mgrid[-90:90:resolution*1j, -90:90:resolution*1j] 257 | 258 | # interpolate the intensity values onto the regular 2D grid 259 | projected_intensity = griddata( 260 | (projected_coordinates[0], projected_coordinates[1]), # source points 261 | shell_intensity, # values at those points 262 | (grid_x, grid_y), # target grid points 263 | method="cubic", 264 | fill_value=np.nan 265 | ) 266 | 267 | # generate a plot 268 | _plot_params = { 269 | "cmap": "cet_CET_C9s_r", 270 | "norm": LogNorm(1, 0.8 * np.max(projected_intensity)), 271 | "interpolation": "nearest", 272 | "origin": "lower", 273 | "extent": (-90, 90, -90, 90) 274 | } 275 | if plot_params: 276 | _plot_params.update(plot_params) 277 | 278 | # a colormap that handles nan values properly 279 | _plot_params["cmap"] = plt.get_cmap(_plot_params["cmap"]).copy() 280 | _plot_params["cmap"].set_bad("k") 281 | _plot_params["cmap"].set_under("k") 282 | 283 | fig, ax = plt.subplots(figsize=figsize, layout="tight") 284 | ax.axis("off") 285 | 286 | # plot the interpolated data 287 | im = ax.imshow(projected_intensity, **_plot_params) 288 | add_colorbar(ax, im, label="Intensity", extend="max") # add a colorbar 289 | 290 | # add circles with labels along the diagonal to upper right 291 | angles = [30, 45, 60, 75] 292 | 293 | # position for the labels in radians 45 degrees (upper right diagonal) 294 | label_pos = np.deg2rad(45) 295 | y_shift = 7 # slight shift to avoid overlap with the circle 296 | 297 | circle_params = { 298 | "color": "white", 299 | "fill": False, 300 | "linestyle": "dotted", 301 | "linewidth": 0.75, 302 | } 303 | 304 | for angle in angles: 305 | circle = plt.Circle((0, 0), angle, **circle_params) 306 | ax.add_patch(circle) 307 | 308 | # 
def compute_correlation(data):
    """
    Compute the Pearson correlation matrix between several datasets.

    Each dataset is flattened to a 1D vector, and the correlation is
    computed between those vectors. Works for arrays of any
    dimensionality (the previous implementation hard-coded 3D shapes).

    :param data: sequence of np.ndarray, all with the same shape.

    :return: the (len(data), len(data)) correlation coefficient matrix.
    """
    # one row per dataset, each row being the flattened array
    flattened = np.array([np.ravel(d) for d in data])
    return np.corrcoef(flattened, rowvar=True)


def find_support_reference(supports, show=False):
    """
    Build a reference support by voxel-wise vote over several supports.

    A voxel belongs to the reference support if strictly more than
    ceil(len(supports) / 2) of the input supports contain it.

    :param supports: sequence of binary (0/1) np.ndarray supports, all
        with the same shape.
    :param show: whether to plot the individual supports and the
        resulting reference support.

    :return: the binary (0/1) np.ndarray reference support.
    """
    support_correlation = compute_correlation(supports)
    print("Correlation coefficient between data support: \n",
          support_correlation)

    # voxel-wise vote count
    support_sum = np.zeros(supports[0].shape)
    for support in supports:
        support_sum += support

    # strict threshold: a voxel is kept only if more than
    # ceil(len(supports) / 2) supports agree
    support_reference = np.where(
        support_sum > (len(supports) // 2 + len(supports) % 2),
        1,
        0)

    if show:
        for support in supports:
            plot_3D_object(support, show=False)
        plot_3D_object(support_reference, show=False, title="Reference")
        plt.show()

    return support_reference
"0.65_mode_avg3_apodize_blackman_crystal-frame.npz" 75 | files = [file_template.format(i, i) for i in scan_digits] 76 | else: 77 | files = args["files"] 78 | data = [np.load(f) for f in files] 79 | supports = [d["bulk"] for d in data] 80 | support_reference = find_support_reference(supports) 81 | -------------------------------------------------------------------------------- /src/cdiutils/facetanalysis/facet_utils.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import matplotlib.pyplot as plt 4 | 5 | 6 | def format_plane_name(list): 7 | name = "" 8 | for e in list: 9 | if e < 0: 10 | name += r"$\overline{" 11 | name += r"{}".format(-e) 12 | name += r"}$ " 13 | else: 14 | name += r"${}$ ".format(e) 15 | return name 16 | 17 | 18 | def planes_111_110_100(): 19 | 20 | planes111 = [] 21 | planes110 = [] 22 | planes100 = [] 23 | for i in [-1, 0, 1]: 24 | for j in [-1, 0, 1]: 25 | for k in [-1, 0, 1]: 26 | n = [i, j, k] 27 | if np.linalg.norm(n) == np.linalg.norm([1, 1, 1]): 28 | planes111.append(n) 29 | elif np.linalg.norm(n) == np.linalg.norm([1, 1, 0]): 30 | planes110.append(n) 31 | elif np.linalg.norm(n) == np.linalg.norm([1, 0, 0]): 32 | planes100.append(n) 33 | return planes111, planes110, planes100 34 | 35 | 36 | def get_rotation_matrix(u0, v0, u1, v1): 37 | """Get the rotation matrix between two frames. 
def unit_vector(vector):
    """Return *vector* scaled to unit length."""
    return vector / np.linalg.norm(vector)


def angle_between(u, v):
    """Return the angle (radians, in [0, pi]) between vectors u and v."""
    u, v = unit_vector(u), unit_vector(v)
    # clip guards against rounding slightly outside [-1, 1]
    return np.arccos(np.clip(np.dot(u, v), -1.0, 1.0))


def find_best_matching_normal_index(
        reference,
        normals,
        criterion="angle"):
    """
    Find the index of the normal that best matches the reference.

    :param reference: the reference normal (np.ndarray).
    :param normals: sequence of candidate normals (np.ndarray).
    :param criterion: "angle" to minimise the angle to the reference,
        "difference" to minimise the Euclidean distance.

    :return: index of the best-matching normal in *normals*.
    """
    best_index = 0

    if criterion == "angle":
        lowest_angle = abs(angle_between(reference, normals[0]))
        for i, normal in enumerate(normals):
            angle = abs(angle_between(reference, normal))
            if angle < lowest_angle:
                lowest_angle = angle
                best_index = i

    elif criterion == "difference":
        lowest_difference = np.linalg.norm(reference - normals[0])
        # start=1: enumerating normals[1:] used to yield indices
        # shifted by one, returning the wrong best_index (off-by-one)
        for i, normal in enumerate(normals[1:], start=1):
            difference = np.linalg.norm(reference - normal)
            if difference < lowest_difference:
                lowest_difference = difference
                best_index = i
    return best_index


def get_miller_indices(normal):
    """
    Convert a (roughly unit) facet normal into integer Miller indices.

    Components with absolute value below 0.2 are treated as 0; the
    remaining components are inverted, heuristically snapped (values in
    (1.41, 2.5) map to 2), rounded, and reduced by their gcd.

    :param normal: 1D np.ndarray facet normal.

    :return: list of three integer Miller indices.
    """
    miller_indices = [None, None, None]
    for i in range(normal.shape[0]):
        if abs(normal[i]) < 0.2:
            miller_indices[i] = 0
        else:
            absolute_value = abs(1 / normal[i])
            sign = -1 if normal[i] < 0 else 1
            # heuristic snap for 1/sqrt(2)- and 1/sqrt(3)-like components
            if 1.41 < absolute_value < 2.5:
                absolute_value = 2
            miller_indices[i] = sign * int(round(absolute_value))
    # NOTE(review): a normal whose components are all below 0.2 yields
    # gcd 0 and a division by zero here -- confirm inputs are unit-ish.
    miller_indices = (
        np.array(miller_indices) / np.gcd.reduce(np.array(miller_indices))
    )
    return [int(i) for i in miller_indices.tolist()]


def distance_between_parallel_planes(a, b, c, d1, d2):
    """Distance between planes a*x + b*y + c*z = d1 and = d2."""
    return abs(d2 - d1) / np.sqrt(a**2 + b**2 + c**2)
import numpy as np
from vtk.util.numpy_support import vtk_to_numpy

from cdiutils.facetanalysis.facet_utils import get_miller_indices


def load_vtk(file):
    """
    Get raw data from a .vtk file.

    :param file: path to the .vtk file.

    :return: the vtk data object read from the file.
    """
    # local import: the module header only imports
    # vtk.util.numpy_support, which does NOT bind the 'vtk' name --
    # referencing vtk.vtkGenericDataObjectReader used to raise
    # NameError.
    import vtk

    reader = vtk.vtkGenericDataObjectReader()
    reader.SetFileName(file)
    reader.ReadAllScalarsOn()
    reader.ReadAllVectorsOn()
    reader.ReadAllTensorsOn()
    reader.Update()

    return reader.GetOutput()


def facet_data_from_vtk(
        vtk_data,
        rotation_matrix=None,
        verbose=False):
    """
    Make a dictionary of relevant facet-related data from the
    FacetAnalyzer plugin in Paraview.

    :param vtk_data: the vtk data from Paraview's FacetAnalyzer.
    :param rotation_matrix: the transform matrix (np.array) to switch
        to another frame of reference. (Default is None).
    :param verbose: whether to print out some comments.

    :return: A dictionary of sub dictionary. Every sub dictionary has
        the same keys, and they correspond to the facet ids. Key of the
        main dictionary correspond to a quantity such as strain,
        stress etc.
    """
    disp = {}
    strain = {}
    disp_avg = {}
    strain_avg = {}
    disp_std = {}
    strain_std = {}
    point_coord = {}
    miller_indices = {}
    facet_normals = {}

    point_data = vtk_data.GetPointData()
    cell_data = vtk_data.GetCellData()
    field_data = vtk_data.GetFieldData()

    # Notice that the numbers of FacetIds for CellData and FieldData
    # are different. In the CellData, id 0 is taken into account and
    # corresponds to edges and corners.
    cell_facet_ids = vtk_to_numpy(cell_data.GetArray('FacetIds'))
    facet_ids = np.unique(cell_facet_ids)

    for facet_id in facet_ids:  # renamed from 'id' (shadowed builtin)
        if facet_id == 0:
            continue  # id 0 corresponds to edges and corners

        # field data is indexed from 0 while facet ids start at 1
        normal = vtk_to_numpy(
            field_data.GetArray("facetNormals"))[facet_id - 1]

        # if a rotation matrix is provided, rotate the facet normal
        # and compute the associated Miller indices
        if rotation_matrix is not None:
            facet_normals[facet_id] = np.dot(rotation_matrix, normal)
            miller_indices[facet_id] = get_miller_indices(
                facet_normals[facet_id])
            if verbose:
                print(
                    "Facet id: {}".format(facet_id),
                    "Original facet normal: {}".format(normal),
                    "Rotated facet normal: {}".format(
                        facet_normals[facet_id]),
                    "Miller indices: {}".format(miller_indices[facet_id])
                )
        else:
            facet_normals[facet_id] = normal

        # Get the indices of interest, i.e those corresponding to the
        # current facet.
        indices_oi = np.where(cell_facet_ids == facet_id)[0]
        point_oi_id = []

        for ind in indices_oi:
            cell = vtk_data.GetCell(ind)
            point_oi_id.append(cell.GetPointId(0))
            point_oi_id.append(cell.GetPointId(1))
            point_oi_id.append(cell.GetPointId(2))

        point_oi_id = np.unique(point_oi_id)

        # finally get the disp and strain of the points of interest
        disp[facet_id] = vtk_to_numpy(
            point_data.GetArray("disp"))[point_oi_id]
        strain[facet_id] = vtk_to_numpy(
            point_data.GetArray("strain"))[point_oi_id]
        point_coord[facet_id] = np.array(
            [vtk_data.GetPoint(i) for i in point_oi_id])
        disp_avg[facet_id] = np.mean(disp[facet_id])
        strain_avg[facet_id] = np.mean(strain[facet_id])
        disp_std[facet_id] = np.std(disp[facet_id])
        strain_std[facet_id] = np.std(strain[facet_id])

    return {
        "disp": disp,
        "strain": strain,
        "disp_avg": disp_avg,
        "strain_avg": strain_avg,
        "disp_std": disp_std,
        "strain_std": strain_std,
        "point_coord": point_coord,
        "facet_normals": facet_normals,
        "miller_indices": miller_indices
    }
# CXI axis -> xrayutilities axis correspondence
CXI_TO_XU_TRANSITIONS = {
    "x+": "y+",
    "x-": "y-",
    "y+": "z+",
    "y-": "z-",
    "z+": "x+",
    "z-": "x-",
}


class Geometry:
    """
    A class to handle the geometry of the experiment setup.
    The CXI convention is used here.
    """
    def __init__(
            self,
            sample_circles: list = None,
            detector_circles: list = None,
            detector_axis0_orientation: str = "y-",
            detector_axis1_orientation: str = "x+",
            beam_direction: list = None,
            name: str = None,
            is_cxi: bool = True
    ) -> None:
        """
        Args:
            sample_circles (list, optional): the sample circle axes,
                outermost first. Defaults to None.
            detector_circles (list, optional): the detector circle
                axes, outermost first. Defaults to None.
            detector_axis0_orientation (str, optional): orientation of
                the detector's first (slow) axis. Defaults to "y-".
            detector_axis1_orientation (str, optional): orientation of
                the detector's second (fast) axis. Defaults to "x+".
            beam_direction (list, optional): direction of the incoming
                beam. Defaults to [1, 0, 0].
            name (str, optional): name of the geometry/beamline.
                Defaults to None.
            is_cxi (bool, optional): whether the axes follow the CXI
                convention. Defaults to True.
        """
        self.sample_circles = sample_circles
        self.detector_circles = detector_circles
        self.detector_axis0_orientation = detector_axis0_orientation
        self.detector_axis1_orientation = detector_axis1_orientation
        # avoid a mutable default argument for beam_direction
        if beam_direction is None:
            self.beam_direction = [1, 0, 0]
        else:
            self.beam_direction = beam_direction

        self.name = name

        self.is_cxi = is_cxi

    def to_dict(self) -> dict:
        """Return the attributes of the Geometry instance as a dictionary."""
        return self.__dict__.copy()

    @classmethod
    def from_dict(cls, data) -> "Geometry":
        """Create a Geometry instance from a dictionary."""
        return cls(**data)

    @classmethod
    def from_setup(cls, beamline_setup: str) -> "Geometry":
        """
        Create a Geometry instance using a beamline name.

        Note: the return annotation used to be incorrectly '-> None'.

        Args:
            beamline_setup (str): the beamline name (case-insensitive).

        Raises:
            NotImplementedError: if the beamline is not supported.

        Returns:
            Geometry: the pre-configured geometry for that beamline.
        """
        # Note that we use CXI convention here
        if beamline_setup.lower() in ("id01", "id01spec", "id01bliss"):
            return cls(
                sample_circles=["x-", "y-"],  # eta, phi
                detector_circles=["y-", "x-"],  # nu, delta
                detector_axis0_orientation="y-",
                detector_axis1_orientation="x+",
                beam_direction=[1, 0, 0],
                name="ID01"
            )
        if "p10" in beamline_setup.lower():
            return cls(
                sample_circles=["x-", "y-"],  # om (or samth), phi
                detector_circles=["y+", "x-"],  # gam, del (or e2_t02)
                detector_axis0_orientation="y-",
                detector_axis1_orientation="x+",
                beam_direction=[1, 0, 0],
                name="P10"
            )
        if "sixs" in beamline_setup.lower():
            return cls(
                sample_circles=["x-", "y+"],  # mu, omega
                detector_circles=["y+", "x-"],  # gamma, delta NOT SURE
                detector_axis0_orientation=(
                    "x-" if "2022" in beamline_setup.lower() else "y-"
                ),
                detector_axis1_orientation=(
                    "y-" if "2022" in beamline_setup.lower() else "x+"
                ),
                beam_direction=[1, 0, 0],
                name="SIXS"
            )
        if beamline_setup.lower() == "nanomax":
            return cls(
                sample_circles=["x-", "y-"],  # gontheta, gonphi
                detector_circles=["y-", "x-"],  # gamma, delta
                detector_axis0_orientation="y-",
                detector_axis1_orientation="x-",
                beam_direction=[1, 0, 0],
                name="NanoMAX"
            )
        if beamline_setup.lower() == "cristal":
            # OK FOR omega/delta but not for the two others
            return cls(
                sample_circles=["x-", "y+"],  # omega, phi
                detector_circles=["y+", "x-"],  # gamma, delta
                detector_axis0_orientation="y-",
                detector_axis1_orientation="x+",
                beam_direction=[1, 0, 0],
                name="CRISTAL"
            )

        if beamline_setup.lower() == "id27":
            return cls(
                sample_circles=["x-", "y-"],  # In plane rotation only
                detector_circles=["y-", "x-"],  # no circle, values dummy
                detector_axis0_orientation="y-",
                detector_axis1_orientation="x-",
                beam_direction=[1, 0, 0],
                name="ID27"
            )
        # message now also lists CRISTAL, which is supported above
        raise NotImplementedError(
            f"The beamline_setup {beamline_setup} is not valid. Available:\n"
            "'ID01', 'ID01SPEC', 'ID27', 'P10', 'P10EH2', 'SIXS2022', "
            "'CRISTAL' and 'NanoMAX'."
        )

    def cxi_to_xu(self) -> None:
        """
        Convert the CXI circle axes to the xrayutilities coordinates system
        """
        self.sample_circles = [
            CXI_TO_XU_TRANSITIONS[v] for v in self.sample_circles
        ]
        self.detector_circles = [
            CXI_TO_XU_TRANSITIONS[v] for v in self.detector_circles
        ]
        self.detector_axis0_orientation = CXI_TO_XU_TRANSITIONS[
            self.detector_axis0_orientation
        ]
        self.detector_axis1_orientation = CXI_TO_XU_TRANSITIONS[
            self.detector_axis1_orientation
        ]
        self.is_cxi = False

    def __repr__(self) -> str:
        return (
            f"{self.name} geometry:\n"
            f"{self.sample_circles=}\n"
            f"{self.detector_circles=}\n"
            f"{self.detector_axis0_orientation=}\n"
            f"{self.detector_axis1_orientation=}\n"
            f"{self.beam_direction=}\n"
            f"{self.is_cxi=}\n"
        )
"""Loader for the Cristal beamline at SOLEIL.""" 2 | 3 | 4 | import numpy as np 5 | 6 | from cdiutils.io.loader import H5TypeLoader, h5_safe_load 7 | 8 | 9 | class CristalLoader(H5TypeLoader): 10 | """ 11 | A class to handle loading/reading .h5 files that were created at the 12 | Cristal beamline. 13 | 14 | Args: 15 | experiment_file_path (str): path to the master file 16 | used for the experiment. 17 | flat_field (np.ndarray | str, optional): flat field to 18 | account for the non homogeneous counting of the 19 | detector. Defaults to None. 20 | alien_mask (np.ndarray | str, optional): array to mask the 21 | aliens. Defaults to None. 22 | """ 23 | 24 | angle_names = { 25 | "sample_outofplane_angle": "i06-c-c07-ex-mg_omega", 26 | "sample_inplane_angle": "i06-c-c07-ex-mg_phi", 27 | "detector_outofplane_angle": "Diffractometer/i06-c-c07-ex-dif-delta", 28 | "detector_inplane_angle": "Diffractometer/i06-c-c07-ex-dif-gamma" 29 | } 30 | authorised_detector_names = ("maxipix", ) 31 | 32 | def __init__( 33 | self, 34 | experiment_file_path: str, 35 | scan: int = None, 36 | flat_field: np.ndarray | str = None, 37 | alien_mask: np.ndarray | str = None, 38 | **kwargs 39 | ) -> None: 40 | """ 41 | Initialise NanoMaxLoader with experiment data file path and 42 | detector information. 43 | 44 | Args: 45 | experiment_file_path (str): path to the master file 46 | used for the experiment. 47 | scan (int, optional): scan number. Defaults to None. 48 | flat_field (np.ndarray | str, optional): flat field to 49 | account for the non homogeneous counting of the 50 | detector. Defaults to None. 51 | alien_mask (np.ndarray | str, optional): array to mask the 52 | aliens. Defaults to None. 
53 | """ 54 | super().__init__( 55 | experiment_file_path, 56 | scan=scan, 57 | flat_field=flat_field, 58 | alien_mask=alien_mask 59 | ) 60 | 61 | @h5_safe_load 62 | def load_detector_data( 63 | self, 64 | scan: int = None, 65 | roi: tuple[slice] = None, 66 | rocking_angle_binning: int = None, 67 | binning_method: str = "sum" 68 | ) -> np.ndarray: 69 | scan, _ = self._check_scan_sample(scan) 70 | 71 | # First, find the key that corresponds to the detector data 72 | for k in self.h5file[f"exp_{scan:04d}/scan_data"]: 73 | if self.h5file[f"exp_{scan:04d}/scan_data"][k].ndim == 3: 74 | data_key = k 75 | 76 | key_path = f"exp_{scan:04d}/scan_data/{data_key}" 77 | 78 | roi = self._check_roi(roi) 79 | 80 | try: 81 | if rocking_angle_binning: 82 | # we first apply the roi for axis1 and axis2 83 | data = self.h5file[key_path][(slice(None), roi[1], roi[2])] 84 | else: 85 | data = self.h5file[key_path][roi] 86 | except KeyError as exc: 87 | raise KeyError( 88 | f"key_path is wrong (key_path='{key_path}'). " 89 | "Are sample_name, scan number or detector name correct?" 
90 | ) from exc 91 | 92 | return self.bin_flat_mask( 93 | data, 94 | roi, 95 | self.flat_field, 96 | self.alien_mask, 97 | rocking_angle_binning, 98 | binning_method 99 | ) 100 | 101 | @h5_safe_load 102 | def load_motor_positions( 103 | self, 104 | scan: int = None, 105 | roi: tuple[slice] = None, 106 | rocking_angle_binning: int = None, 107 | ) -> dict: 108 | scan, _ = self._check_scan_sample(scan) 109 | 110 | key_path = f"exp_{scan:04d}" 111 | key_path_template = key_path + "/CRISTAL/{}/position" 112 | 113 | angles = {} 114 | for angle, name in CristalLoader.angle_names.items(): 115 | angles[angle] = float( 116 | self.h5file[key_path_template.format(name)][()] 117 | ) 118 | 119 | # Get the motor name used for the rocking curve 120 | rocking_motor = self.h5file[ 121 | key_path + "/scan_config/name" 122 | ][()].decode("utf-8")[-4:] # here, only the last 3 char are needed 123 | # Get the associated motor values 124 | rocking_values = self.h5file[key_path + "/scan_data/actuator_1_1"][()] 125 | 126 | if rocking_angle_binning: 127 | rocking_values = self.bin_rocking_angle_values( 128 | rocking_values, rocking_angle_binning 129 | ) 130 | 131 | # take care of the roi 132 | if isinstance(roi, (tuple, list)): 133 | if len(roi) == 2: 134 | roi = slice(None) 135 | else: 136 | roi = roi[0] 137 | elif roi is None: 138 | roi = slice(None) 139 | elif not isinstance(roi, slice): 140 | raise ValueError( 141 | f"roi should be tuple of slices, or a slice, not {type(roi)}" 142 | ) 143 | rocking_values = rocking_values[roi] 144 | 145 | # replace the value for the rocking angle by the array of values 146 | for angle, name in CristalLoader.angle_names.items(): 147 | if name.endswith(rocking_motor): 148 | angles[angle] = rocking_values 149 | return angles 150 | 151 | @h5_safe_load 152 | def load_energy(self, scan: int = None) -> float: 153 | scan, _ = self._check_scan_sample(scan) 154 | key_path = f"exp_{scan:04d}/CRISTAL/Monochromator/energy" 155 | return self.h5file[key_path][0] * 1e3 
import warnings

import numpy as np

from cdiutils.io.loader import H5TypeLoader, h5_safe_load
from cdiutils.utils import wavelength_to_energy


class ID27Loader(H5TypeLoader):
    """
    A class to handle loading/reading .h5 files that were created using
    Bliss on the ID27 beamline.
    """

    # ID27 only has an in-plane sample rotation; the other circles have
    # no associated motor and default to 0.
    angle_names = {
        "sample_outofplane_angle": None,
        "sample_inplane_angle": "nath",
        "detector_outofplane_angle": None,
        "detector_inplane_angle": None
    }
    authorised_detector_names = ("eiger", )

    def __init__(
            self,
            experiment_file_path: str,
            scan: int = None,
            sample_name: str = None,
            detector_name: str = None,
            flat_field: np.ndarray | str = None,
            alien_mask: np.ndarray | str = None,
            **kwargs
    ) -> None:
        """
        Initialise ID27Loader with experiment data file path and
        detector information.

        Args:
            experiment_file_path (str): path to the bliss master file
                used for the experiment.
            scan (int, optional): the scan number. Defaults to None.
            sample_name (str, optional): name of the sample. Defaults
                to None.
            detector_name (str): name of the detector.
            flat_field (np.ndarray | str, optional): flat field to
                account for the non homogeneous counting of the
                detector. Defaults to None.
            alien_mask (np.ndarray | str, optional): array to mask the
                aliens. Defaults to None.
        """
        super().__init__(
            experiment_file_path,
            scan,
            sample_name,
            detector_name,
            flat_field,
            alien_mask
        )

    @h5_safe_load
    def load_detector_data(
            self,
            scan: int = None,
            sample_name: str = None,
            roi: tuple[slice] = None,
            rocking_angle_binning: int = None,
            binning_method: str = "sum"
    ) -> np.ndarray:
        """
        Load the detector data.

        Args:
            scan (int): the scan number. Defaults to None.
            sample_name (str, optional): the sample name.
                Defaults to None.
            roi (tuple[slice], optional): the region of interest to
                light load the data. Defaults to None.
            rocking_angle_binning (int, optional): the factor for the
                binning along the rocking curve axis. Defaults to None.
            binning_method (str, optional): the method for the binning
                along the rocking curve axis. Defaults to "sum".

        Raises:
            KeyError: if the key path is incorrect.

        Returns:
            np.ndarray: the detector data.
        """
        scan, sample_name = self._check_scan_sample(scan, sample_name)

        key_path = (
            "_".join((sample_name, str(scan)))
            + f".1/measurement/{self.detector_name}"
        )
        roi = self._check_roi(roi)
        try:
            if rocking_angle_binning:
                # we first apply the roi for axis1 and axis2
                data = self.h5file[key_path][(slice(None), roi[1], roi[2])]
            else:
                data = self.h5file[key_path][roi]
        except KeyError as exc:
            raise KeyError(
                f"key_path is wrong (key_path='{key_path}'). "
                "Are sample_name, scan number or detector name correct?"
            ) from exc

        if self.detector_name in ("eiger", "eiger9m", "e9m"):
            # apply the detector mask to the Eiger data: masked
            # pixels are set to 0
            mask = self.get_mask(
                channel=data.shape[0],
                detector_name="e9m",
                roi=(slice(None), roi[1], roi[2])
            )
            data = data * np.where(mask, 0, 1)

        return self.bin_flat_mask(
            data,
            roi,
            self.flat_field,
            self.alien_mask,
            rocking_angle_binning,
            binning_method
        )

    @h5_safe_load
    def load_motor_positions(
            self,
            scan: int = None,
            sample_name: str = None,
            roi: tuple[slice] = None,
            rocking_angle_binning: int = None,
    ) -> dict:
        """
        Load the motor positions, i.e diffractometer angles associated
        with a scan.

        Args:
            scan (int): the scan number. Defaults to None.
            sample_name (str, optional): the sample name.
                Defaults to None.
            roi (tuple[slice], optional): the region of interest.
                Defaults to None.
            rocking_angle_binning (int, optional): the factor for the
                binning along the rocking curve axis. Defaults to None.

        Returns:
            dict: the four diffractometer angles.
        """
        scan, sample_name = self._check_scan_sample(scan, sample_name)

        angles = self.load_angles(
            key_path=f"{sample_name}_{scan}.1/instrument/positioners/"
        )

        # missing motors (no circle on ID27) default to 0.
        formatted_angles = {
            key: angles[name] if angles.get(name) is not None else 0.
            for key, name in ID27Loader.angle_names.items()
        }

        self.rocking_angle = "sample_inplane_angle"
        formatted_angles[self.rocking_angle] = self.bin_rocking_angle_values(
            formatted_angles[self.rocking_angle], rocking_angle_binning
        )
        # take care of the roi: keep only the rocking-axis component
        if isinstance(roi, (tuple, list)):
            if len(roi) == 2:
                roi = slice(None)
            else:
                roi = roi[0]
        elif roi is None:
            roi = slice(None)
        elif not isinstance(roi, slice):
            raise ValueError(
                f"roi should be tuple of slices, or a slice, not {type(roi)}"
            )

        formatted_angles[
            self.rocking_angle
        ] = formatted_angles[self.rocking_angle][roi]

        return formatted_angles

    @h5_safe_load
    def get_detector_name(self) -> str:
        """
        Find the detector name by inspecting the measurement group of
        the first scan.

        Raises:
            ValueError: if no, or more than one, authorised detector
                name is found.

        Returns:
            str: the detector name.
        """
        key_path = ("_".join((self.sample_name, "1")) + ".1/measurement/")
        detector_names = []
        for key in self.h5file[key_path]:
            if key in self.authorised_detector_names:
                detector_names.append(key)
        if len(detector_names) == 0:
            # a space was missing after 'in' in this message
            raise ValueError(
                f"No detector name found in {self.authorised_detector_names}"
            )
        if len(detector_names) > 1:
            raise ValueError(
                f"Several detector names found ({detector_names}).\n"
                "Not handled yet."
            )
        return detector_names[0]

    @h5_safe_load
    def load_det_calib_params(self) -> dict:
        """
        Detector calibration parameters stored in ID27 files are not
        reliable, so nothing is loaded. Run the detector calibration
        notebook to get the direct beam position, the sample-to-detector
        distance, the pixel size and the detector tilt angles.

        Returns:
            None: always.
        """
        return None

    @h5_safe_load
    def load_detector_shape(
            self,
            scan: int = None,
            sample_name: str = None,
    ) -> tuple:
        """
        Return the original detector data shape (frames, rows, columns)
        for the Eiger 9M, or None if it cannot be determined.
        """
        scan, sample_name = self._check_scan_sample(scan, sample_name)
        if self.detector_name in ("eiger", "eiger9m", "e9m"):
            shape = (3262, 3108)  # Eiger 9M sensor size
            key_path = f"{sample_name}_{scan}.1/instrument/eiger/acq_nb_frames"
            try:
                return (int(self.h5file[key_path][()]), ) + shape
            except KeyError:
                print("Could not load original detector data shape.")
        return None

    @h5_safe_load
    def load_energy(
            self,
            scan: int = None,
            sample_name: str = None
    ) -> float:
        """
        Load the beam energy (eV) from the stored calibration
        wavelength, or None if it is not present.
        """
        scan, sample_name = self._check_scan_sample(scan, sample_name)

        key_path = f"{sample_name}_{scan}.1/instrument/calibration/"
        try:
            # Convert from angstrom to m
            return wavelength_to_energy(
                float(self.h5file[key_path + "wavelength"][()]) * 1e-10
            )
        except KeyError:
            warnings.warn(f"Energy not found at {key_path + 'wavelength'}. ")
            return None
23 | flat_field (np.ndarray | str, optional): flat field to 24 | account for the non homogeneous counting of the 25 | detector. Defaults to None. 26 | alien_mask (np.ndarray | str, optional): array to mask the 27 | aliens. Defaults to None. 28 | """ 29 | 30 | angle_names = { 31 | "sample_outofplane_angle": "gontheta", 32 | "sample_inplane_angle": "gonphi", 33 | "detector_outofplane_angle": "delta", 34 | "detector_inplane_angle": "gamma" 35 | } 36 | authorised_detector_names = ("eiger500k", ) 37 | 38 | def __init__( 39 | self, 40 | experiment_file_path: str, 41 | detector_name: str = "eiger500k", 42 | flat_field: np.ndarray | str = None, 43 | alien_mask: np.ndarray | str = None, 44 | **kwargs 45 | ) -> None: 46 | """ 47 | Initialise NanoMaxLoader with experiment data file path and 48 | detector information. 49 | 50 | Args: 51 | experiment_file_path (str): path to the scan file. 52 | detector_name (str): name of the detector. 53 | flat_field (np.ndarray | str, optional): flat field to 54 | account for the non homogeneous counting of the 55 | detector. Defaults to None. 56 | alien_mask (np.ndarray | str, optional): array to mask the 57 | aliens. Defaults to None. 58 | """ 59 | super().__init__( 60 | experiment_file_path, 61 | detector_name=detector_name, 62 | flat_field=flat_field, 63 | alien_mask=alien_mask 64 | ) 65 | 66 | @h5_safe_load 67 | def load_detector_data( 68 | self, 69 | roi: tuple[slice] = None, 70 | rocking_angle_binning: int = None, 71 | binning_method: str = "sum" 72 | ) -> np.ndarray: 73 | """ 74 | Main method to load the detector data (collected intensity). 75 | 76 | Args: 77 | roi (tuple[slice], optional): the region of interest of the 78 | detector to load. Defaults to None. 79 | rocking_angle_binning (int, optional): whether to bin the data 80 | along the rocking curve axis. Defaults to None. 81 | binning_method (str, optional): the method employed for the 82 | binning. It can be sum or "mean". Defaults to "sum". 
83 | 84 | Returns: 85 | np.ndarray: the detector data. 86 | """ 87 | # Where to find the data. 88 | key_path = ( 89 | f"/entry/measurement/{self.detector_name}/frames" 90 | ) 91 | roi = self._check_roi(roi) 92 | try: 93 | if rocking_angle_binning: 94 | # we first apply the roi for axis1 and axis2 95 | data = self.h5file[key_path][(slice(None), roi[1], roi[2])] 96 | else: 97 | data = self.h5file[key_path][roi] 98 | except KeyError as exc: 99 | raise KeyError( 100 | f"key_path is wrong (key_path='{key_path}'). " 101 | "Are sample_name, scan number or detector name correct?" 102 | ) from exc 103 | 104 | return self.bin_flat_mask( 105 | data, 106 | roi, 107 | self.flat_field, 108 | self.alien_mask, 109 | rocking_angle_binning, 110 | binning_method 111 | ) 112 | 113 | @h5_safe_load 114 | def load_motor_positions( 115 | self, 116 | roi: tuple[slice] = None, 117 | rocking_angle_binning: int = None, 118 | ) -> dict: 119 | roi = self._check_roi(roi) 120 | roi = roi[0] 121 | 122 | key_path = "entry/snapshots/post_scan/" 123 | angles = {key: None for key in NanoMAXLoader.angle_names} 124 | 125 | for angle, name in NanoMAXLoader.angle_names.items(): 126 | angles[angle] = self.h5file[key_path + name][()] 127 | 128 | # Take care of the rocking curve angle 129 | for angle in ("gonphi", "gontheta"): 130 | if angle in self.h5file["entry/measurement"].keys(): 131 | rocking_angle_name = angle 132 | if rocking_angle_binning: 133 | rocking_angle_values = self.h5file["entry/measurement"][angle][ 134 | () 135 | ] 136 | else: 137 | rocking_angle_values = self.h5file["entry/measurement"][angle][ 138 | roi 139 | ] 140 | # Find what generic angle (in-plane or out-of-plane) it 141 | # corresponds to. 
142 | for angle, name in NanoMAXLoader.angle_names.items(): 143 | if name == rocking_angle_name: 144 | rocking_angle = angle 145 | 146 | self.rocking_angle = rocking_angle 147 | angles[rocking_angle] = rocking_angle_values 148 | 149 | angles[self.rocking_angle] = self.bin_rocking_angle_values( 150 | angles[self.rocking_angle] 151 | ) 152 | if roi and rocking_angle_binning: 153 | angles[self.rocking_angle] = angles[self.rocking_angle][roi] 154 | return angles 155 | 156 | @h5_safe_load 157 | def load_energy(self) -> float: 158 | """ 159 | Load and return the energy used during the experiment. 160 | 161 | Args: 162 | scan (int): the scan number of the file to load the energy 163 | from. 164 | 165 | Returns: 166 | float: the energy value in keV. 167 | """ 168 | return self.h5file["entry/snapshots/post_scan/energy"][0] 169 | 170 | def load_det_calib_params(self) -> dict: 171 | return None 172 | 173 | @h5_safe_load 174 | def load_detector_shape(self, scan: int = None) -> tuple: 175 | return None -------------------------------------------------------------------------------- /src/cdiutils/io/p10.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import silx.io.h5py_utils 3 | 4 | from cdiutils.io import Loader 5 | 6 | 7 | class P10Loader(Loader): 8 | """A class for loading data from P10 beamline experiments.""" 9 | 10 | angle_names = { 11 | "sample_outofplane_angle": "om", 12 | "sample_inplane_angle": "phi", 13 | "detector_outofplane_angle": "del", 14 | "detector_inplane_angle": "gam" 15 | } 16 | authorised_detector_names = ("eiger4m", ) 17 | 18 | def __init__( 19 | self, 20 | experiment_data_dir_path: str, 21 | scan: int = None, 22 | sample_name: str = None, 23 | detector_name: str = None, 24 | flat_field: np.ndarray | str = None, 25 | alien_mask: np.ndarray | str = None, 26 | hutch: str = "EH1", 27 | **kwargs 28 | ) -> None: 29 | """ 30 | Initialise P10Loader with experiment data directory path and 31 | 
detector information. 32 | 33 | Args: 34 | experiment_data_dir_path (str): path to the experiment data 35 | directory. 36 | detector_name (str): name of the detector. 37 | sample_name (str, optional): name of the sample. Defaults to 38 | None. 39 | flat_field (np.ndarray | str, optional): flat field to 40 | account for the non homogeneous counting of the 41 | detector. Defaults to None. 42 | alien_mask (np.ndarray | str, optional): array to mask the 43 | aliens. Defaults to None. 44 | """ 45 | self.experiment_data_dir_path = experiment_data_dir_path 46 | super().__init__(scan, sample_name, flat_field, alien_mask) 47 | self.detector_name = detector_name 48 | 49 | if hutch.lower() == "eh2": 50 | self.angle_names["sample_outofplane_angle"] = "samth" 51 | self.angle_names["detector_outofplane_angle"] = "e2_t02" 52 | self.angle_names["sample_inplane_angle"] = None 53 | self.angle_names["detector_inplane_angle"] = None 54 | elif hutch.lower() != "eh1": 55 | raise ValueError( 56 | f"Hutch name (hutch={hutch}) is not valid. Can be 'EH1' or " 57 | "'EH2'." 58 | ) 59 | 60 | def _get_file_path( 61 | self, 62 | scan: int, 63 | sample_name: str, 64 | data_type: str = "detector_data" 65 | ) -> str: 66 | """ 67 | Get the file path based on scan number, sample name, and data 68 | type. 69 | 70 | Args: 71 | scan (int): Scan number. 72 | sample_name (str): Name of the sample. 73 | data_type (str, optional): Type of data. Defaults to 74 | "detector_data". 75 | 76 | Returns: 77 | str: File path. 78 | """ 79 | if data_type == "detector_data": 80 | return ( 81 | self.experiment_data_dir_path 82 | + f"/{sample_name}_{scan:05d}" 83 | + f"/{self.detector_name}" 84 | + f"/{sample_name}_{scan:05d}_master.h5" 85 | ) 86 | if data_type == "motor_positions": 87 | return ( 88 | self.experiment_data_dir_path 89 | + f"/{sample_name}_{scan:05d}" 90 | + f"/{sample_name}_{scan:05d}.fio" 91 | ) 92 | raise ValueError( 93 | f"data_type {data_type} is not valid. 
Must be either detector_data" 94 | " or motor_positions." 95 | ) 96 | 97 | def load_detector_data( 98 | self, 99 | scan: int = None, 100 | sample_name: str = None, 101 | roi: tuple[slice] = None, 102 | rocking_angle_binning: int = None, 103 | binning_method: str = "sum" 104 | ) -> None: 105 | """ 106 | Load detector data for a given scan and sample. 107 | 108 | Args: 109 | scan (int): Scan number. Defaults to None. 110 | sample_name (str, optional): Name of the sample. Defaults to 111 | None. 112 | roi (tuple, optional): Region of interest. Defaults to None. 113 | rocking_angle_binning (int, optional): Binning factor along 114 | axis 0. Defaults to None. 115 | binning_method (str, optional): Binning method. Defaults to 116 | "sum". 117 | 118 | Returns: 119 | numpy.ndarray: Loaded detector data. 120 | """ 121 | scan, sample_name = self._check_scan_sample(scan, sample_name) 122 | 123 | path = self._get_file_path(scan, sample_name) 124 | key_path = "entry/data/data_000001" 125 | 126 | roi = self._check_roi(roi) 127 | 128 | with silx.io.h5py_utils.File(path) as h5file: 129 | if rocking_angle_binning: 130 | data = h5file[key_path][()] 131 | else: 132 | data = h5file[key_path][roi] 133 | 134 | data = self.bin_flat_mask( 135 | data, 136 | roi, 137 | self.flat_field, 138 | self.alien_mask, 139 | rocking_angle_binning, 140 | binning_method 141 | ) 142 | 143 | # Must apply mask on P10 Eiger data 144 | mask = self.get_mask( 145 | channel=data.shape[0], 146 | detector_name=self.detector_name, 147 | roi=(slice(None), roi[1], roi[2]) 148 | ) 149 | data = data * np.where(mask, 0, 1) 150 | 151 | return data 152 | 153 | def load_motor_positions( 154 | self, 155 | scan: int = None, 156 | sample_name: str = None, 157 | roi: tuple[slice] = None, 158 | rocking_angle_binning: int = None, 159 | ) -> None: 160 | """ 161 | Load motor positions for a given scan and sample. 162 | 163 | Args: 164 | scan (int): Scan number. Defaults to None. 
165 | sample_name (str, optional): Name of the sample. Defaults 166 | to None. 167 | roi (tuple, optional): Region of interest. Defaults to None. 168 | rocking_angle_binning (int, optional): Binning factor along 169 | axis 0. Defaults to None. 170 | 171 | Returns: 172 | dict: Dictionary containing motor positions. 173 | """ 174 | scan, sample_name = self._check_scan_sample(scan, sample_name) 175 | 176 | path = self._get_file_path( 177 | scan, 178 | sample_name, 179 | data_type="motor_positions" 180 | ) 181 | if roi is None or len(roi) == 2: 182 | roi = slice(None) 183 | elif len(roi) == 3: 184 | roi = roi[0] 185 | 186 | angles = {name: None for name in self.angle_names.values()} 187 | 188 | rocking_angle_values = [] 189 | 190 | with open(path, encoding="utf8") as fio_file: 191 | lines = fio_file.readlines() 192 | rocking_angle_column = None 193 | for line in lines: 194 | line = line.strip() 195 | words = line.split() 196 | 197 | for name in self.angle_names.values(): 198 | if name in words: 199 | if "=" in words: 200 | angles[name] = float(words[-1]) 201 | if "Col" in words and rocking_angle_column is None: 202 | rocking_angle_column = int(words[1]) - 1 203 | rocking_angle = words[2] 204 | 205 | for line in lines: 206 | line = line.strip() 207 | words = line.split() 208 | 209 | # check if the first word is numeric, if True the line 210 | # contains motor position values 211 | # if words[0].replace(".", "", 1).isdigit(): 212 | if words[0].replace(".", "").replace("-", "").isnumeric(): 213 | rocking_angle_values.append( 214 | float(words[rocking_angle_column]) 215 | ) 216 | if "e2_t02" in angles: 217 | # This means that 'e2_t02' must be used as the 218 | # detector out-of-plane angle. 
219 | angles["e2_t02"] = float(words[1]) 220 | self.rocking_angle = rocking_angle 221 | angles[rocking_angle] = np.array(rocking_angle_values) 222 | for name in angles: 223 | if angles[name] is None: 224 | angles[name] = 0 225 | 226 | angles[self.rocking_angle] = self.bin_rocking_angle_values( 227 | angles[self.rocking_angle], rocking_angle_binning 228 | ) 229 | if roi: 230 | angles[rocking_angle] = angles[rocking_angle][roi] 231 | 232 | return { 233 | angle: angles[name] 234 | for angle, name in self.angle_names.items() 235 | } 236 | 237 | def load_energy(self, scan: int = None, sample_name: str = None) -> float: 238 | scan, sample_name = self._check_scan_sample(scan, sample_name) 239 | path = self._get_file_path( 240 | scan, 241 | sample_name, 242 | data_type="motor_positions" 243 | ) 244 | with open(path, encoding="utf8") as fio_file: 245 | lines = fio_file.readlines() 246 | for line in lines: 247 | line = line.strip() 248 | words = line.split() 249 | if "fmbenergy" in words: 250 | return float(words[-1]) 251 | return None 252 | 253 | def load_det_calib_params(self) -> dict: 254 | return None 255 | 256 | def load_detector_shape(self, scan: int = None) -> tuple: 257 | return None 258 | -------------------------------------------------------------------------------- /src/cdiutils/io/sixs.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import h5py 3 | 4 | from cdiutils.io.loader import H5TypeLoader, h5_safe_load 5 | 6 | 7 | class SIXSLoader(H5TypeLoader): 8 | """A class for loading data from SIXS beamline experiments.""" 9 | 10 | angle_names = { 11 | "sample_outofplane_angle": "mu", 12 | "sample_inplane_angle": "omega", 13 | "detector_outofplane_angle": "gamma", 14 | "detector_inplane_angle": "delta" 15 | } 16 | authorised_detector_names = ("maxipix", ) 17 | 18 | def __init__( 19 | self, 20 | experiment_file_path: str, 21 | scan: int = None, 22 | sample_name: str = None, 23 | detector_name: str = None, 
24 | flat_field: np.ndarray | str = None, 25 | alien_mask: np.ndarray | str = None, 26 | version: str = None, 27 | **kwargs 28 | ) -> None: 29 | """ 30 | Initialise SIXSLoader with experiment data directory path and 31 | detector information. 32 | 33 | Args: 34 | experiment_file_path (str): path to the experiment file. 35 | detector_name (str): name of the detector. 36 | sample_name (str, optional): name of the sample. Defaults to 37 | None. 38 | flat_field (np.ndarray | str, optional): flat field to 39 | account for the non homogeneous counting of the 40 | detector. Defaults to None. 41 | alien_mask (np.ndarray | str, optional): array to mask the 42 | aliens. Defaults to None. 43 | version (str, optional): the version of the loader. Defaults 44 | to None. 45 | """ 46 | self.version = version 47 | if version is None: 48 | self.version = "2022" 49 | super().__init__( 50 | experiment_file_path, 51 | scan, 52 | sample_name, 53 | detector_name, 54 | flat_field, 55 | alien_mask 56 | ) 57 | 58 | @h5_safe_load 59 | def load_detector_data( 60 | self, 61 | scan: int = None, 62 | sample_name: str = None, 63 | roi: tuple[slice] = None, 64 | rocking_angle_binning: int = None, 65 | binning_method: str = "sum" 66 | ) -> np.ndarray: 67 | """ 68 | Load detector data for a given scan and sample. 69 | 70 | Args: 71 | scan (int, optional): Scan number. 72 | sample_name (str, optional): Name of the sample. Defaults to 73 | None. 74 | roi (tuple, optional): Region of interest. Defaults to None. 75 | rocking_angle_binning (int, optional): Binning factor along 76 | axis 0. Defaults to None. 77 | binning_method (str, optional): Binning method. Defaults to 78 | "sum". 79 | 80 | Returns: 81 | np.ndarray: Loaded detector data. 
82 | """ 83 | scan, sample_name = self._check_scan_sample(scan, sample_name) 84 | 85 | key_path = self._get_detector_key_path(self.h5file) 86 | 87 | roi = self._check_roi(roi) 88 | 89 | if rocking_angle_binning: 90 | # we first apply the roi for axis1 and axis2 91 | data = self.h5file[key_path][(slice(None), roi[1], roi[2])] 92 | else: 93 | data = self.h5file[key_path][roi] 94 | 95 | return self.bin_flat_mask( 96 | data, 97 | roi, 98 | self.flat_field, 99 | self.alien_mask, 100 | rocking_angle_binning, 101 | binning_method 102 | ) 103 | 104 | @h5_safe_load 105 | def load_motor_positions( 106 | self, 107 | scan: int = None, 108 | sample_name: str = None, 109 | roi: tuple[slice] = None, 110 | rocking_angle_binning: int = None, 111 | ) -> dict: 112 | """ 113 | Load the motor positions, i.e diffractometer angles associated 114 | with a scan. 115 | 116 | Args: 117 | scan (int, optional): the scan number. Defaults to None. 118 | sample_name (str, optional): the sample name. 119 | Defaults to None. 120 | roi (tuple[slice], optional): the region of interest. 121 | Defaults to None. 122 | rocking_angle_binning (int, optional): the factor for the 123 | binning along the rocking curve axis. Defaults to None. 124 | 125 | Returns: 126 | dict: the four diffractometer angles. 
127 | """ 128 | scan, sample_name = self._check_scan_sample(scan, sample_name) 129 | 130 | if roi is None or len(roi) == 2: 131 | roi = slice(None) 132 | elif len(roi) == 3: 133 | roi = roi[0] 134 | 135 | angles = {key: None for key in self.angle_names} 136 | 137 | for angle, name in self.angle_names.items(): 138 | motor_key_path = self._get_motor_key_path(self.h5file, name) 139 | angles[angle] = self.h5file[motor_key_path][()] 140 | 141 | # take care of the rocking angle 142 | self.rocking_angle = "sample_outofplane_angle" 143 | if self.version == "2019": 144 | node_name = "data_07" 145 | elif self.version == "2022": 146 | node_name = "actuator_1_1" 147 | else: 148 | raise ValueError(f"Version {self.version} not supported yet.") 149 | angles[self.rocking_angle] = self.h5file[ 150 | f"com/scan_data/{node_name}" 151 | ][()] 152 | 153 | if rocking_angle_binning: 154 | angles[self.rocking_angle] = self.bin_rocking_angle_values( 155 | angles[self.rocking_angle], rocking_angle_binning 156 | ) 157 | 158 | # take care of the roi 159 | if isinstance(roi, (tuple, list)): 160 | if len(roi) == 2: 161 | roi = slice(None) 162 | else: 163 | roi = roi[0] 164 | elif roi is None: 165 | roi = slice(None) 166 | elif not isinstance(roi, slice): 167 | raise ValueError( 168 | f"roi should be tuple of slices, or a slice, not {type(roi)}" 169 | ) 170 | angles[self.rocking_angle] = angles[self.rocking_angle][roi] 171 | 172 | return angles 173 | 174 | @staticmethod 175 | def _get_detector_key_path(h5file: h5py.File) -> str: 176 | """ 177 | Get the key path for the detector data. 178 | 179 | Args: 180 | h5file (h5py.File): the h5 file to search in. 181 | 182 | Returns: 183 | str: the key path. 
184 | """ 185 | key_path = "com/scan_data/" 186 | for key in h5file[key_path]: 187 | data = h5file[key_path + key][()] 188 | if isinstance(data, np.ndarray) and data.ndim == 3: 189 | return key_path + key 190 | raise ValueError("No detector data found in the file.") 191 | 192 | def _get_motor_key_path(self, h5file: h5py.File, name: str) -> str: 193 | """ 194 | Get the key path for the motor data. 195 | 196 | Args: 197 | h5file (h5py.File): the h5 file to search in. 198 | name (str): the angle name to search for. 199 | 200 | Returns: 201 | str: the key path. 202 | """ 203 | if self.version == "2022": 204 | key_path = "com/SIXS/i14-c-cx1-ex-med-v-dif-group.1" 205 | for key in h5file[key_path]: 206 | if name in key: 207 | return key_path + f"/{key}/position" 208 | if self.version == "2019": 209 | key_path = "com/SIXS/" 210 | for key in h5file[key_path]: 211 | if name in key: 212 | return key_path + key + "/position_pre" 213 | raise ValueError("No motor data found in the file.") 214 | 215 | def load_det_calib_params(self) -> dict: 216 | return None 217 | 218 | @h5_safe_load 219 | def load_energy(self, scan: int = None) -> float: 220 | """ 221 | Load the energy of the beamline. 222 | 223 | Args: 224 | scan (int, optional): the scan number. Defaults to None. 225 | 226 | Returns: 227 | tuple: the photon energy used during beamtime. 
228 | """ 229 | scan, _ = self._check_scan_sample(scan, None) 230 | key_path = "com/SIXS/i14-c-c02-op-mono/energy" 231 | 232 | return self.h5file[key_path][()].item() * 1e3 233 | 234 | @h5_safe_load 235 | def load_detector_shape(self, scan: int = None) -> tuple: 236 | return None 237 | -------------------------------------------------------------------------------- /src/cdiutils/io/vtk.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | # handling vtk case 3 | try: 4 | import vtk 5 | from vtk.util.numpy_support import numpy_to_vtk 6 | IS_VTK_AVAILABLE = True 7 | 8 | except ImportError: 9 | print("vtk package is not installed.") 10 | IS_VTK_AVAILABLE = False 11 | 12 | 13 | class VtkImportError(ImportError): 14 | """Custom exception to handle Vtk import error.""" 15 | 16 | def __init__(self, msg: str = None) -> None: 17 | _msg = "vtk package is not installed." 18 | if msg is not None: 19 | _msg += "\n" + msg 20 | super().__init__(_msg) 21 | 22 | 23 | def load_vtk(file_path: str): 24 | """ 25 | Load a vtk file. 26 | 27 | Args: 28 | file_path (_type_): the path to the file to open. 29 | 30 | Raises: 31 | VtkImportError: if vtk is not installed. 
32 | 33 | Returns: 34 | the reader output 35 | """ 36 | if not IS_VTK_AVAILABLE: 37 | raise VtkImportError 38 | 39 | reader = vtk.vtkGenericDataObjectReader() 40 | reader.SetFileName(file_path) 41 | reader.ReadAllScalarsOn() 42 | reader.ReadAllVectorsOn() 43 | reader.ReadAllTensorsOn() 44 | reader.Update() 45 | 46 | return reader.GetOutput() 47 | 48 | 49 | def save_as_vti( 50 | output_path: str, 51 | voxel_size: tuple | list | np.ndarray, 52 | cxi_convention: bool = False, 53 | origin: tuple = (0, 0, 0), 54 | **np_arrays: dict[np.ndarray] 55 | ) -> None: 56 | """Save numpy arrays to .vti file.""" 57 | if not IS_VTK_AVAILABLE: 58 | raise VtkImportError 59 | voxel_size = tuple(voxel_size) 60 | nb_arrays = len(np_arrays) 61 | 62 | if not nb_arrays: 63 | raise ValueError( 64 | "np_arrays is empty, please provide a dictionary of " 65 | "(fieldnames: np.ndarray) you want to save." 66 | ) 67 | is_init = False 68 | for i, (key, array) in enumerate(np_arrays.items()): 69 | if not is_init: 70 | shape = array.shape 71 | if cxi_convention: 72 | voxel_size = (voxel_size[2], voxel_size[1], voxel_size[0]) 73 | shape = (shape[2], shape[1], shape[0]) 74 | image_data = vtk.vtkImageData() 75 | image_data.SetOrigin(origin) 76 | image_data.SetSpacing(voxel_size) 77 | image_data.SetExtent( 78 | 0, shape[0] - 1, 79 | 0, shape[1] - 1, 80 | 0, shape[2] - 1 81 | ) 82 | point_data = image_data.GetPointData() 83 | is_init = True 84 | 85 | vtk_array = numpy_to_vtk(array.ravel()) 86 | point_data.AddArray(vtk_array) 87 | point_data.GetArray(i).SetName(key) 88 | point_data.Update() 89 | 90 | writer = vtk.vtkXMLImageDataWriter() 91 | writer.SetFileName(output_path) 92 | writer.SetInputData(image_data) 93 | writer.Write() 94 | -------------------------------------------------------------------------------- /src/cdiutils/multibcdi/3D_phase.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | from utils import are_coplanar 4 | 5 | 6 | def 
compute_3D_phase_V2(phases, q_vectors, support): 7 | # First load the vectors of the measurement frame. 8 | Q = [q1, q2, q3] = list(q_vectors.values()) 9 | Q = [] 10 | phi = [] 11 | for key in phases.keys(): 12 | Q.append(q_vectors[key]) 13 | phi.append(phases[key]) 14 | phi = np.array(phi) 15 | 16 | # check if vectors are coplanar. If they are, they cannot describe the 17 | # 3D space and then 3D phase cannot be computed 18 | if are_coplanar(q1, q2, q3): 19 | print("The given vectors are coplanar, this won't work.") 20 | return None 21 | Q_inverse = np.linalg.inv(np.array(Q)) 22 | 23 | # Get only the coordinates of the voxel within the support. 24 | I, J, K = np.where(support > 0) 25 | 26 | # Initialise the 3D phase. 27 | shape = support.shape 28 | phase_3D = np.zeros([shape[0], shape[1], shape[2], 3]) 29 | 30 | # Iteration over each voxel. 3D phase is computed for each voxel. 31 | for i, j, k in zip(I.tolist(), J.tolist(), K.tolist()): 32 | 33 | # Compute U values in the canonical basis. 34 | U = np.dot(Q_inverse, phi[..., i, j, k]) 35 | phase_3D[i, j, k] = np.array(U) 36 | 37 | return phase_3D 38 | 39 | 40 | def compute_3D_phase(phases, q_vectors, support): 41 | 42 | # First load the vectors of the measurement frame. 43 | Q = [q1, q2, q3] = q_vectors.values() 44 | 45 | # check if vectors are coplanar. If they are, the cannot describe the 46 | # 3D space and then 3D phase cannot be computed 47 | if are_coplanar(q1, q2, q3): 48 | print("The given vectors are coplanar, this won't work.") 49 | return None 50 | 51 | # Prepare the matrix M_inverse that is needed for retrieving the U values. 52 | # Theses U values are the actual components of the 3D phase for each q 53 | # vector. 54 | alpha = np.dot(q1, q2) 55 | beta = np.dot(q2, q3) 56 | gamma = np.dot(q1, q3) 57 | M = np.array([[1, alpha, gamma], [alpha, 1, beta], 58 | [gamma, beta, 1]], np.float) 59 | M_inverse = np.linalg.inv(M) 60 | 61 | # Get the shape of the 3D phase and initialise it. 
62 | shape = support.shape 63 | phase_3D = np.zeros([shape[0], shape[1], shape[2], 3]) 64 | 65 | # Get only the coordinates of the voxel within the support. 66 | I, J, K = np.where(support > 0) 67 | 68 | # Iteration over each voxel. 3D phase is computed for each voxel. 69 | for i, j, k in zip(I.tolist(), J.tolist(), K.tolist()): 70 | 71 | # Initialise the current voxel phase with zeros 72 | voxel_phase = np.zeros([3]) 73 | 74 | # Phi values are the measured values. The phi list will contain 75 | # 3 values, one for each q_vector. 76 | phi = [] 77 | for peak in phases.keys(): 78 | phi.append(phases[peak][i, j, k]) 79 | 80 | # U Values are computed here. 81 | U = np.dot(M_inverse, np.array(phi)) 82 | 83 | # Add up the u vectors described in the canonical basis (u*q). 84 | # This will provide the 3D phase of the voxel. 85 | for u, q in zip(U.tolist(), Q): 86 | voxel_phase = np.add(voxel_phase, u * q) 87 | 88 | # Finally append the voxel phase to the global 3D phase 89 | phase_3D[i, j, k] = voxel_phase 90 | 91 | return phase_3D 92 | 93 | 94 | def compute_projected_phase(phase_3D, q_projection): 95 | return np.dot(phase_3D, q_projection) 96 | -------------------------------------------------------------------------------- /src/cdiutils/multibcdi/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/clatlan/cdiutils/761456037e7f9a056d962b251cc36c33954e135f/src/cdiutils/multibcdi/__init__.py -------------------------------------------------------------------------------- /src/cdiutils/multibcdi/preprocess.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy.ndimage.measurements import center_of_mass 3 | 4 | 5 | def center_data(data, mask=None, max_size=256): 6 | shape = data.shape 7 | com = [int(round(c)) for c in center_of_mass(data)] 8 | com_box = [2 * min(c, shape[i] - c) for i, c in enumerate(com)] 9 | max_box = [min(c, 
max_size) for c in com_box] 10 | 11 | final_shape = smaller_primes((max_box[0], max_box[1], max_box[2]), 12 | maxprime=7, required_dividers=(2,)) 13 | 14 | centered_data = data[com[0]-final_shape[0]//2 : com[0]+final_shape[0]//2, 15 | com[1]-final_shape[1]//2 : com[1]+final_shape[1]//2, 16 | com[2]-final_shape[2]//2 : com[2]+final_shape[2]//2] 17 | if mask is not None: 18 | centered_mask = mask[com[0]-final_shape[0]//2:com[0]+final_shape[0]//2, 19 | com[1]-final_shape[1]//2:com[1]+final_shape[1]//2, 20 | com[2]-final_shape[2]//2:com[2]+final_shape[2]//2] 21 | return centered_data, centered_mask 22 | 23 | return centered_data 24 | 25 | 26 | def center(data, com=None): 27 | shape = data.shape 28 | if com is None: 29 | com =[round(c) for c in center_of_mass(data)] 30 | centered_data = np.roll(data, shape[0] // 2 - com[0], axis=0) 31 | centered_data = np.roll(centered_data, shape[1] // 2 - com[1], axis=1) 32 | centered_data = np.roll(centered_data, shape[2] //2 - com[2], axis=2) 33 | 34 | return centered_data 35 | 36 | 37 | def crop_at_center(data, final_shape=None): 38 | if final_shape is None: 39 | print("No final shape specified, did not proceed to cropping") 40 | return data 41 | 42 | shape = data.shape 43 | final_shape = np.array(final_shape) 44 | 45 | if not (final_shape <= data.shape).all(): 46 | print("One of the axis of the final shape is larger than " \ 47 | "the initial axis (initial shape: {}, final shape: {}).\n" \ 48 | "Did not proceed to cropping.".format(shape, tuple(final_shape))) 49 | return data 50 | 51 | c = np.array(shape) // 2 52 | to_crop = final_shape // 2 53 | plus_one = np.where((final_shape %2 == 0), 0, 1) 54 | 55 | cropped = data[c[0] - to_crop[0] : c[0] + to_crop[0] + plus_one[0], 56 | c[1] - to_crop[1] : c[1] + to_crop[1] + plus_one[1], 57 | c[2] - to_crop[2] : c[2] + to_crop[2] + plus_one[2]] 58 | 59 | return cropped 60 | 61 | 62 | def are_coplanar(q1, q2, q3, value=False): 63 | result = np.dot(q1, np.cross(q2, q3)) 64 | if value: 65 | 
return result, result == 0 66 | else: 67 | return result == 0 68 | -------------------------------------------------------------------------------- /src/cdiutils/pipeline/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Pipeline module. 3 | 4 | Implement the engines to run the pipelines. 5 | """ 6 | 7 | import importlib 8 | 9 | from .base import Pipeline 10 | from .bcdi import BcdiPipeline 11 | from .pipeline_plotter import PipelinePlotter 12 | from .parameters import get_params_from_variables 13 | 14 | 15 | __class_func_submodules__ = { 16 | "Pipeline": "base", 17 | "BcdiPipeline": "bcdi", 18 | "PipelinePlotter": "pipeline_plotter", 19 | "get_params_from_variables": "parameters" 20 | } 21 | 22 | __all__ = [ 23 | "Pipeline", "BcdiPipeline", 24 | "PipelinePlotter", "get_params_from_variables" 25 | ] 26 | 27 | 28 | def __getattr__(name): 29 | if name in __class_func_submodules__: 30 | submodule = importlib.import_module( 31 | f"{__name__}.{__class_func_submodules__[name]}" 32 | ) 33 | return getattr(submodule, name) 34 | raise AttributeError(f"module {__name__} has no attribute {name}.") 35 | -------------------------------------------------------------------------------- /src/cdiutils/pipeline/base.py: -------------------------------------------------------------------------------- 1 | 2 | from abc import ABC 3 | from functools import wraps 4 | import logging 5 | import os 6 | from typing import Callable 7 | import signal 8 | import subprocess 9 | import sys 10 | import textwrap 11 | import time 12 | 13 | 14 | import numpy as np 15 | import yaml 16 | 17 | 18 | from cdiutils.plot.formatting import update_plot_params 19 | 20 | 21 | # Define a custom log level for JOB 22 | JOB_LOG_LEVEL = 25 # Between INFO (20) and WARNING (30) 23 | logging.addLevelName(JOB_LOG_LEVEL, "JOB") 24 | 25 | 26 | # Create a method to log at the JOB level 27 | def job(self, message, *args, **kwargs): 28 | if 
self.isEnabledFor(JOB_LOG_LEVEL): 29 | self._log(JOB_LOG_LEVEL, message, args, **kwargs) 30 | 31 | 32 | logging.Logger.job = job 33 | 34 | 35 | class LoggerWriter: 36 | """ 37 | Custom stream to send stdout (print statements) directly to 38 | logger in real-time. 39 | """ 40 | 41 | def __init__(self, logger, level, wrap=True): 42 | self.logger = logger 43 | self.level = level 44 | self.wrap = wrap 45 | 46 | def write(self, message): 47 | if message.strip(): 48 | # Only log if there's something to log 49 | # (ignores empty messages) 50 | if self.wrap: 51 | # Wrap lines at 79 characters 52 | wrapped_message = textwrap.fill(message.strip(), width=79) 53 | else: 54 | wrapped_message = message.strip() 55 | self.logger.log(self.level, "\n" + wrapped_message + "\n") 56 | 57 | def flush(self): 58 | """flush method is needed for compatibility with `sys.stdout`.""" 59 | 60 | 61 | class JobCancelledError(Exception): 62 | """Custom exception to handle job cancellations by the user.""" 63 | 64 | 65 | class JobFailedError(Exception): 66 | """Custom exception to handle job failure.""" 67 | 68 | 69 | class Pipeline(ABC): 70 | """ 71 | The main class for handling pipelines in the context of cdi. It is 72 | not intended for direct use, and should be derived for a specific 73 | application. 74 | """ 75 | 76 | def __init__( 77 | self, 78 | params: dict = None, 79 | param_file_path: str = None 80 | ): 81 | """ 82 | Initialisation method. 83 | 84 | Args: 85 | param_file_path (str, optional): the path to the 86 | parameter file. Defaults to None. 87 | parameters (dict, optional): the parameter dictionary. 88 | Defaults to None. 
89 | 90 | """ 91 | self.param_file_path = param_file_path 92 | self.params = params 93 | 94 | if params is None: 95 | if param_file_path is None: 96 | raise ValueError( 97 | "param_file_path or parameters must be provided" 98 | ) 99 | self.params = self.load_parameters() 100 | 101 | # Create the dump directory 102 | self.dump_dir = self.params["dump_dir"] 103 | self.make_dump_dir() 104 | 105 | # Initialise the logger 106 | self.logger = self._init_logger() 107 | 108 | self.interrupted = False # Flag to check for keyboard interrupt 109 | 110 | # Set the printoptions legacy to 1.21, otherwise types are printed. 111 | np.set_printoptions(legacy="1.21") 112 | 113 | # update the plot parameters 114 | update_plot_params() 115 | 116 | def make_dump_dir(self) -> None: 117 | dump_dir = self.params["dump_dir"] 118 | if os.path.isdir(dump_dir): 119 | print( 120 | "\nDump directory already exists, results will be " 121 | f"saved in:\n{dump_dir}." 122 | ) 123 | else: 124 | print( 125 | f"Creating the dump directory at: {dump_dir}") 126 | os.makedirs(dump_dir, exist_ok=True) 127 | 128 | @staticmethod 129 | def _init_logger() -> logging.Logger: 130 | # Remove all handlers associated with the root logger (Jupyter 131 | # default). 132 | for handler in logging.root.handlers[:]: 133 | logging.root.removeHandler(handler) 134 | 135 | logger = logging.getLogger("PipelineLogger") 136 | 137 | # Check if the logger already has handlers to avoid adding 138 | # multiple. 
139 | if not logger.hasHandlers(): 140 | logger.setLevel(logging.DEBUG) 141 | 142 | # Console handler 143 | console_handler = logging.StreamHandler() 144 | console_handler.setLevel(logging.INFO) 145 | 146 | console_formatter = logging.Formatter( 147 | fmt="[%(levelname)s] %(message)s", 148 | ) 149 | console_handler.setFormatter(console_formatter) 150 | logger.addHandler(console_handler) 151 | 152 | return logger 153 | 154 | def _init_process_logger(self, process_name) -> logging.FileHandler: 155 | """ 156 | Setup a new file handler for each process and overwrite the log 157 | file. 158 | """ 159 | file_handler = logging.FileHandler(f"{process_name}.log", mode="w") 160 | file_handler.setLevel(logging.DEBUG) 161 | file_format = logging.Formatter( 162 | fmt="%(asctime)s [%(levelname)s] %(message)s", 163 | datefmt="%Y-%m-%d %H:%M:%S" 164 | ) 165 | file_handler.setFormatter(file_format) 166 | self.logger.addHandler(file_handler) 167 | return file_handler 168 | 169 | @staticmethod 170 | def process(func: Callable) -> Callable: 171 | @wraps(func) 172 | def wrapper(self, *args, **kwargs) -> None: 173 | 174 | # Setup a new log file for this process 175 | file_handler = self._init_process_logger( 176 | f"{self.dump_dir}/{func.__name__}_output" 177 | ) 178 | msg = self.pretty_print( 179 | f"Starting process: {func.__name__}", 180 | do_print=False, 181 | return_text=True 182 | ) 183 | self.logger.info(msg) 184 | 185 | # Redirect stdout to capture print statements in real time 186 | original_stdout = sys.stdout # Save original stdout 187 | sys.stdout = LoggerWriter(self.logger, logging.INFO) 188 | 189 | try: 190 | func(self, *args, **kwargs) 191 | self.logger.info( 192 | f"Process {func.__name__} completed successfully." 
193 | ) 194 | except Exception as e: 195 | self.logger.error( 196 | "\nError occurred in the " 197 | f"'{func.__name__}' process:\n{e}" 198 | ) 199 | # traceback.print_exception(e) 200 | raise 201 | finally: 202 | # Restore original stdout and remove file handler 203 | sys.stdout = original_stdout 204 | self.logger.removeHandler(file_handler) 205 | file_handler.close() 206 | return wrapper 207 | 208 | def _unwrap_logs(self) -> None: 209 | """Bypass wrapping when printing logs.""" 210 | sys.stdout = LoggerWriter(self.logger, logging.INFO, wrap=False) 211 | 212 | def _wrap_logs(self) -> None: 213 | """Enable wrapping.""" 214 | sys.stdout = LoggerWriter(self.logger, logging.INFO, wrap=True) 215 | 216 | def _subprocess_run( 217 | self, 218 | cmd: str | list[str] 219 | ) -> subprocess.CompletedProcess: 220 | """ 221 | Run a subprocess command and return the result. 222 | 223 | Args: 224 | cmd (str | list[str]): The command to run. 225 | 226 | Raises: 227 | subprocess.CalledProcessError: If the command fails. 228 | 229 | Returns: 230 | subprocess.CompletedProcess: The result of the subprocess. 
231 | """ 232 | result = subprocess.run( 233 | cmd, 234 | stdout=subprocess.PIPE, 235 | stderr=subprocess.PIPE, 236 | text=True, 237 | ) 238 | if result.returncode != 0: 239 | self.logger.error(f"Command {cmd} failed: {result.stderr}") 240 | raise subprocess.CalledProcessError( 241 | result.returncode, 242 | result.args, 243 | output=result.stdout, 244 | stderr=result.stderr 245 | ) 246 | return result 247 | 248 | def submit_job(self, job_file: str, working_dir: str) -> tuple[str, str]: 249 | """Submit a job to SLURM as a subprocess.""" 250 | # Set up signal handler for keyboard interrupt (Ctrl + C) 251 | signal.signal( 252 | signal.SIGINT, lambda sig, frame: self._handle_interrupt(job_id) 253 | ) 254 | 255 | cmd = f"sbatch {job_file}" 256 | try: 257 | with subprocess.Popen( 258 | ["bash", "-l", "-c", cmd], 259 | stdout=subprocess.PIPE, 260 | stderr=subprocess.PIPE, 261 | cwd=working_dir, # Change to this directory first 262 | text=True, # Ensures stdout/stderr are str, not bytes 263 | env=os.environ.copy() 264 | ) as proc: 265 | stdout, stderr = proc.communicate() 266 | 267 | # Check for errors based on the return code 268 | if proc.returncode != 0: 269 | # An error occurred, log the stderr output 270 | self.logger.error( 271 | f"Error submitting job. Command returned: {stderr}" 272 | ) 273 | raise subprocess.CalledProcessError( 274 | proc.returncode, 275 | proc.args, 276 | output=stdout, 277 | stderr=stderr 278 | ) 279 | 280 | # Extract job ID from the output 281 | job_id = self._get_job_id(stdout) 282 | if job_id: 283 | self.logger.info( 284 | f"Job submitted successfully. Job ID: {job_id}" 285 | ) 286 | output_file = f"slurm-{job_id}.out" 287 | return job_id, os.path.join(working_dir, output_file) 288 | 289 | raise ValueError( 290 | "Failed to extract job ID from sbatch output." 
291 | ) 292 | 293 | except subprocess.CalledProcessError as e: 294 | # Log the error if the job submission fails 295 | self.logger.error( 296 | f"Subprocess failed with return code {e.returncode}: " 297 | f"{e.stderr}" 298 | ) 299 | raise e 300 | 301 | @staticmethod 302 | def _get_job_id(stdout: str) -> str: 303 | """Extract the job ID from sbatch output.""" 304 | for line in stdout.splitlines(): 305 | if "Submitted batch job" in line: 306 | return line.split()[-1] # last element of the line 307 | return None 308 | 309 | def is_job_running(self, job_id: str) -> bool: 310 | """Check if the job is still running using squeue.""" 311 | result = self._subprocess_run(["squeue", "--job", job_id]) 312 | return job_id in result.stdout # Job is running if job_id is found 313 | 314 | def stream_job_output(self, job_id: str, output_file: str) -> None: 315 | """Stream the job output in real time from the SLURM output file.""" 316 | try: 317 | self.logger.info("Waiting for job output file...") 318 | 319 | # Wait until the output file is created (check every 2 seconds) 320 | while not os.path.exists(output_file): 321 | if self.interrupted: 322 | self.logger.info( 323 | "Job monitoring interrupted before file creation." 324 | ) 325 | return 326 | time.sleep(0.5) 327 | 328 | self.logger.info(f"Streaming job output from {output_file}:\n\n") 329 | 330 | # Keep trying to read the output file until the job is done 331 | with open(output_file, "r") as f: 332 | while not self.interrupted: 333 | # Check if the job is still in the queue before reading 334 | if not self.is_job_running(job_id): 335 | self.logger.info( 336 | f"\n\nJob {job_id} is no longer running. " 337 | "Stopping output streaming." 
338 | ) 339 | break 340 | 341 | line = f.readline() 342 | if line: 343 | self.logger.job(line.strip()) 344 | else: 345 | time.sleep(0.5) # Sleep briefly before checking again 346 | 347 | except FileNotFoundError: 348 | self.logger.error(f"Output file {output_file} not found.") 349 | raise 350 | 351 | def monitor_job( 352 | self, 353 | job_id: str, 354 | output_file: str, 355 | retries: int = 10, 356 | delay: int = 1 357 | ) -> None: 358 | """ 359 | Monitor the job and stream its output in real time. Check the 360 | final status of the job and raise an exception if the job 361 | fails. 362 | 363 | Args: 364 | job_id (str): The job ID to monitor. 365 | output_file (str): The path to the output file. 366 | retries (int, optional): Number of retries to check the job 367 | state if it remains in RUNNING state but is no longer in 368 | the queue. Defaults to 10. 369 | delay (int, optional): Delay in seconds between retries. 370 | Defaults to 1. 371 | 372 | Raises: 373 | JobFailedError: If the job fails. 374 | """ 375 | # Start monitoring the job and streaming output 376 | while not self.interrupted: 377 | if not self.is_job_running(job_id): 378 | self.logger.info("Checking final status...") 379 | break 380 | 381 | # Job is still running, stream the output file 382 | self.stream_job_output(job_id, output_file) 383 | 384 | # After job finishes, check final status 385 | if not self.interrupted: 386 | state, exit_code = self.get_job_state(job_id) 387 | attempt = 0 388 | while state == "RUNNING" and attempt < retries: 389 | self.logger.info( 390 | f"Job {job_id} is still in RUNNING state but not " 391 | f"found in queue. Rechecking the state in {delay} " 392 | f"second(s)..." 
393 | ) 394 | time.sleep(delay) 395 | state, exit_code = self.get_job_state(job_id) 396 | attempt += 1 397 | if state == "COMPLETED": 398 | self.logger.info( 399 | f"Job {job_id} completed successfully with " 400 | f"exit code: {exit_code}" 401 | ) 402 | return 403 | elif state == "FAILED": 404 | raise JobFailedError( 405 | f"Job {job_id} failed with exit code: {exit_code}." 406 | f"See {output_file} for more details." 407 | ) 408 | else: 409 | self.logger.warning( 410 | f"Job {job_id} finished with unexpected state: {state}." 411 | ) 412 | 413 | def get_job_state(self, job_id: str) -> tuple[str, str]: 414 | """ 415 | Get the job state and exit code using sacct. 416 | 417 | Args: 418 | job_id (str): The job ID to check. 419 | 420 | Raises: 421 | ValueError: If the job is not found in sacct. 422 | 423 | Returns: 424 | tuple[str, str]: The job state and exit code. 425 | """ 426 | result = self._subprocess_run( 427 | [ 428 | "sacct", "-j", job_id, 429 | "--format=JobID,State,ExitCode", "--noheader" 430 | ] 431 | ) 432 | state, exit_code = None, None 433 | 434 | # Parse the sacct output to check the job's final state 435 | for line in result.stdout.splitlines(): 436 | if job_id in line: 437 | parts = line.split() 438 | if len(parts) >= 3: 439 | state = parts[1] 440 | exit_code = parts[2] 441 | break 442 | if state is None or exit_code is None: 443 | raise ValueError(f"Job {job_id} not found in sacct.") 444 | return state, exit_code 445 | 446 | def cancel_job(self, job_id: str) -> None: 447 | """Cancel the SLURM job using scancel.""" 448 | try: 449 | self.logger.info(f"\n\nCancelling job {job_id}...") 450 | _ = self._subprocess_run(["scancel", job_id]) 451 | self.logger.info(f"Job {job_id} cancelled successfully.") 452 | except subprocess.CalledProcessError as e: 453 | self.logger.error(f"Failed to cancel job {job_id}: {e.stderr}") 454 | raise 455 | 456 | def _handle_interrupt(self, job_id: str) -> None: 457 | """ 458 | Handle a keyboard interrupt (Ctrl + C) by 
cancelling the 459 | job. 460 | """ 461 | self.interrupted = True # Set flag to interrupt monitoring 462 | self.cancel_job(job_id) 463 | raise JobCancelledError( 464 | "Keyboard interruption. " 465 | f"Job {job_id} was cancelled by the user." 466 | ) 467 | 468 | def load_parameters( 469 | self, 470 | file_path: str = None 471 | ) -> dict: 472 | """Load the parameters from the configuration files.""" 473 | if file_path is None: 474 | file_path = self.param_file_path 475 | 476 | with open(file_path, "r", encoding="utf8") as file: 477 | params = yaml.safe_load(file) 478 | return params 479 | 480 | @staticmethod 481 | def pretty_print( 482 | text: str, 483 | max_char_per_line: int = 79, 484 | do_print: bool = True, 485 | return_text: bool = False 486 | ) -> None | str: 487 | """Print text with a frame of stars.""" 488 | pretty_text = "\n".join( 489 | [ 490 | "", 491 | "*" * (max_char_per_line), 492 | *[ 493 | f"* {w[::-1].center(max_char_per_line-4)[::-1]} *" 494 | for w in textwrap.wrap(text, width=max_char_per_line - 4) 495 | ], 496 | "*" * max_char_per_line, 497 | "", 498 | ] 499 | ) 500 | if do_print: 501 | print(pretty_text) 502 | if return_text: 503 | return pretty_text 504 | return None 505 | -------------------------------------------------------------------------------- /src/cdiutils/pipeline/parameters.py: -------------------------------------------------------------------------------- 1 | from collections.abc import Mapping # more flexible than dict 2 | import numpy as np 3 | import warnings 4 | 5 | from cdiutils.utils import energy_to_wavelength 6 | 7 | DEFAULT_PIPELINE_PARAMS = { 8 | # Formerly the "metadata" 9 | "beamline_setup": "REQUIRED", 10 | "scan": "REQUIRED", 11 | "experiment_file_path": "REQUIRED", 12 | "dump_dir": "REQUIRED", 13 | "sample_name": None, 14 | "experiment_data_dir_path": None, 15 | "detector_data_path": None, 16 | "edf_file_template": None, 17 | "detector_name": None, 18 | "flat_field": None, 19 | "alien_mask": None, 20 | 21 | 
"background_level": None, 22 | "preprocess_shape": (150, 150), 23 | "energy": None, 24 | "hkl": [1, 1, 1], 25 | "hot_pixel_filter": False, 26 | "voxel_reference_methods": ["max", "com", "com"], 27 | "q_lab_ref": None, 28 | "light_loading": False, 29 | "det_reference_voxel": None, 30 | "rocking_angle_binning": None, 31 | "det_calib_params": None, 32 | "voxel_size": None, 33 | "apodize": "blackman", 34 | "flip": False, 35 | "isosurface": None, 36 | "show": False, 37 | "verbose": True, 38 | "debug": True, 39 | "handle_defects": False, 40 | "orthogonalise_before_phasing": False, 41 | "orientation_convention": "cxi", 42 | "pynx": { 43 | "data": None, 44 | "mask": None, 45 | "data2cxi": False, 46 | "auto_center_resize": False, 47 | "support": "auto", 48 | "support_size": None, 49 | "support_threshold": "0.15, 0.40", 50 | "support_threshold_method": "rms", 51 | "support_only_shrink": False, 52 | "support_update_period": 20, 53 | "support_smooth_width_begin": 2, 54 | "support_smooth_width_end": 0.5, 55 | "support_post_expand": None, # (-1, 1) 56 | "support_update_border_n": None, 57 | "algorithm": None, 58 | "psf": None, # "pseudo-voigt,0.5,0.1,10", 59 | "nb_raar": 500, 60 | "nb_hio": 300, 61 | "nb_er": 200, 62 | "nb_ml": 0, 63 | "nb_run": 20, 64 | "nb_run_keep": 10, 65 | "zero_mask": False, 66 | "crop_output": 0, 67 | "roi": "full", 68 | "positivity": False, 69 | "beta": 0.9, 70 | "detwin": True, 71 | "rebin": "1, 1, 1", 72 | "verbose": 100, 73 | "output_format": "cxi", 74 | "live_plot": False, 75 | "save_plot": True, 76 | "mpi": "run" 77 | }, 78 | "support": { 79 | "support_method": None, 80 | "raw_process": True, 81 | "support_path": None, 82 | }, 83 | "facets": { 84 | "nb_facets": None, 85 | "remove_edges": True, 86 | "order_of_derivative": None, 87 | "derivative_threshold": None, 88 | "amplitude_threshold": None, 89 | "top_facet_reference_index": [1, 1, 1], 90 | "authorised_index": 1, 91 | "nb_nghbs_min": 0, 92 | "index_to_display": None, 93 | "display_f_e_c": 
'facet', 94 | "size": 10, 95 | } 96 | } 97 | 98 | 99 | # cache the valid keys once, instead of recomputing every time 100 | _VALID_KEYS_CACHE = None # global variable to store keys 101 | 102 | 103 | def validate_and_fill_params( 104 | user_params: dict, 105 | defaults: dict = DEFAULT_PIPELINE_PARAMS 106 | ) -> dict: 107 | """ 108 | Validate user parameters against DEFAULT_PIPELINE_PARAMS. Ensures 109 | required parameters are present and fills in missing optional ones. 110 | 111 | Args: 112 | user_params (dict): dict of user-provided parameters. 113 | defaults (dict, optional): default pipeline parameters (can be 114 | nested). Defaults to DEFAULT_PIPELINE_PARAMS. 115 | 116 | Raises: 117 | ValueError: if a required parameter is missing. 118 | 119 | Returns: 120 | dict: new dictionary with defaults filled in 121 | """ 122 | filled_params = {} 123 | 124 | for key, default in defaults.items(): 125 | user_value = user_params.get(key, None) 126 | 127 | # handle nested dictionaries recursively 128 | if isinstance(default, Mapping): 129 | if user_value is None: 130 | user_value = {} # create an empty dict if missing 131 | filled_params[key] = validate_and_fill_params(user_value, default) 132 | 133 | # check for required parameters 134 | elif default == "REQUIRED" and user_value is None: 135 | raise ValueError(f"Missing required parameter: '{key}'") 136 | 137 | # use user value or fallback to default 138 | else: 139 | filled_params[key] = ( 140 | user_value if user_value is not None else default 141 | ) 142 | 143 | # warn for unexpected parameters 144 | known_keys = set(defaults.keys()) # optimised for membership tests 145 | for key in user_params: 146 | if key not in known_keys: 147 | warnings.warn( 148 | f"Parameter '{key}' is unknown and will not be used.", 149 | UserWarning 150 | ) 151 | 152 | return filled_params 153 | 154 | 155 | def collect_keys(d: dict) -> set: 156 | """Recursively collect all keys from a nested dictionary.""" 157 | keys = set(d.keys()) 158 | for 
value in d.values(): 159 | if isinstance(value, Mapping): 160 | keys |= collect_keys(value) 161 | return keys 162 | 163 | 164 | def isparameter(string: str) -> bool: 165 | """ 166 | Check if a string is a valid parameter name in 167 | DEFAULT_PIPELINE_PARAMS. 168 | """ 169 | global _VALID_KEYS_CACHE # use the global cache 170 | 171 | if _VALID_KEYS_CACHE is None: # compute only once 172 | _VALID_KEYS_CACHE = collect_keys(DEFAULT_PIPELINE_PARAMS) 173 | 174 | return string in _VALID_KEYS_CACHE 175 | 176 | 177 | def get_params_from_variables( 178 | dir_list: list, 179 | globals_dict: dict 180 | ) -> dict: 181 | """ 182 | Return a dictionary of parameters whose keys are authorized by the 183 | DEFAULT_PIPELINE_PARAMS list. 184 | """ 185 | params = {"pynx": {}, "facets": {}, "support": {}} 186 | for e in dir_list: 187 | if e in DEFAULT_PIPELINE_PARAMS: 188 | params[e] = globals_dict[e] 189 | elif e in DEFAULT_PIPELINE_PARAMS["pynx"]: 190 | params["pynx"][e] = globals_dict[e] 191 | elif e in DEFAULT_PIPELINE_PARAMS["facets"]: 192 | params["facets"][e] = globals_dict[e] 193 | elif e in DEFAULT_PIPELINE_PARAMS["support"]: 194 | params["support"][e] = globals_dict[e] 195 | return params 196 | 197 | 198 | def convert_np_arrays(**data) -> dict: 199 | """ 200 | Recursively converts numpy types and arrays in a dictionary to 201 | standard Python types for YAML serialization. 202 | 203 | Args: 204 | **data: arbitrary keyword arguments representing a dictionary 205 | with potential numpy types. 206 | 207 | Returns: 208 | dict: A dictionary with all numpy types converted to standard 209 | Python types. 210 | """ 211 | def convert_value(value): 212 | # Handle numpy arrays 213 | if isinstance(value, np.ndarray): 214 | if value.size == 1: 215 | return convert_value(value.item()) 216 | return tuple(convert_value(v) for v in value) 217 | 218 | # Handle numpy scalar types. 
219 | if isinstance(value, (np.integer, np.int32, np.int64)): 220 | return int(value) 221 | if isinstance(value, (np.floating, np.float32, np.float64)): 222 | return float(value) 223 | if isinstance(value, (np.bool_, bool)): 224 | return bool(value) 225 | if isinstance(value, (np.str_, str)): 226 | return str(value) 227 | 228 | # Handle nested lists or tuples. 229 | if isinstance(value, (list, tuple)): 230 | return type(value)(convert_value(v) for v in value) 231 | 232 | # If value is a dictionary, convert its contents recursively. 233 | if isinstance(value, dict): 234 | return convert_np_arrays(**value) 235 | 236 | # Return the value as is if no conversion is needed. 237 | return value 238 | 239 | # Apply the conversion function to each dictionary entry 240 | return { 241 | key: convert_value(value) for key, value in data.items() 242 | } -------------------------------------------------------------------------------- /src/cdiutils/pipeline/pynx-id01-cdi_template.slurm: -------------------------------------------------------------------------------- 1 | #!/bin/bash -l 2 | #SBATCH --partition=gpu,p9gpu 3 | #SBATCH --nodes=1 4 | #SBATCH --ntasks-per-node=1 5 | #SBATCH --gres=gpu:1 6 | #SBATCH --time=00:10:00 7 | #SBATCH --output=slurm-%j.out 8 | 9 | scontrol --details show jobs $SLURM_JOBID | grep RES 10 | 11 | module load pynx 12 | 13 | cd $data_path 14 | 15 | pynx-cdi-id01 pynx-cdi-inputs.txt -------------------------------------------------------------------------------- /src/cdiutils/plot/__init__.py: -------------------------------------------------------------------------------- 1 | import importlib 2 | 3 | from .formatting import update_plot_params 4 | from .formatting import get_figure_size 5 | from .formatting import set_plot_configs 6 | from .formatting import get_plot_configs 7 | from .formatting import get_extent 8 | from .formatting import add_colorbar 9 | from .formatting import x_y_lim_from_support 10 | from .formatting import get_x_y_limits_extents 11 
| from .formatting import set_x_y_limits_extents 12 | from .formatting import add_labels 13 | from .formatting import save_fig 14 | from .formatting import white_interior_ticks_labels 15 | 16 | 17 | __submodules__ = { 18 | "slice", 19 | "volume" 20 | "interactive", 21 | "stats" 22 | } 23 | 24 | __class_func_submodules__ = { 25 | "Plotter": "interactive", 26 | "plot_volume_slices": "slice", 27 | "plot_multiple_volume_slices": "slice", 28 | "VolumeViewer": "volume", 29 | "plot_histogram": "stats", 30 | "strain_statistics": "stats", 31 | } 32 | 33 | __all__ = [ 34 | "update_plot_params", 35 | "get_figure_size", 36 | "set_plot_configs", 37 | "get_plot_configs", 38 | "get_extent", 39 | "add_colorbar", 40 | "x_y_lim_from_support", 41 | "get_x_y_limits_extents", 42 | "set_x_y_limits_extents", 43 | "add_labels", 44 | "save_fig", 45 | "white_interior_ticks_labels", 46 | ] 47 | __all__ += list(__submodules__) + list(__class_func_submodules__) 48 | 49 | 50 | def __getattr__(name): 51 | if name in __submodules__: 52 | return importlib.import_module(f'{__name__}.{name}') 53 | 54 | if name in __class_func_submodules__: 55 | submodule = importlib.import_module( 56 | f"{__name__}.{__class_func_submodules__[name]}" 57 | ) 58 | return getattr(submodule, name) 59 | raise AttributeError(f"module {__name__} has no attribute {name}.") 60 | -------------------------------------------------------------------------------- /src/cdiutils/plot/colormap.py: -------------------------------------------------------------------------------- 1 | import matplotlib.pyplot as plt 2 | import numpy as np 3 | import colorcet 4 | 5 | from matplotlib.colors import LinearSegmentedColormap 6 | 7 | 8 | RED_TO_TEAL = LinearSegmentedColormap.from_list( 9 | 10 | "red_to_teal", 11 | [ 12 | '#f84650', '#fb4358', '#fb455b', '#fc475d', '#fc495e', '#fc4b60', 13 | '#fd4d62', '#fd4f64', '#fd5165', '#fd5367', '#fe5568', '#fe576a', 14 | '#fe596b', '#fe5b6d', '#fe5d6e', '#ff5f70', '#ff6171', '#ff6373', 15 | '#ff6574', 
'#ff6776', '#ff6977', '#ff6a79', '#ff6c7a', '#ff6e7c', 16 | '#ff707d', '#ff727e', '#ff7380', '#ff7581', '#ff7783', '#ff7984', 17 | '#ff7a85', '#ff7c87', '#ff7e88', '#ff7f8a', '#ff818b', '#ff838c', 18 | '#ff848e', '#ff868f', '#ff8890', '#ff8992', '#ff8b93', '#ff8d95', 19 | '#fe8e96', '#fe9097', '#fe9199', '#fe939a', '#fe959b', '#fe969d', 20 | '#fd989e', '#fd99a0', '#fd9ba1', '#fd9da2', '#fd9ea4', '#fca0a5', 21 | '#fca1a6', '#fca3a8', '#fca4a9', '#fba6ab', '#fba7ac', '#fba9ad', 22 | '#fbabaf', '#faacb0', '#faaeb1', '#faafb3', '#f9b1b4', '#f9b2b6', 23 | '#f9b4b7', '#f8b5b8', '#f8b7ba', '#f8b8bb', '#f7babc', '#f7bbbe', 24 | '#f6bdbf', '#f6bec1', '#f6c0c2', '#f5c1c3', '#f5c3c5', '#f4c4c6', 25 | '#f4c6c7', '#f3c7c9', '#f3c9ca', '#f2cacc', '#f2cccd', '#f1cdce', 26 | '#f1cfd0', '#f0d0d1', '#efd2d3', '#efd3d4', '#eed4d5', '#eed6d7', 27 | '#edd7d8', '#ecd9d9', '#ecdadb', '#ebdcdc', '#eaddde', '#eadfdf', 28 | '#e9e0e0', '#e8e2e2', '#e8e3e3', '#e7e5e5', '#e3e5e5', '#e1e5e4', 29 | '#dfe4e4', '#dce4e3', '#dae3e2', '#d7e3e1', '#d5e2e0', '#d3e1e0', 30 | '#d0e1df', '#cee0de', '#ccdfdd', '#c9dfdd', '#c7dedc', '#c5dddb', 31 | '#c3ddda', '#c0dcda', '#bedbd9', '#bcdbd8', '#badad7', '#b8d9d7', 32 | '#b6d8d6', '#b4d8d5', '#b2d7d5', '#afd6d4', '#add6d3', '#abd5d2', 33 | '#a9d4d2', '#a7d3d1', '#a5d3d0', '#a3d2d0', '#a1d1cf', '#9fd0ce', 34 | '#9dcfcd', '#9bcfcd', '#99cecc', '#97cdcb', '#95cccb', '#93ccca', 35 | '#91cbc9', '#90cac9', '#8ec9c8', '#8cc8c7', '#8ac7c7', '#88c7c6', 36 | '#86c6c5', '#84c5c5', '#82c4c4', '#80c3c3', '#7ec3c2', '#7dc2c2', 37 | '#7bc1c1', '#79c0c1', '#77bfc0', '#75bebf', '#73bebf', '#71bdbe', 38 | '#70bcbd', '#6ebbbd', '#6cbabc', '#6ab9bb', '#68b8bb', '#66b8ba', 39 | '#64b7b9', '#62b6b9', '#61b5b8', '#5fb4b7', '#5db3b7', '#5bb3b6', 40 | '#59b2b5', '#57b1b5', '#55b0b4', '#53afb4', '#51aeb3', '#4fadb2', 41 | '#4dacb2', '#4bacb1', '#49abb0', '#47aab0', '#45a9af', '#43a8af', 42 | '#41a7ae', '#3fa6ad', '#3da5ad', '#3aa5ac', '#38a4ab', '#36a3ab', 43 | '#33a2aa', '#31a1aa', 
'#2ea0a9', '#2c9fa8', '#299ea8', '#269ea7', 44 | '#239da6', '#209ca6', '#1c9ba5', '#189aa5', '#1499a4', '#0e98a3', 45 | '#0797a3', '#0097a2' 46 | ] 47 | ) 48 | 49 | 50 | TURBO_FIRST_HALF = LinearSegmentedColormap.from_list( 51 | "turbo_first_half", 52 | [ 53 | '#30123b', '#31133d', '#31143e', '#321540', '#321641', '#331743', 54 | '#331744', '#341846', '#341947', '#351a49', '#351b4b', '#361c4c', 55 | '#361d4e', '#371e4f', '#371f51', '#382052', '#382154', '#382256', 56 | '#392357', '#392359', '#3a245a', '#3a255c', '#3b265d', '#3b275f', 57 | '#3b2861', '#3c2962', '#3c2a64', '#3d2b65', '#3d2c67', '#3d2d68', 58 | '#3e2e6a', '#3e2f6b', '#3e306d', '#3f316f', '#3f3270', '#3f3372', 59 | '#403473', '#403575', '#403676', '#413778', '#413879', '#41397b', 60 | '#413a7c', '#423b7e', '#423c80', '#423d81', '#433e83', '#433f84', 61 | '#434086', '#434187', '#444289', '#44438a', '#44448c', '#44458d', 62 | '#44468f', '#454790', '#454892', '#454993', '#454a95', '#454c96', 63 | '#464d98', '#464e99', '#464f9a', '#46509c', '#46519d', '#46529f', 64 | '#4753a0', '#4754a2', '#4755a3', '#4756a4', '#4757a6', '#4759a7', 65 | '#475aa9', '#475baa', '#475cab', '#475dad', '#485eae', '#485faf', 66 | '#4860b1', '#4862b2', '#4863b3', '#4864b5', '#4865b6', '#4866b7', 67 | '#4867b9', '#4868ba', '#486abb', '#486bbc', '#486cbe', '#486dbf', 68 | '#486ec0', '#486fc1', '#4871c3', '#4872c4', '#4873c5', '#4774c6', 69 | '#4775c7', '#4777c8', '#4778ca', '#4779cb', '#477acc', '#477bcd', 70 | '#477dce', '#467ecf', '#467fd0', '#4680d1', '#4681d2', '#4683d3', 71 | '#4684d4', '#4585d5', '#4586d6', '#4588d7', '#4589d8', '#448ad9', 72 | '#448bda', '#448ddb', '#438edc', '#438fdd', '#4391de', '#4292de', 73 | '#4293df', '#4194e0', '#4196e1', '#4097e2', '#4098e2', '#409ae3', 74 | '#3f9be4', '#3e9ce4', '#3e9ee5', '#3d9fe6', '#3da0e6', '#3ca2e7', 75 | '#3ba3e7', '#3aa4e8', '#3aa6e9', '#39a7e9', '#38a8ea', '#37aaea', 76 | '#36abea', '#35adeb', '#34aeeb', '#33afeb', '#32b1ec', '#30b2ec', 77 | '#2fb4ec', '#2eb5ed', '#2cb6ed', 
'#2ab8ed', '#2ab9ed', '#2dbbea', 78 | '#30bce8', '#33bee6', '#36bfe4', '#38c1e1', '#3bc2df', '#3dc4dd', 79 | '#3fc5da', '#41c7d8', '#43c8d6', '#45c9d3', '#47cbd1', '#49ccce', 80 | '#4bcecc', '#4dcfc9', '#4ed1c7', '#50d2c4', '#52d4c2', '#54d5bf', 81 | '#55d6bc', '#57d8ba', '#59d9b7', '#5adbb4', '#5cdcb1', '#5eddaf', 82 | '#60dfac', '#61e0a9', '#63e1a6', '#65e3a3', '#67e4a0', '#69e59d', 83 | '#6be799', '#6de896', '#6fe993', '#71eb8f', '#73ec8c', '#75ed88', 84 | '#77ee84', '#7af080', '#7cf17c', '#7ff278', '#82f374', '#85f46f', 85 | '#88f56a', '#8bf765', '#8ef860', '#92f95a', '#96fa54', '#9afa4d', 86 | '#9efb45', '#a3fc3c' 87 | ] 88 | ) 89 | 90 | 91 | TURBO_SECOND_HALF = LinearSegmentedColormap.from_list( 92 | "turbo_second_half", 93 | ['#a3fc3c', '#a7fa3c', '#abf83c', '#aff63c', '#b3f43b', '#b6f33b', 94 | '#b9f13b', '#bcef3b', '#bfed3a', '#c2eb3a', '#c4ea3a', '#c6e83a', 95 | '#c9e639', '#cbe439', '#cde339', '#cfe138', '#d1df38', '#d3dd38', 96 | '#d4dc37', '#d6da37', '#d8d837', '#d9d736', '#dbd536', '#dcd336', 97 | '#ddd135', '#dfd035', '#e0ce35', '#e1cc34', '#e3cb34', '#e4c933', 98 | '#e5c733', '#e6c633', '#e7c432', '#e8c232', '#e9c132', '#eabf31', 99 | '#ebbd31', '#ebbc30', '#ecba30', '#edb830', '#eeb72f', '#eeb52f', 100 | '#efb32e', '#f0b22e', '#f0b02d', '#f1ae2d', '#f2ad2d', '#f2ab2c', 101 | '#f3aa2c', '#f3a82b', '#f4a62b', '#f4a52b', '#f5a32a', '#f5a12a', 102 | '#f6a029', '#f69e29', '#f79c28', '#f79a28', '#f79928', '#f89727', 103 | '#f89527', '#f89426', '#f99226', '#f99025', '#f98f25', '#f98d25', 104 | '#fa8b24', '#fa8a24', '#fa8823', '#fa8623', '#fa8423', '#fb8222', 105 | '#fb8122', '#fb7f21', '#fb7e21', '#fa7c20', '#f97b20', '#f97a1f', 106 | '#f8791f', '#f7781e', '#f7761e', '#f6751d', '#f5741d', '#f4731c', 107 | '#f4721c', '#f3711c', '#f26f1b', '#f16e1b', '#f16d1a', '#f06c1a', 108 | '#ef6b19', '#ee6a19', '#ed6919', '#ed6818', '#ec6618', '#eb6517', 109 | '#ea6417', '#e96317', '#e86216', '#e76116', '#e76016', '#e65f15', 110 | '#e55e15', '#e45d15', '#e35b14', 
def save_json_cmap(colormap_name: str, output_path: str) -> None:
    """
    Export a matplotlib colormap to a ParaView-style JSON colormap file.

    Args:
        colormap_name: name of a colormap registered with matplotlib.
        output_path: path of the JSON file to write.
    """
    import json

    cmap = plt.get_cmap(colormap_name)

    # Sample the colormap on [0, 1] inclusive. 68 points keeps the
    # former ~0.015 spacing but ends exactly at 1.0 instead of
    # overshooting to a clamped 1.005 sample.
    samples = np.linspace(0, 1, 68)

    # Flat [x0, r0, g0, b0, x1, r1, g1, b1, ...] list, as expected by
    # ParaView's "RGBPoints" entry.
    rgb_points = []
    for value in samples:
        red, green, blue = cmap(value)[:3]
        rgb_points.extend([float(value), red, green, blue])

    data = [
        {
            "ColorSpace": "Lab",
            "Creator": "Matplotlib",
            "DefaultMap": True,
            "Name": colormap_name,
            "NanColor": [0, 0, 0],
            "RGBPoints": rgb_points,
        }
    ]

    # json.dump guarantees well-formed output (proper escaping of the
    # colormap name, balanced brackets) unlike the former hand-built
    # string concatenation.
    with open(output_path, "w", encoding="utf-8") as file:
        json.dump(data, file, indent=4)


if __name__ == '__main__':
    import sys

    if len(sys.argv) < 3:
        print(f"usage: {sys.argv[0]} cmap_name output_path")
        sys.exit(1)
    save_json_cmap(sys.argv[1], sys.argv[2])
def quiver_plot(
    ax,
    support,
    disp,
    strain,
    target_shape,
    slice_pos,
    displacement_scale=1,
    min_max_strain=(-0.1, 0.1),
    min_max_disp=(-0.01, 0.01),
    background_cmap="cet_CET_D13",
    foreground_cmap="cet_CET_D8",
    crop_fit=(1, -1),
    rotation=0,
    flip=False,
    interpolate=1,
    linewidth=0.7,
    contour_linewidth=1,
    reference_line=True,
    return_colorbar=False,
    no_background=False,
    no_foreground=False,
    background_opacity=1,
    aspect_ratio="equal",
    **kwargs
):
    """
    Plot a 2D slice of the strain field as a background image, overlaid
    with one displacement "deviation" line (plus arrows) per row of the
    slice, and the support outline as a contour.

    Args:
        ax: the matplotlib axes to draw on.
        support: 3D support array defining the object.
        disp: 3D displacement field.
        strain: 3D strain field.
        target_shape: shape the three arrays are cropped to.
        slice_pos: index of the slice (along axis 0) to plot.
        displacement_scale: scaling applied to the displacement lines.
        min_max_strain: (vmin, vmax) of the strain background.
        min_max_disp: (vmin, vmax) of the displacement foreground.
        background_cmap, foreground_cmap: colormap names or instances.
            Defaults are colormap *names* so the lookup happens lazily
            at call time (the former plt.get_cmap(...) defaults ran at
            import time and broke the whole module when the cet_* maps
            were unregistered). NOTE: the cet_* defaults still require
            colorcet to be imported by the caller — confirm.
        crop_fit: (start, stop) slice applied along the last axis for a
            tight layout. An immutable tuple (was a mutable list
            default, a classic Python pitfall).
        rotation: rotation angle in degrees applied in the (0, 2) plane.
        flip: whether to flip the arrays along the last axis.
        interpolate, linewidth, reference_line: forwarded to
            plot_deviation.
        contour_linewidth: line width of the support contour.
        return_colorbar: if True, also return the background image and
            the foreground ScalarMappable (for colorbar creation).
        no_background, no_foreground: disable either layer.
        background_opacity: alpha of the strain background.
        aspect_ratio: aspect passed to ax.matshow.
        **kwargs: extra keyword arguments forwarded to plot_deviation.

    Returns:
        ax, or (ax, background, sm) if return_colorbar is True.
    """
    if rotation != 0:
        strain = rotate(strain, rotation, axes=(0, 2))
        disp = rotate(disp, rotation, axes=(0, 2))
        support = rotate(support, rotation, axes=(0, 2))

    disp = crop_at_center(disp, final_shape=target_shape)
    strain = crop_at_center(strain, final_shape=target_shape)
    support = crop_at_center(support, final_shape=target_shape)

    # Flip the slice if needed.
    if flip:
        disp = np.flip(disp, axis=2)
        strain = np.flip(strain, axis=2)
        support = np.flip(support, axis=2)

    # Make the last minor slice adjustment to get a tight layout plot.
    disp = disp[..., crop_fit[0]: crop_fit[-1]]
    strain = strain[..., crop_fit[0]: crop_fit[-1]]
    support = support[..., crop_fit[0]: crop_fit[-1]]

    X, Y = np.meshgrid(
        np.arange(0, support.shape[2]), np.arange(0, support.shape[1])
    )

    # Support outline drawn above both layers (zorder 2.3 > 2.1 > 1).
    ax.contour(
        X,
        Y,
        support[slice_pos, ...],
        levels=[0, .009],
        linewidths=contour_linewidth,
        colors="k",
        zorder=2.3
    )

    background = None
    if not no_background:
        background = ax.matshow(
            strain[slice_pos, ...],
            origin="lower",
            cmap=background_cmap,
            vmin=min_max_strain[0],
            vmax=min_max_strain[1],
            alpha=background_opacity,
            zorder=1,
            aspect=aspect_ratio
        )

    sm = None
    if not no_foreground:
        # One deviation line per row of the slice.
        for z in np.arange(0, disp.shape[1]):
            ax, sm = plot_deviation(
                ax,
                x=np.arange(0, disp.shape[2]),
                y_pos=z,
                deviation=disp[slice_pos, z, :],
                scale=displacement_scale,
                vmin=min_max_disp[0],
                vmax=min_max_disp[1],
                centered=True,
                cmap=foreground_cmap,
                arrow=True,
                reference_line=reference_line,
                interpolate=interpolate,
                linewidth=linewidth,
                zorder=2.1,
                **kwargs
            )

    if return_colorbar:
        return ax, background, sm
    return ax
38 | ) 39 | ) 40 | 41 | args = parser.parse_args() 42 | 43 | # Locate the examples directory 44 | templates_dir = get_templates_path() 45 | 46 | # Update paths to notebooks in the examples directory 47 | bcdi_notebook = os.path.join(templates_dir, "bcdi_pipeline.ipynb") 48 | step_by_step_notebook = os.path.join( 49 | templates_dir, "step_by_step_bcdi_analysis.ipynb" 50 | ) 51 | 52 | if ( 53 | not os.path.exists(bcdi_notebook) 54 | or not os.path.exists(step_by_step_notebook) 55 | ): 56 | raise FileNotFoundError( 57 | "Examples notebooks not found. " 58 | f"Expected location: {templates_dir}\n" + helptext 59 | ) 60 | 61 | path = os.getcwd() 62 | if args.path: 63 | path = args.path 64 | if not path.endswith("/"): 65 | path += "/" 66 | 67 | if not os.path.exists(os.path.dirname(path)): 68 | raise FileNotFoundError( 69 | f"Directory {path} does not exist.\n" + helptext 70 | ) 71 | 72 | files = { 73 | bcdi_notebook: os.path.join(path, os.path.basename(bcdi_notebook)), 74 | step_by_step_notebook: os.path.join( 75 | path, os.path.basename(step_by_step_notebook) 76 | ) 77 | } 78 | 79 | for source, dest in files.items(): 80 | if os.path.isfile(dest): 81 | if args.force: 82 | print( 83 | f"Force file creation requested, file '{dest}' " 84 | "will be overwritten." 85 | ) 86 | shutil.copy(source, dest) 87 | else: 88 | raise FileExistsError( 89 | f"File {dest} already exists. Rename the existing file or " 90 | "use -f or --force option to force creation." 91 | ) 92 | else: 93 | shutil.copy(source, dest) 94 | 95 | print( 96 | f"Notebooks copied to {path}.\n" 97 | "You can now run the notebooks using Jupyter Notebook or Jupyter Lab." 
def main() -> None:
    """
    Copy the bundled detector-calibration notebook template into the
    requested (or current) directory, refusing to overwrite an existing
    file unless --force is given.
    """
    helptext = "try -h or --help to see usage."

    parser = argparse.ArgumentParser(
        prog="prepare_detector_calibration",
        description=(
            "Prepare the notebooks required for the detector calibration."
        ),
    )
    parser.add_argument(
        "-p", "--path",
        type=str,
        help="The directory path where the notebooks will be created."
    )
    parser.add_argument(
        "-f", "--force",
        default=False,
        action="store_true",
        help=(
            "Whether or not to force the creation of the files if they "
            "already exist."
        )
    )

    args = parser.parse_args()

    # Locate the template notebook bundled with the package.
    templates_dir = get_templates_path()
    notebook_path = os.path.join(templates_dir, "detector_calibration.ipynb")

    if not os.path.exists(notebook_path):
        raise FileNotFoundError(
            f"Notebook not found. Expected location: {notebook_path}\n"
            + helptext
        )

    # Default to the current working directory; os.path.join below makes
    # the former manual trailing-slash bookkeeping unnecessary.
    path = args.path if args.path else os.getcwd()
    if not os.path.isdir(path):
        raise FileNotFoundError(
            f"Directory {path} does not exist.\n" + helptext
        )

    dest = os.path.join(path, os.path.basename(notebook_path))

    # Guard clauses instead of the former nested if/else that duplicated
    # the shutil.copy call in both branches.
    if os.path.isfile(dest) and not args.force:
        raise FileExistsError(
            f"File {dest} already exists. Rename the existing file or "
            "use -f or --force option to force creation."
        )
    if os.path.isfile(dest):
        print(
            f"Force file creation requested, file '{dest}' "
            "will be overwritten."
        )
    shutil.copy(notebook_path, dest)

    print(
        f"Notebook copied to {path}.\n"
        "You can now run the notebook using Jupyter Notebook or Jupyter Lab."
    )


if __name__ == "__main__":
    main()
\n", 19 | "- A **Python dictionary** for interactive control in this notebook. \n" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": null, 25 | "metadata": { 26 | "tags": [] 27 | }, 28 | "outputs": [], 29 | "source": [ 30 | "# import required packages\n", 31 | "import os\n", 32 | "import cdiutils # core library for BCDI processing" 33 | ] 34 | }, 35 | { 36 | "cell_type": "markdown", 37 | "metadata": {}, 38 | "source": [ 39 | "## ⚙️ **General Parameters**\n", 40 | "Here, define the key parameters for **accessing and saving data** before running the pipeline. \n", 41 | "- **These parameters must be set manually by the user** before execution. \n", 42 | "- The output data will be saved in a structured directory format based on `sample_name` and `scan`. However, you can change the directory path if you like.\n" 43 | ] 44 | }, 45 | { 46 | "cell_type": "code", 47 | "execution_count": null, 48 | "metadata": {}, 49 | "outputs": [], 50 | "source": [ 51 | "# define the key parameters (must be filled in by the user)\n", 52 | "beamline_setup: str = \"\" # example: \"ID01\" (provide the beamline setup)\n", 53 | "experiment_file_path: str = \"\" # example: \"/path/to/experiment/file.h5\"\n", 54 | "sample_name: str = \"\" # example: \"Sample_Pt\" (specify the sample name)\n", 55 | "scan: int = 0 # example: 42 (specify the scan number)\n", 56 | "\n", 57 | "# choose where to save the results (default: current working directory)\n", 58 | "dump_dir = os.getcwd() + f\"/results/{sample_name}/S{scan}/\"\n", 59 | "\n", 60 | "# load the parameters and parse them into the BcdiPipeline class instance\n", 61 | "params = cdiutils.pipeline.get_params_from_variables(dir(), globals())\n", 62 | "bcdi_pipeline = cdiutils.BcdiPipeline(params=params)" 63 | ] 64 | }, 65 | { 66 | "cell_type": "markdown", 67 | "metadata": {}, 68 | "source": [ 69 | "## 🔹**Pre-Processing** \n", 70 | "\n", 71 | "If you need to update specific parameters, you can **pass them directly** into the 
`preprocess` method. \n", 72 | "\n", 73 | "### **Main Parameters**\n", 74 | "- `preprocess_shape` → The shape of the cropped window used throughout the processes. \n", 75 | " - Can be a **tuple of 2 or 3 values**. \n", 76 | " - If only **2 values**, the entire rocking curve is used. \n", 77 | "\n", 78 | "- `voxel_reference_methods` → A `list` (or a single value) defining how to centre the data. \n", 79 | " - Can include `\"com\"`, `\"max\"`, or a `tuple` of `int` (specific voxel position). \n", 80 | " - Example:\n", 81 | " ```python\n", 82 | " voxel_reference_methods = [(70, 200, 200), \"com\", \"com\"]\n", 83 | " ```\n", 84 | " - This centres a box of size `preprocess_shape` around `(70, 200, 200)`, then iteratively refines it using `\"com\"` (only computed within this box).\n", 85 | " - Useful when `\"com\"` fails due to artifacts or `\"max\"` fails due to hot pixels. \n", 86 | " - Default: `[\"max\", \"com\", \"com\"]`. \n", 87 | "\n", 88 | "- `rocking_angle_binning` → If you want to bin in the **rocking curve direction**, provide a binning factor (ex.: `2`). \n", 89 | "\n", 90 | "- `light_loading` → If `True`, loads only the **ROI of the data** based on `voxel_reference_methods` and `preprocess_output_shape`. \n", 91 | "\n", 92 | "- `hot_pixel_filter` → Removes isolated hot pixels. \n", 93 | " - Default: `False`. \n", 94 | "\n", 95 | "- `background_level` → Sets the background intensity to be removed. \n", 96 | " - Example: `3`. \n", 97 | " - Default: `None`. \n", 98 | "\n", 99 | "- `hkl` → Defines the **Bragg reflection** measured to extend *d*-spacing values to the lattice parameter. \n", 100 | " - Default: `[1, 1, 1]`. 
\n" 101 | ] 102 | }, 103 | { 104 | "cell_type": "code", 105 | "execution_count": null, 106 | "metadata": {}, 107 | "outputs": [], 108 | "source": [ 109 | "bcdi_pipeline.preprocess(\n", 110 | " preprocess_shape=(150, 150), # define cropped window size\n", 111 | " voxel_reference_methods=[\"max\", \"com\", \"com\"], # centring method sequence\n", 112 | " hot_pixel_filter=False, # remove isolated hot pixels\n", 113 | " background_level=None, # background intensity level to remove\n", 114 | ")" 115 | ] 116 | }, 117 | { 118 | "cell_type": "markdown", 119 | "metadata": {}, 120 | "source": [ 121 | "## 🔹 **[PyNX](https://pynx.esrf.fr/en/latest/index.html) Phase Retrieval**\n", 122 | "See the [pynx.cdi](https://pynx.esrf.fr/en/latest/scripts/pynx-cdi-id01.html) documentation for details on the phasing algorithms used here. \n", 123 | "\n", 124 | "**Algorithm recipe**\n", 125 | "\n", 126 | "You can either: \n", 127 | "- provide the exact chain of algorithms. \n", 128 | "- or specify the number of iterations for **RAAR**, **HIO**, and **ER**. 
\n", 129 | "\n", 130 | "```python\n", 131 | "algorithm = None # ex: \"(Sup * (ER**20)) ** 10, (Sup*(HIO**20)) ** 15, (Sup*(RAAR**20)) ** 25\"\n", 132 | "nb_raar = 500\n", 133 | "nb_hio = 300\n", 134 | "nb_er = 200\n", 135 | "```\n", 136 | "**Support-related parameters**\n", 137 | "```python\n", 138 | "support = \"auto\" # ex: bcdi_pipeline.pynx_phasing_dir + \"support.cxi\" (path to an existing support)\n", 139 | "support_threshold = \"0.15, 0.40\" # must be a string\n", 140 | "support_update_period = 20\n", 141 | "support_only_shrink = False\n", 142 | "support_post_expand = None # ex: \"-1,1\" or \"-1,2,-1\"\n", 143 | "support_update_border_n = None\n", 144 | "support_smooth_width_begin = 2\n", 145 | "support_smooth_width_end = 0.5\n", 146 | "```\n", 147 | "**Other parameters**\n", 148 | "```python\n", 149 | "positivity = False\n", 150 | "beta = 0.9 # β parameter in HIO and RAAR\n", 151 | "detwin = True\n", 152 | "rebin = \"1, 1, 1\" # must be a string\n", 153 | "```\n", 154 | "**Number of Runs & Reconstructions to Keep**\n", 155 | "```python\n", 156 | "nb_run = 20 # total number of runs\n", 157 | "nb_run_keep = 10 # number of reconstructions to keep\n", 158 | "```\n", 159 | "\n", 160 | "**Override defaults in `phase_retrieval`**\n", 161 | "\n", 162 | "You can override any default parameter directly in the phase_retrieval method:\n", 163 | "```python\n", 164 | "bcdi_pipeline.phase_retrieval(nb_run=50, nb_run_keep=25)\n", 165 | "```\n", 166 | "If a parameter is not provided, the default value is used." 
167 | ] 168 | }, 169 | { 170 | "cell_type": "code", 171 | "execution_count": null, 172 | "metadata": {}, 173 | "outputs": [], 174 | "source": [ 175 | "bcdi_pipeline.phase_retrieval(\n", 176 | " clear_former_results=True,\n", 177 | " nb_run=20,\n", 178 | " nb_run_keep=10,\n", 179 | " # support=bcdi_pipeline.pynx_phasing_dir + \"support.cxi\"\n", 180 | ")" 181 | ] 182 | }, 183 | { 184 | "cell_type": "markdown", 185 | "metadata": {}, 186 | "source": [ 187 | "### 🔹 **Analyse the phasing results**\n", 188 | "\n", 189 | "This step evaluates the quality of the phase retrieval results by sorting reconstructions based on a `sorting_criterion`. \n", 190 | "\n", 191 | "##### **Available Sorting Criteria**\n", 192 | "- `\"mean_to_max\"` → Difference between the mean of the **Gaussian fit of the amplitude histogram** and its maximum value. A **smaller difference** indicates a more homogeneous reconstruction. \n", 193 | "- `\"sharpness\"` → Sum of the amplitude within the support raised to the power of 4. **Lower values** indicate greater homogeneity. \n", 194 | "- `\"std\"` → **Standard deviation** of the amplitude. \n", 195 | "- `\"llk\"` → **Log-likelihood** of the reconstruction. \n", 196 | "- `\"llkf\"` → **Free log-likelihood** of the reconstruction. 
\n" 197 | ] 198 | }, 199 | { 200 | "cell_type": "code", 201 | "execution_count": null, 202 | "metadata": {}, 203 | "outputs": [], 204 | "source": [ 205 | "bcdi_pipeline.analyse_phasing_results(\n", 206 | " sorting_criterion=\"mean_to_max\", # selects the sorting method\n", 207 | " \n", 208 | " # Optional parameters\n", 209 | " # plot_phasing_results=False, # uncomment to disable plotting\n", 210 | " # plot_phase=True, # uncomment to enable phase plotting\n", 211 | ")" 212 | ] 213 | }, 214 | { 215 | "cell_type": "markdown", 216 | "metadata": {}, 217 | "source": [ 218 | "### 🔹 Optionally, generate a support for further phasing attempts \n", 219 | "\n", 220 | "##### **Parameters**\n", 221 | "- `run` → set to either: \n", 222 | " - `\"best\"` to use the best reconstruction. \n", 223 | " - an **integer** corresponding to the specific run you want. \n", 224 | "- `output_path` → the location to save the generated support. By default, it will be saved in the `pynx_phasing` folder. \n", 225 | "- `fill` → whether to fill the support if it contains holes. \n", 226 | " - Default: `False`.\n", 227 | "- `verbose` → whether to print logs and display a plot of the support. \n", 228 | "\n", 229 | "" 232 | ] 233 | }, 234 | { 235 | "cell_type": "code", 236 | "execution_count": null, 237 | "metadata": {}, 238 | "outputs": [], 239 | "source": [ 240 | "# bcdi_pipeline.generate_support_from(\"best\", fill=False) # uncomment to generate a support" 241 | ] 242 | }, 243 | { 244 | "cell_type": "markdown", 245 | "metadata": {}, 246 | "source": [ 247 | "### 🔹Selection of the best reconstructions & mode decomposition \n", 248 | "\n", 249 | "You can select the best reconstructions based on a **sorting criterion** and keep a specified number of top candidates. \n", 250 | "\n", 251 | "##### **Parameters**\n", 252 | "- `nb_of_best_sorted_runs` → the number of best reconstructions to keep, selected based on the `sorting_criterion` used in the `analyse_phasing_results` method above. 
\n", 253 | "- `best_runs` → instead of selecting based on sorting, you can manually specify a list of reconstruction numbers.\n", 254 | "\n", 255 | "By default, the **best reconstructions** are automatically selected. \n", 256 | "\n", 257 | "Once the best candidates are chosen, `mode_decomposition` analyses them to extract dominant features. \n", 258 | "\n" 259 | ] 260 | }, 261 | { 262 | "cell_type": "code", 263 | "execution_count": null, 264 | "metadata": {}, 265 | "outputs": [], 266 | "source": [ 267 | "# define how many of the best candidates to keep\n", 268 | "number_of_best_candidates: int = 5 \n", 269 | "\n", 270 | "# select the best reconstructions based on the sorting criterion\n", 271 | "bcdi_pipeline.select_best_candidates(\n", 272 | " nb_of_best_sorted_runs=number_of_best_candidates\n", 273 | " # best_runs=[10] # uncomment to manually select a specific run\n", 274 | ")\n", 275 | "\n", 276 | "# perform mode decomposition on the selected reconstructions\n", 277 | "bcdi_pipeline.mode_decomposition()\n" 278 | ] 279 | }, 280 | { 281 | "cell_type": "markdown", 282 | "metadata": {}, 283 | "source": [ 284 | "## 🔹**Post-processing**\n", 285 | "\n", 286 | "This stage includes several key operations: \n", 287 | "- **orthogonalisation** of the reconstructed data. \n", 288 | "- **phase manipulation**: \n", 289 | " - phase unwrapping \n", 290 | " - phase ramp removal \n", 291 | "- **computation of physical properties**: \n", 292 | " - displacement field \n", 293 | " - strain \n", 294 | " - d-spacing \n", 295 | "- **visualisation**: Generate multiple plots for analysis. 
\n" 296 | ] 297 | }, 298 | { 299 | "cell_type": "code", 300 | "execution_count": null, 301 | "metadata": {}, 302 | "outputs": [], 303 | "source": [ 304 | "bcdi_pipeline.postprocess(\n", 305 | " isosurface=0.3, # threshold for isosurface\n", 306 | " voxel_size=None, # use default voxel size if not provided\n", 307 | " flip=False # whether to flip the reconstruction if you got the twin image (enantiomorph)\n", 308 | ")" 309 | ] 310 | }, 311 | { 312 | "cell_type": "markdown", 313 | "metadata": {}, 314 | "source": [ 315 | "## 🔹 Facet analysis: detection & indexing *(coming soon...)* " 316 | ] 317 | }, 318 | { 319 | "cell_type": "markdown", 320 | "metadata": {}, 321 | "source": [ 322 | "## 💡 Feedback & Issue Reporting \n", 323 | "\n", 324 | "If you have **comments, suggestions, or encounter any issues**, please reach out: \n", 325 | "\n", 326 | "📧 **Email:** [clement.atlan@esrf.fr](mailto:clement.atlan@esrf.fr?subject=cdiutils) \n", 327 | "🐙 **GitHub Issues:** [Report an issue](https://github.com/clatlan/cdiutils/issues) \n" 328 | ] 329 | }, 330 | { 331 | "cell_type": "markdown", 332 | "metadata": {}, 333 | "source": [ 334 | "## Credits\n", 335 | "This notebook was created by Clément Atlan, ESRF, 2025. 
It is part of the `cdiutils` package, which provides tools for BCDI data analysis and visualisation.\n", 336 | "If you have used this notebook or the `cdiutils` package in your research, please consider citing the package https://github.com/clatlan/cdiutils/\n", 337 | "You'll find the citation information in the `cdiutils` package documentation.\n", 338 | "\n", 339 | "```bibtex\n", 340 | "@software{Atlan_Cdiutils_A_python,\n", 341 | "author = {Atlan, Clement},\n", 342 | "doi = {10.5281/zenodo.7656853},\n", 343 | "license = {MIT},\n", 344 | "title = {{Cdiutils: A python package for Bragg Coherent Diffraction Imaging processing, analysis and visualisation workflows}},\n", 345 | "url = {https://github.com/clatlan/cdiutils},\n", 346 | "version = {0.2.0}\n", 347 | "}\n", 348 | "```\n", 349 | "\n" 350 | ] 351 | } 352 | ], 353 | "metadata": { 354 | "kernelspec": { 355 | "display_name": "Python 3", 356 | "language": "python", 357 | "name": "python3" 358 | }, 359 | "language_info": { 360 | "codemirror_mode": { 361 | "name": "ipython", 362 | "version": 3 363 | }, 364 | "file_extension": ".py", 365 | "mimetype": "text/x-python", 366 | "name": "python", 367 | "nbconvert_exporter": "python", 368 | "pygments_lexer": "ipython3", 369 | "version": "3.10.14" 370 | } 371 | }, 372 | "nbformat": 4, 373 | "nbformat_minor": 4 374 | } 375 | -------------------------------------------------------------------------------- /src/cdiutils/templates/detector_calibration.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "code", 5 | "execution_count": null, 6 | "metadata": {}, 7 | "outputs": [], 8 | "source": [ 9 | "import cdiutils" 10 | ] 11 | }, 12 | { 13 | "cell_type": "code", 14 | "execution_count": null, 15 | "metadata": {}, 16 | "outputs": [], 17 | "source": [ 18 | "experiment_file_path =\n", 19 | "\n", 20 | "\n", 21 | "energy =\n", 22 | "\n", 23 | "sample_name =\n", 24 | "\n", 25 | "loader = 
cdiutils.io.ID01Loader(\n", 26 | " experiment_file_path=experiment_file_path,\n", 27 | " sample_name=sample_name,\n", 28 | ")" 29 | ] 30 | }, 31 | { 32 | "cell_type": "code", 33 | "execution_count": null, 34 | "metadata": {}, 35 | "outputs": [], 36 | "source": [ 37 | "scan =\n", 38 | "det_calib_frames = loader.load_detector_data(scan=scan)\n", 39 | "angles = loader.load_motor_positions(scan=scan)" 40 | ] 41 | }, 42 | { 43 | "cell_type": "code", 44 | "execution_count": null, 45 | "metadata": {}, 46 | "outputs": [], 47 | "source": [ 48 | "geometry = cdiutils.Geometry.from_setup(beamline_setup=)\n", 49 | "geometry.cxi_to_xu() # change to XU convention\n", 50 | "print(geometry) # to check out the geometry" 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "det_calib_params = cdiutils.SpaceConverter.run_detector_calibration(\n", 60 | " det_calib_frames,\n", 61 | " detector_outofplane_angle=angles[\"detector_outofplane_angle\"],\n", 62 | " detector_inplane_angle=angles[\"detector_inplane_angle\"],\n", 63 | " xu_detector_circles=geometry.detector_circles,\n", 64 | " energy=energy,\n", 65 | " sdd_estimate=None\n", 66 | ")" 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "\n", 76 | "print(\n", 77 | " \"det_calib_params = {\"\n", 78 | ")\n", 79 | "\n", 80 | "for k, v in det_calib_params.items():\n", 81 | " print(\n", 82 | " f'\\t\"{k}\": {v},'\n", 83 | " )\n", 84 | "print(\"}\")" 85 | ] 86 | } 87 | ], 88 | "metadata": { 89 | "kernelspec": { 90 | "display_name": "Python 3", 91 | "language": "python", 92 | "name": "python3" 93 | }, 94 | "language_info": { 95 | "codemirror_mode": { 96 | "name": "ipython", 97 | "version": 3 98 | }, 99 | "file_extension": ".py", 100 | "mimetype": "text/x-python", 101 | "name": "python", 102 | "nbconvert_exporter": "python", 103 | "pygments_lexer": "ipython3", 104 | 
"version": "3.10.14" 105 | }, 106 | "orig_nbformat": 4, 107 | "vscode": { 108 | "interpreter": { 109 | "hash": "f8c9f7f8d10f9447c17de65ed45babc999209e89aee4cc6fcc55aaea8612438e" 110 | } 111 | } 112 | }, 113 | "nbformat": 4, 114 | "nbformat_minor": 2 115 | } 116 | --------------------------------------------------------------------------------