├── .github ├── CONTRIBUTING.md ├── ISSUE_TEMPLATE │ ├── bug_report.yml │ ├── documentation.yml │ └── feature_request.yml ├── actions │ └── setup-venv │ │ └── action.yml ├── dependabot.yml ├── pull_request_template.md ├── release-drafter.yml └── workflows │ ├── changelog.yml │ ├── main.yml │ ├── pr_checks.yml │ └── release-drafter.yml ├── .gitignore ├── .pre-commit-config.yaml ├── .readthedocs.yaml ├── CHANGES.rst ├── CITATION.cff ├── CODE_OF_CONDUCT.md ├── LICENSE ├── MANIFEST.in ├── Makefile ├── README.md ├── RELEASE_PROCESS.md ├── dev ├── README.md ├── codespell_ignore_words.txt ├── fermipy_crab_zipped.zip └── lst1_crab_dl4.zip ├── docs ├── .gitignore ├── Makefile ├── changes │ ├── README.md │ └── template.rst ├── make.bat └── source │ ├── CODE_OF_CONDUCT.md │ ├── CONTRIBUTING.md │ ├── _api_docs │ ├── analysis.rst │ ├── analysis │ │ ├── analysis_analysis.rst │ │ ├── analysis_step.rst │ │ └── analysis_step_base.rst │ ├── base.rst │ ├── base │ │ ├── base_base.rst │ │ ├── base_geom.rst │ │ └── base_reduction.rst │ ├── config.rst │ ├── config │ │ ├── generator.rst │ │ └── operations.rst │ ├── data.rst │ ├── data │ │ ├── data_1d.rst │ │ ├── data_3d.rst │ │ ├── data_dl4.rst │ │ ├── data_target.rst │ │ └── target │ │ │ ├── data_target_b.rst │ │ │ └── data_target_f.rst │ ├── gammapy.rst │ ├── gammapy │ │ ├── interop_model.rst │ │ └── read_models.rst │ ├── index.rst │ ├── io.rst │ ├── io │ │ ├── input_dl3.rst │ │ └── io_dl4.rst │ ├── stats.rst │ └── stats │ │ ├── stats.rst │ │ └── utils.rst │ ├── _static │ ├── SED_mult_instr_Crab.png │ ├── asgardpy_target_params_renaming.png │ ├── asgardpy_workflow.png │ ├── css │ │ └── custom.css │ └── favicon.ico │ ├── _templates │ ├── template.yaml │ └── template_model.yaml │ ├── additional_dataset_input.rst │ ├── additional_model_input.rst │ ├── additional_stats.rst │ ├── changelog.rst │ ├── conf.py │ ├── crab_sed.rst │ ├── index.rst │ ├── installation.md │ ├── need_for_asgardpy.rst │ └── overview.rst ├── environment.yml ├── 
environment_0.4.4.yml ├── notebooks ├── SED_comparison_joint_fit_vs_individual_fit.ipynb ├── get_preferred_spectral_model.ipynb ├── test_dataset_1d_step.ipynb ├── test_dataset_3d_step.ipynb ├── test_dl4_steps.ipynb ├── test_explore_EBL.ipynb ├── test_filtering_data_models.ipynb └── test_models.ipynb ├── pyproject.toml ├── scripts ├── check_preferred_model.py ├── download_asgardpy_data.sh └── run_asgardpy_full.py ├── src └── asgardpy │ ├── __init__.py │ ├── _dev_version │ └── __init__.py │ ├── analysis │ ├── __init__.py │ ├── analysis.py │ ├── step.py │ ├── step_base.py │ └── tests │ │ ├── __init__.py │ │ └── test_analysis_steps.py │ ├── base │ ├── __init__.py │ ├── base.py │ ├── geom.py │ └── reduction.py │ ├── config │ ├── __init__.py │ ├── generator.py │ ├── model_templates │ │ ├── model_template_bpl.yaml │ │ ├── model_template_bpl2.yaml │ │ ├── model_template_eclp.yaml │ │ ├── model_template_ecpl-3fgl.yaml │ │ ├── model_template_ecpl.yaml │ │ ├── model_template_ecpl2.yaml │ │ ├── model_template_fov.yaml │ │ ├── model_template_lp.yaml │ │ ├── model_template_pl.yaml │ │ ├── model_template_pl_ebl.yaml │ │ ├── model_template_sbpl.yaml │ │ └── model_template_secpl.yaml │ ├── operations.py │ ├── template.yaml │ ├── template_model.yaml │ └── tests │ │ ├── __init__.py │ │ └── test_config.py │ ├── conftest.py │ ├── data │ ├── __init__.py │ ├── dataset_1d.py │ ├── dataset_3d.py │ ├── dl4.py │ ├── target.py │ └── tests │ │ ├── __init__.py │ │ ├── test_catalog.py │ │ ├── test_dataset1d.py │ │ ├── test_dataset3d.py │ │ ├── test_gpy_mwl.py │ │ └── test_target.py │ ├── gammapy │ ├── __init__.py │ ├── interoperate_models.py │ ├── read_models.py │ └── tests │ │ ├── __init__.py │ │ └── test_xml_model.py │ ├── io │ ├── __init__.py │ ├── input_dl3.py │ ├── io_dl4.py │ └── tests │ │ ├── __init__.py │ │ └── test_io.py │ ├── py.typed │ ├── stats │ ├── __init__.py │ ├── stats.py │ ├── tests │ │ ├── __init__.py │ │ ├── test_model_pref.py │ │ └── test_pivot_energy.py │ └── utils.py │ ├── 
tests │ ├── __init__.py │ ├── config_crab_mult_instruments.yaml │ ├── config_gpy_mwl.yaml │ ├── config_hawc.yaml │ ├── config_test_base.yaml │ ├── config_test_ebl.yaml │ └── config_test_gadf.yaml │ └── version.py └── tox.ini /.github/ISSUE_TEMPLATE/bug_report.yml: -------------------------------------------------------------------------------- 1 | name: 🐛 Bug Report 2 | description: Create a report to help us reproduce and fix the bug 3 | labels: 'bug' 4 | 5 | body: 6 | - type: markdown 7 | attributes: 8 | value: > 9 | #### Before submitting a bug, please make sure the issue hasn't been already addressed by searching through [the existing and past issues](https://github.com/mireianievas/asgardpy/issues?q=is%3Aissue+sort%3Acreated-desc+). 10 | - type: textarea 11 | attributes: 12 | label: 🐛 Describe the bug 13 | description: | 14 | Please provide a clear and concise description of what the bug is. 15 | 16 | If relevant, add a minimal example so that we can reproduce the error by running the code. It is very important for the snippet to be as succinct (minimal) as possible, so please take time to trim down any irrelevant code to help us debug efficiently. We are going to copy-paste your code and we expect to get the same result as you did: avoid any external data, and include the relevant imports, etc. For example: 17 | 18 | ```python 19 | # All necessary imports at the beginning 20 | import asgardpy 21 | 22 | # A succinct reproducing example trimmed down to the essential parts: 23 | assert False is True, "Oh no!" 24 | ``` 25 | 26 | If the code is too long (hopefully, it isn't), feel free to put it in a public gist and link it in the issue: https://gist.github.com. 27 | 28 | Please also paste or describe the results you observe instead of the expected results. If you observe an error, please paste the error message including the **full** traceback of the exception. It may be relevant to wrap error messages in ```` ```triple quotes blocks``` ````. 
29 | placeholder: | 30 | A clear and concise description of what the bug is. 31 | validations: 32 | required: true 33 | - type: textarea 34 | attributes: 35 | label: Versions 36 | description: | 37 | Please run the following and paste the output below. 38 | ```sh 39 | python --version && pip freeze 40 | ``` 41 | validations: 42 | required: true 43 | - type: markdown 44 | attributes: 45 | value: > 46 | Thanks for contributing 🎉! 47 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/documentation.yml: -------------------------------------------------------------------------------- 1 | name: 📚 Documentation 2 | description: Report an issue related to https://asgardpy.readthedocs.io/latest 3 | labels: 'documentation' 4 | 5 | body: 6 | - type: textarea 7 | attributes: 8 | label: 📚 The doc issue 9 | description: > 10 | A clear and concise description of what content in https://asgardpy.readthedocs.io/latest is an issue. 11 | validations: 12 | required: true 13 | - type: textarea 14 | attributes: 15 | label: Suggest a potential alternative/fix 16 | description: > 17 | Tell us how we could improve the documentation in this regard. 18 | - type: markdown 19 | attributes: 20 | value: > 21 | Thanks for contributing 🎉! 22 | -------------------------------------------------------------------------------- /.github/ISSUE_TEMPLATE/feature_request.yml: -------------------------------------------------------------------------------- 1 | name: 🚀 Feature request 2 | description: Submit a proposal/request for a new feature 3 | labels: 'feature request' 4 | 5 | body: 6 | - type: textarea 7 | attributes: 8 | label: 🚀 The feature, motivation and pitch 9 | description: > 10 | A clear and concise description of the feature proposal. Please outline the motivation for the proposal. Is your feature request related to a specific problem? e.g., *"I'm working on X and would like Y to be possible"*. 
If this is related to another GitHub issue, please link here too. 11 | validations: 12 | required: true 13 | - type: textarea 14 | attributes: 15 | label: Alternatives 16 | description: > 17 | A description of any alternative solutions or features you've considered, if any. 18 | - type: textarea 19 | attributes: 20 | label: Additional context 21 | description: > 22 | Add any other context or screenshots about the feature request. 23 | - type: markdown 24 | attributes: 25 | value: > 26 | Thanks for contributing 🎉! 27 | -------------------------------------------------------------------------------- /.github/actions/setup-venv/action.yml: -------------------------------------------------------------------------------- 1 | name: Python virtualenv 2 | description: Set up a Python virtual environment with caching 3 | inputs: 4 | python-version: 5 | description: The Python version to use 6 | required: true 7 | # cache-prefix: 8 | # description: Update this to invalidate the cache 9 | # required: true 10 | # default: v4 11 | runs: 12 | using: composite 13 | steps: 14 | - name: Setup Python 15 | uses: actions/setup-python@v5 16 | with: 17 | python-version: ${{ inputs.python-version }} 18 | 19 | - shell: ${{ runner.shell }} 20 | run: | 21 | # Install prerequisites. 22 | python -m pip install tox 23 | 24 | - shell: ${{ runner.shell }} 25 | run: | 26 | # Get the exact Python version to use in the cache key. 27 | echo "PYTHON_VERSION=$(python --version)" >> $GITHUB_ENV 28 | 29 | - uses: actions/cache@v4 30 | id: virtualenv-cache 31 | with: 32 | path: .venv 33 | enableCrossOsArchive: true 34 | key: ${{ runner.os }}-${{ env.PYTHON_VERSION }}-${{ hashFiles('pyproject.toml') }} # ${{ inputs.cache-prefix }}- 35 | restore-keys: | 36 | ${{ runner.os }}-pip- 37 | 38 | - if: steps.virtualenv-cache.outputs.cache-hit != 'true' 39 | shell: ${{ runner.shell }} 40 | run: | 41 | # Set up virtual environment without cache hit. 
42 | test -d .venv || virtualenv -p $(which python) --copies --reset-app-data .venv 43 | echo "${{ runner.venv_activate_command }}" 44 | python -m pip install -e .[dev] 45 | 46 | - if: steps.virtualenv-cache.outputs.cache-hit == 'true' 47 | shell: ${{ runner.shell }} 48 | run: | 49 | # Set up virtual environment from cache hit. 50 | echo "${{ runner.venv_activate_command }}" 51 | python -m pip install --no-deps -e .[dev] 52 | 53 | - shell: ${{ runner.shell }} 54 | run: | 55 | # Show environment info. 56 | echo "${{ runner.venv_activate_command }}" 57 | echo "✓ Installed $(python --version) virtual environment to $(which python)" 58 | echo "Packages:" 59 | python -m pip freeze 60 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | version: 2 2 | updates: 3 | - package-ecosystem: "pip" 4 | directory: "/" 5 | schedule: 6 | interval: "daily" 7 | open-pull-requests-limit: 10 8 | labels: 9 | - "dependencies" 10 | - "github_actions" 11 | - "no-changelog-needed" 12 | - package-ecosystem: "github-actions" 13 | directory: "/" 14 | labels: 15 | - "dependencies" 16 | - "github_actions" 17 | - "no-changelog-needed" 18 | schedule: 19 | interval: "daily" 20 | groups: 21 | actions: 22 | patterns: 23 | - "*" 24 | -------------------------------------------------------------------------------- /.github/pull_request_template.md: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Fixes # 5 | 6 | Changes proposed in this pull request: 7 | 8 | - 9 | 10 | ## Before submitting 11 | 12 | 13 | - [ ] I've read and followed all steps in the [Making a pull request](https://github.com/allenai/beaker-py/blob/main/.github/CONTRIBUTING.md#making-a-pull-request) 14 | section of the `CONTRIBUTING` docs. 
15 | - [ ] I've updated or added any relevant docstrings following the syntax described in the 16 | [Writing docstrings](https://github.com/allenai/beaker-py/blob/main/.github/CONTRIBUTING.md#writing-docstrings) section of the `CONTRIBUTING` docs. 17 | - [ ] If this PR fixes a bug, I've added a test that will fail without my fix. 18 | - [ ] If this PR adds a new feature, I've added tests that sufficiently cover my new functionality. 19 | -------------------------------------------------------------------------------- /.github/release-drafter.yml: -------------------------------------------------------------------------------- 1 | template: | 2 | ## What’s Changed 3 | 4 | $CHANGES 5 | 6 | ## Contributors 7 | 8 | $CONTRIBUTORS 9 | -------------------------------------------------------------------------------- /.github/workflows/changelog.yml: -------------------------------------------------------------------------------- 1 | name: Changelog 2 | 3 | on: 4 | pull_request: 5 | # should also be re-run when changing labels 6 | types: [opened, reopened, labeled, unlabeled, synchronize] 7 | 8 | env: 9 | FRAGMENT_NAME: "docs/changes/${{ github.event.number }}.*.rst" 10 | 11 | jobs: 12 | changelog: 13 | runs-on: ubuntu-latest 14 | steps: 15 | - uses: actions/checkout@v4 16 | with: 17 | fetch-depth: 0 18 | 19 | - name: Check for news fragment 20 | if: ${{ !
contains( github.event.pull_request.labels.*.name, 'no-changelog-needed')}} 21 | uses: andstor/file-existence-action@v3 22 | with: 23 | files: ${{ env.FRAGMENT_NAME }} 24 | fail: true 25 | -------------------------------------------------------------------------------- /.github/workflows/pr_checks.yml: -------------------------------------------------------------------------------- 1 | name: PR Checks 2 | 3 | concurrency: 4 | group: ${{ github.workflow }}-${{ github.ref }} 5 | cancel-in-progress: true 6 | 7 | on: 8 | pull_request: 9 | branches: 10 | - main 11 | paths: 12 | - 'src/asgardpy/**' 13 | 14 | jobs: 15 | changelog: 16 | name: CHANGELOG 17 | runs-on: ubuntu-latest 18 | if: github.event_name == 'pull_request' 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | with: 23 | fetch-depth: 0 24 | - run: git config --global init.defaultBranch main 25 | 26 | - name: Check that CHANGELOG has been updated 27 | run: | 28 | # If this step fails, this means you haven't updated the CHANGELOG.md 29 | # file with notes on your contribution. 30 | git diff --name-only $(git merge-base origin/main HEAD) | grep '^CHANGELOG.md$' && echo "Thanks for helping keep our CHANGELOG up-to-date!"
31 | -------------------------------------------------------------------------------- /.github/workflows/release-drafter.yml: -------------------------------------------------------------------------------- 1 | name: Release Drafter 2 | 3 | on: 4 | push: 5 | branches: 6 | - main 7 | 8 | jobs: 9 | update_release_draft: 10 | runs-on: ubuntu-latest 11 | steps: 12 | - uses: release-drafter/release-drafter@v6 13 | env: 14 | GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} 15 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # setuptools_scm generated version file 2 | src/asgardpy/_version.py 3 | 4 | # build artifacts 5 | 6 | .eggs/ 7 | .mypy_cache 8 | *.egg-info/ 9 | *.egg 10 | build/ 11 | dist/ 12 | sdist/ 13 | lib/ 14 | lib64/ 15 | pip-wheel-metadata/ 16 | MANIFEST 17 | 18 | 19 | # dev tools 20 | 21 | .env 22 | env/ 23 | .envrc 24 | .python-version 25 | .idea 26 | .venv/ 27 | venv/ 28 | ENV/ 29 | env.bak/ 30 | venv.bak/ 31 | .vscode/ 32 | /*.iml 33 | 34 | 35 | # jupyter notebooks 36 | 37 | .ipynb_checkpoints 38 | 39 | 40 | # miscellaneous 41 | 42 | .cache/ 43 | doc/_build/ 44 | *.swp 45 | .DS_Store 46 | 47 | 48 | # python 49 | 50 | *.pyc 51 | *.pyo 52 | __pycache__ 53 | 54 | 55 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 56 | __pypackages__/ 57 | 58 | 59 | # testing and continuous integration 60 | 61 | .coverage 62 | coverage.xml 63 | .pytest_cache/ 64 | .tox/ 65 | .benchmarks 66 | 67 | # documentation build artifacts 68 | 69 | docs/build 70 | site/ 71 | -------------------------------------------------------------------------------- /.pre-commit-config.yaml: -------------------------------------------------------------------------------- 1 | repos: 2 | # https://github.com/pre-commit/pre-commit-hooks 3 | - repo: https://github.com/pre-commit/pre-commit-hooks 4 | rev: "v5.0.0" 5 | hooks: 6 | - id: check-added-large-files 7 | - id: check-case-conflict 8 | - id: check-merge-conflict 9 | - id: check-symlinks 10 | - id: check-yaml 11 | - id: debug-statements 12 | - id: end-of-file-fixer 13 | - id: mixed-line-ending 14 | - id: name-tests-test 15 | args: ["--pytest-test-first"] 16 | - id: requirements-txt-fixer 17 | - id: trailing-whitespace 18 | # https://pycqa.github.io/isort/docs/configuration/black_compatibility.html#integration-with-pre-commit 19 | - repo: https://github.com/astral-sh/ruff-pre-commit 20 | rev: v0.9.3 21 | hooks: 22 | # Run the linter. 23 | - id: ruff 24 | # args: [ --fix ] 25 | # Run the formatter. 
26 | - id: ruff-format 27 | - repo: https://github.com/pycqa/isort 28 | rev: 5.13.2 29 | hooks: 30 | - id: isort 31 | args: ["--profile", "black", "--filter-files"] 32 | - repo: https://github.com/adamchainz/blacken-docs 33 | rev: "1.19.1" 34 | hooks: 35 | - id: blacken-docs 36 | additional_dependencies: [black==24.*] 37 | - repo: https://github.com/codespell-project/codespell 38 | rev: v2.4.0 39 | hooks: 40 | - id: codespell 41 | additional_dependencies: 42 | - tomli 43 | - repo: https://github.com/pre-commit/mirrors-mypy 44 | rev: "v1.14.1" 45 | hooks: 46 | - id: mypy 47 | files: src 48 | args: [] 49 | additional_dependencies: ["types-PyYAML"] 50 | - repo: https://github.com/pre-commit/pygrep-hooks 51 | # For .rst files 52 | rev: "v1.10.0" 53 | hooks: 54 | - id: rst-backticks 55 | - id: rst-directive-colons 56 | - id: rst-inline-touching-normal 57 | -------------------------------------------------------------------------------- /.readthedocs.yaml: -------------------------------------------------------------------------------- 1 | version: 2 2 | 3 | build: 4 | os: ubuntu-22.04 5 | tools: 6 | python: "3.11" 7 | 8 | sphinx: 9 | configuration: docs/source/conf.py 10 | fail_on_warning: true 11 | 12 | python: 13 | install: 14 | - method: pip 15 | path: . 16 | extra_requirements: 17 | - dev 18 | -------------------------------------------------------------------------------- /CITATION.cff: -------------------------------------------------------------------------------- 1 | # Metadata for citation of this software according to the CFF format (https://citation-file-format.github.io/) 2 | cff-version: 1.2.0 3 | title: 'Asgardpy: Gammapy-based pipeline to support high-level analysis for multi-instruments joint datasets' 4 | abstract: 'User-friendly configuration-centred pipeline built over Gammapy to allow for easy 5 | simultaneous analysis of various datasets of different formats. 
6 | 7 | Example: 3D Fermi-LAT (with various source models in the Region of Interest stored in XML file) 8 | + 1D energy-dependent directional cuts MAGIC/LST [PointSkyRegion geometry for ON region] 9 | + 1D global directional cut VERITAS [CircleSkyRegion geometry for ON region]' 10 | type: software 11 | message: "If you use this software, please cite it using the metadata from this file." 12 | url: "https://asgardpy.readthedocs.io/en/latest/" 13 | doi: 10.5281/zenodo.8106369 14 | keywords: 15 | - Astronomy 16 | - "Gamma-rays" 17 | - "Data analysis" 18 | version: v0.5.3 19 | repository-code: "https://github.com/chaimain/asgardpy" 20 | license: Apache-2.0 21 | contact: 22 | - email: chaitanya.p.astrphys@gmail.com 23 | name: "Project manager" 24 | authors: 25 | - given-names: Chaitanya 26 | family-names: Priyadarshi 27 | affiliation: "Instytut Fizyki Jądrowej - Polskiej Akademii Nauk, Kraków, Poland" 28 | orcid: "https://orcid.org/0000-0002-9160-9617" 29 | - given-names: Mireia 30 | family-names: Nievas Rosillo 31 | affiliation: "Instituto de Astrofísica de Canarias, San Cristobal de La Laguna, Canarias, Spain" 32 | orcid: "https://orcid.org/0000-0002-8321-9168" 33 | date-released: 2024-10-29 34 | -------------------------------------------------------------------------------- /CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | All Asgardpy community members are expected to abide by the [Asgardpy Project Code of Conduct](https://asgardpy.readthedocs.io/en/latest/CODE_OF_CONDUCT.html).
2 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | # This subpackage is only used in development checkouts and should not be 2 | # included in built tarballs 3 | prune src/asgardpy/_dev_version 4 | prune .github 5 | -------------------------------------------------------------------------------- /Makefile: -------------------------------------------------------------------------------- 1 | .PHONY : docs 2 | docs : 3 | rm -rf docs/build/ 4 | sphinx-autobuild -b html --watch src/asgardpy/ docs/source/ docs/build/ 5 | 6 | .PHONY : run-checks 7 | run-checks : 8 | isort --check src/ 9 | black --check src/ 10 | ruff check src/ 11 | mypy src/ 12 | codespell src/asgardpy 13 | CUDA_VISIBLE_DEVICES='' pytest -v --color=yes --doctest-modules tests/ src/asgardpy/ 14 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | Asgardpy: Analysis Software for GAmma-Ray Data in Python 2 | ======================================================= 3 | 4 | [![Build Status](https://github.com/chaimain/asgardpy/actions/workflows/main.yml/badge.svg?branch=main)](https://github.com/chaimain/asgardpy/actions?query=branch%3Amain) [![codecov](https://codecov.io/gh/chaimain/asgardpy/branch/main/graph/badge.svg?token=0XEI9W8AKJ)](https://codecov.io/gh/chaimain/asgardpy) [![Scrutinizer Code Quality](https://scrutinizer-ci.com/g/chaimain/asgardpy/badges/quality-score.png?b=main)](https://scrutinizer-ci.com/g/chaimain/asgardpy/?branch=main) [![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.8106369.svg)](https://doi.org/10.5281/zenodo.8106369) ![PyPI](https://img.shields.io/pypi/v/asgardpy?label=pypi%20asgardpy) [![OpenSSF Best
Practices](https://bestpractices.coreinfrastructure.org/projects/7699/badge)](https://bestpractices.coreinfrastructure.org/projects/7699) [![gammapy](https://img.shields.io/badge/powered%20by-gammapy-orange.svg?style=flat)](https://www.gammapy.org/) [![astropy](http://img.shields.io/badge/powered%20by-AstroPy-orange.svg?style=flat)](https://www.astropy.org/) 5 | 6 | 'User-friendly' configuration-centred pipeline built over [Gammapy](https://github.com/gammapy/gammapy) to allow for easy simultaneous analysis of various datasets of different formats. 7 | Example: 3D Fermi-LAT (with various source models in the Region of Interest stored in XML file) + 1D energy-dependent directional cuts MAGIC/LST [``PointSkyRegion`` geometry for ``ON`` region] + 1D global directional cut VERITAS [``CircleSkyRegion`` geometry for ``ON`` region]. 8 | 9 | Follow the documentation at https://asgardpy.readthedocs.io/en/latest/ for the main functionality of this pipeline. 10 | Follow the [Gammapy v1.3](https://docs.gammapy.org/1.3/) documentation for understanding the core Gammapy objects. 11 | 12 | Check this [documentation page](https://asgardpy.readthedocs.io/en/latest/need_for_asgardpy.html) for seeing the list of reasons to use Asgardpy over Gammapy v1.3 and this [documentation page](https://asgardpy.readthedocs.io/en/latest/crab_sed.html) for seeing an extended example of the usage of Asgardpy in analyzing multi-instrument data of Crab Nebula. 13 | 14 | The various Data Levels used here follow the descriptions suggested by [GADF v0.3](https://gamma-astro-data-formats.readthedocs.io/en/latest/) and [CTAO](https://www.ctao.org/) Data Model. 
15 | 16 | # NOTE 17 | 18 | For requiring support only for [Gammapy v1.1](https://docs.gammapy.org/1.1/), one may follow the latest Hotfix release [v0.4.4](https://github.com/chaimain/asgardpy/tree/v0.4.4) which benefits from the correct usage of getting EBL-deabsorbed data products as included in [v0.5.0](https://github.com/chaimain/asgardpy/tree/v0.5.0). This can be done by using 19 | 20 | ```bash 21 | git fetch --tags 22 | git switch --detach tags/v0.4.4 23 | ``` 24 | 25 | For creating a conda environment, for this Hotfix release, one can use 26 | 27 | ```bash 28 | conda env create -f environment_0.4.4.yml 29 | ``` 30 | 31 | and in general, for the latest release, 32 | 33 | ```bash 34 | conda env create -f environment.yml 35 | ``` 36 | 37 | This method was included after v0.5.0, and for earlier ( origin/main 12 | ``` 13 | 14 | 2. Run `towncrier` to render the changelog: 15 | 16 | ```bash 17 | towncrier build --version=<version_number> 18 | ``` 19 | 20 | 3. Commit the changes made: 21 | 22 | ```bash 23 | git commit -a 24 | ``` 25 | 26 | 4. Add TAG of the version number: 27 | 28 | ```bash 29 | git tag v<version_number> -m v<version_number> 30 | git push --tags 31 | ``` 32 | -------------------------------------------------------------------------------- /dev/README.md: -------------------------------------------------------------------------------- 1 | # Additional Fermi-LAT data 2 | 3 | The fermipy_crab_zipped.zip file contains a set of Crab Nebula (+ Crab Pulsar) data, produced 4 | using [fermipy](https://fermipy.readthedocs.io/en/latest/), graciously provided 5 | by [Lea Heckmann](https://orcid.org/0000-0002-6653-8407), to be read with 6 | Gammapy (via Asgardpy). 7 | 8 | The data is generated for the time exposure from January 2019 - May 2022. 9 | 10 | The zipped file also contains several extra files to aid in covering tests for 11 | support provided by Asgardpy, for reading such files.
12 | 13 | # Additional CTAO-LST1 data 14 | 15 | The lst1_crab_dl4.zip file contains the reduced DL4 dataset of the Crab Nebula 16 | example dataset from the [LST1 Performance Study](https://iopscience.iop.org/article/10.3847/1538-4357/ace89d), 17 | stored on the public record at [zenodo](https://zenodo.org/records/11445184), 18 | provided by [Daniel Morcuende](https://orcid.org/0000-0001-9400-0922). The 19 | original datasets were DL3 files and over 750 MBs in size, hence they were 20 | reduced to DL4 data and stored here for easier access. 21 | 22 | Detailed information about the data can be found at the links provided. 23 | -------------------------------------------------------------------------------- /dev/codespell_ignore_words.txt: -------------------------------------------------------------------------------- 1 | nd 2 | livetime 3 | indexs 4 | -------------------------------------------------------------------------------- /dev/fermipy_crab_zipped.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/dev/fermipy_crab_zipped.zip -------------------------------------------------------------------------------- /dev/lst1_crab_dl4.zip: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/dev/lst1_crab_dl4.zip -------------------------------------------------------------------------------- /docs/.gitignore: -------------------------------------------------------------------------------- 1 | build 2 | -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment
for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/changes/README.md: -------------------------------------------------------------------------------- 1 | # How to use towncrier 2 | 3 | An overview can be found [here](https://towncrier.readthedocs.io/en/stable/quickstart.html#). 4 | 5 | 1. Create a new file for your changes `..rst` in the corresponding folder. The following types are available: 6 | - feature: `New features` 7 | - bugfix: `Bug fixes` 8 | - api: `API Changes` 9 | - maintenance: `Maintenance` 10 | 11 | 12 | 2. Write a suitable message for the change: 13 | ``` 14 | Fixed ``crazy_function`` to be consistent with ``not_so_crazy_function`` 15 | ``` 16 | 17 | 3. 
(For maintainers) How to generate a change log: 18 | - Execute the following command in the base directory of the project 19 | ``` 20 | towncrier build --version= 21 | ``` 22 | -------------------------------------------------------------------------------- /docs/changes/template.rst: -------------------------------------------------------------------------------- 1 | {% if render_title %} 2 | {% if versiondata.name %} 3 | {{ versiondata.name }} {{ versiondata.version }} ({{ versiondata.date }}) 4 | {{ top_underline * ((versiondata.name + versiondata.version + versiondata.date)|length + 4)}} 5 | {% else %} 6 | {{ versiondata.version }} ({{ versiondata.date }}) 7 | {{ top_underline * ((versiondata.version + versiondata.date)|length + 3)}} 8 | {% endif %} 9 | {% endif %} 10 | 11 | {% for category, val in definitions.items() %} 12 | 13 | {% set underline = underlines[0] %} 14 | {{ definitions[category]['name'] }} 15 | {{ underline * definitions[category]['name']|length }} 16 | {% set underline = underlines[1] %} 17 | 18 | {% for section, _ in sections.items() %} 19 | {% if section and category in sections[section] %} 20 | {{section}} 21 | {{ underline * section|length }} 22 | 23 | {% endif %} 24 | {% if sections[section] and category in sections[section] %} 25 | {% if definitions[category]['showcontent'] %} 26 | {% for text, values in sections[section][category].items() %} 27 | - {{ text }} [{{ values|join(', ') }}] 28 | 29 | {% endfor %} 30 | {% else %} 31 | - {{ sections[section][category]['']|join(', ') }} 32 | 33 | {% endif %} 34 | {% if sections[section][category]|length == 0 %} 35 | No significant changes. 36 | 37 | {% else %} 38 | {% endif %} 39 | {% else %} 40 | {# No significant changes. 
#} 41 | {% endif %} 42 | {% endfor %} 43 | {% endfor %} 44 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.https://www.sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/CODE_OF_CONDUCT.md: -------------------------------------------------------------------------------- 1 | # Asgardpy Community Code of Conduct 2 | 3 | Asgardpy is committed to fostering an inclusive community. 4 | 5 | Aside from the actual code, Asgardpy is also a community of users and developers. This community is central to accomplishing anything with the code itself. We welcome anyone who wishes to contribute to the project and expect everyone in our community to follow this code of conduct when interacting with others. 
6 | 7 | As members of the community, 8 | 9 | * We pledge to treat all people with respect and provide a harassment- and bullying-free environment, regardless of sex, sexual orientation and/or gender identity, disability, physical appearance, body size, race, nationality, ethnicity, caste and religion. In particular, sexual language and imagery, sexist, racist, casteist, or otherwise exclusionary jokes are not appropriate. 10 | * We pledge to respect the work of others by recognizing acknowledgment/citation requests of original authors. As authors, we pledge to be explicit about how we want our own work to be cited or acknowledged. 11 | * We pledge to welcome those interested in joining the community, and realize that including people with a variety of opinions and backgrounds will only serve to enrich our community. In particular, discussions relating to pros/cons of various technologies, programming languages, and so on are welcome, but these should be done with respect, taking proactive measures to ensure that all participants are heard and feel confident that they can freely express their opinions. 12 | * We pledge to welcome questions and answer them respectfully, paying particular attention to those new to the community. We pledge to provide respectful criticisms and feedback in forums, especially in discussion threads resulting from code contributions. 13 | * We pledge to be conscientious of the perceptions of the wider community and to respond to criticism respectfully. We will strive to model behaviors that encourage productive debate and disagreement, both within our community and where we are criticized. We will treat those outside our community with the same respect as people within our community. 14 | * We pledge to help the entire community follow the code of conduct, and to not remain silent when we see violations of the code of conduct. We will take action when members of our community violate this code. 
15 | 16 | 17 | This code of conduct applies to all community situations online and offline, including mailing lists, forums, social media, conferences, meetings, associated social events, and one-to-one interactions. This Code of Conduct has been re-used from the [Gammapy code of conduct](https://gammapy.org/CoC.html). 18 | 19 | 20 | ## Report 21 | 22 | In order to report any information or issue, please contact the author and maintainer of the project at chaitanya.p.astrphys@gmail.com, or any of the major [contributors](https://github.com/chaimain/asgardpy/graphs/contributors). 23 | -------------------------------------------------------------------------------- /docs/source/CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | ../../.github/CONTRIBUTING.md -------------------------------------------------------------------------------- /docs/source/_api_docs/analysis.rst: -------------------------------------------------------------------------------- 1 | asgardpy.analysis Main Module 2 | ============================= 3 | 4 | .. automodule:: asgardpy.analysis 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | :glob: 9 | 10 | analysis/* 11 | -------------------------------------------------------------------------------- /docs/source/_api_docs/analysis/analysis_analysis.rst: -------------------------------------------------------------------------------- 1 | asgardpy.analysis.analysis module 2 | ================================= 3 | 4 | .. automodule:: asgardpy.analysis.analysis 5 | :members: AsgardpyAnalysis 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/analysis/analysis_step.rst: -------------------------------------------------------------------------------- 1 | asgardpy.analysis.step module 2 | ============================= 3 | 4 | ..
automodule:: asgardpy.analysis.step 5 | :members: AnalysisStep 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/analysis/analysis_step_base.rst: -------------------------------------------------------------------------------- 1 | asgardpy.analysis.step_base module 2 | ================================== 3 | 4 | .. automodule:: asgardpy.analysis.step_base 5 | :members: AnalysisStepBase 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/base.rst: -------------------------------------------------------------------------------- 1 | asgardpy.base Main Module 2 | ========================= 3 | 4 | .. automodule:: asgardpy.base 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | :glob: 9 | 10 | base/* 11 | -------------------------------------------------------------------------------- /docs/source/_api_docs/base/base_base.rst: -------------------------------------------------------------------------------- 1 | asgardpy.base.base module 2 | ========================= 3 | 4 | .. automodule:: asgardpy.base.base 5 | :members: BaseConfig 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/base/base_geom.rst: -------------------------------------------------------------------------------- 1 | asgardpy.base.geom module 2 | ========================= 3 | 4 | .. 
automodule:: asgardpy.base.geom 5 | :members: GeomConfig, WcsConfig, SkyPositionConfig, MapAxesConfig, EnergyAxisConfig, create_counts_map, generate_geom, get_energy_axis, get_source_position 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/base/base_reduction.rst: -------------------------------------------------------------------------------- 1 | asgardpy.base.reduction module 2 | ============================== 3 | 4 | .. automodule:: asgardpy.base.reduction 5 | :members: BackgroundConfig, SafeMaskConfig, ObservationsConfig, generate_dl4_dataset, get_bkg_maker, get_dataset_reference, get_dataset_maker, get_exclusion_region_mask, get_filtered_observations, get_safe_mask_maker 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/config.rst: -------------------------------------------------------------------------------- 1 | asgardpy.config Module 2 | ====================== 3 | 4 | .. automodule:: asgardpy.config 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | :glob: 9 | 10 | config/* 11 | -------------------------------------------------------------------------------- /docs/source/_api_docs/config/generator.rst: -------------------------------------------------------------------------------- 1 | asgardpy.config.generator module 2 | ================================ 3 | 4 | .. automodule:: asgardpy.config.generator 5 | :members: AsgardpyConfig, GeneralConfig, gammapy_model_to_asgardpy_model_config, write_asgardpy_model_to_file 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | Template AsgardpyConfig 10 | ======================= 11 | 12 | .. literalinclude:: ../../_templates/template.yaml 13 | :language: yaml 14 | 15 | Template models file 16 | ==================== 17 | 18 | .. 
literalinclude:: ../../_templates/template_model.yaml 19 | :language: yaml 20 | -------------------------------------------------------------------------------- /docs/source/_api_docs/config/operations.rst: -------------------------------------------------------------------------------- 1 | asgardpy.config.operations module 2 | ================================= 3 | 4 | .. automodule:: asgardpy.config.operations 5 | :members: get_model_template, recursive_merge_dicts, deep_update 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/data.rst: -------------------------------------------------------------------------------- 1 | asgardpy.data Main Module 2 | ========================= 3 | 4 | .. automodule:: asgardpy.data 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | :glob: 9 | 10 | data/* 11 | -------------------------------------------------------------------------------- /docs/source/_api_docs/data/data_1d.rst: -------------------------------------------------------------------------------- 1 | asgardpy.data.dataset_1d module 2 | =============================== 3 | 4 | Basic overview 5 | -------------- 6 | 7 | The class :class:`asgardpy.data.dataset_1d.Datasets1DAnalysisStep` gathers information for a list of instruments by using the class :class:`asgardpy.data.dataset_1d.Dataset1DGeneration`. 8 | 9 | The main config for this dataset is defined by :class:`asgardpy.data.dataset_1d.Dataset1DConfig` which is simply a collection of basic information as defined in :class:`asgardpy.data.dataset_1d.Dataset1DBaseConfig`. This collection is a combination of :class:`asgardpy.io.io.InputConfig` and :class:`asgardpy.data.dataset_1d.Dataset1DInfoConfig` information. 10 | 11 | Classes 12 | ------- 13 | 14 | .. 
automodule:: asgardpy.data.dataset_1d 15 | :members: Dataset1DGeneration, Datasets1DAnalysisStep, Dataset1DBaseConfig, Dataset1DInfoConfig, Dataset1DConfig 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /docs/source/_api_docs/data/data_3d.rst: -------------------------------------------------------------------------------- 1 | asgardpy.data.dataset_3d module 2 | =============================== 3 | 4 | Basic overview 5 | -------------- 6 | 7 | The class :class:`asgardpy.data.dataset_3d.Datasets3DAnalysisStep` gathers information for a list of instruments by using the class :class:`asgardpy.data.dataset_3d.Dataset3DGeneration`. 8 | 9 | The main config for this dataset is defined by :class:`asgardpy.data.dataset_3d.Dataset3DConfig` which is simply a collection of basic information as defined in :class:`asgardpy.data.dataset_3d.Dataset3DBaseConfig`. This collection is a combination of :class:`asgardpy.io.io.InputConfig` and :class:`asgardpy.data.dataset_3d.Dataset3DInfoConfig` information. 10 | 11 | Classes 12 | ------- 13 | 14 | .. automodule:: asgardpy.data.dataset_3d 15 | :members: Dataset3DGeneration, Datasets3DAnalysisStep, Dataset3DBaseConfig, Dataset3DInfoConfig, Dataset3DConfig 16 | :undoc-members: 17 | :show-inheritance: 18 | -------------------------------------------------------------------------------- /docs/source/_api_docs/data/data_dl4.rst: -------------------------------------------------------------------------------- 1 | asgardpy.data.dl4 module 2 | ======================== 3 | 4 | .. 
automodule:: asgardpy.data.dl4 5 | :members: FitAnalysisStep, FluxPointsAnalysisStep, FitConfig, FluxPointsConfig 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/data/data_target.rst: -------------------------------------------------------------------------------- 1 | asgardpy.data.target module 2 | =========================== 3 | 4 | This Module is large and contains various classes and functions defined to support Models generation and assignments. 5 | These are separated in different pages as - 6 | 7 | .. toctree:: 8 | :maxdepth: 1 9 | :glob: 10 | 11 | target/* 12 | -------------------------------------------------------------------------------- /docs/source/_api_docs/data/target/data_target_b.rst: -------------------------------------------------------------------------------- 1 | asgardpy.data.target module: Classes 2 | ==================================== 3 | 4 | .. automodule:: asgardpy.data.target 5 | :members: BrokenPowerLaw2SpectralModel, ExpCutoffLogParabolaSpectralModel, Target, SpectralModelConfig, SpatialModelConfig, ModelParams, EBLAbsorptionModel, ModelComponent 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/data/target/data_target_f.rst: -------------------------------------------------------------------------------- 1 | asgardpy.data.target module: Functions 2 | ====================================== 3 | 4 | .. currentmodule:: asgardpy.data.target 5 | 6 | .. autofunction:: set_models 7 | .. autofunction:: config_to_dict 8 | .. autofunction:: apply_models_mask_in_roi 9 | .. autofunction:: get_models_from_catalog 10 | .. autofunction:: apply_selection_mask_to_models 11 | .. 
autofunction:: read_models_from_asgardpy_config 12 | -------------------------------------------------------------------------------- /docs/source/_api_docs/gammapy.rst: -------------------------------------------------------------------------------- 1 | asgardpy.gammapy module 2 | ======================= 3 | 4 | .. automodule:: asgardpy.gammapy 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | :glob: 9 | 10 | gammapy/* 11 | -------------------------------------------------------------------------------- /docs/source/_api_docs/gammapy/interop_model.rst: -------------------------------------------------------------------------------- 1 | asgardpy.gammapy.interoperate_models module 2 | =========================================== 3 | 4 | .. automodule:: asgardpy.gammapy.interoperate_models 5 | :members: xml_spatial_model_to_gammapy, xml_spectral_model_to_gammapy, params_renaming_to_gammapy, params_rescale_to_gammapy, get_gammapy_spectral_model 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | Spectral model parameter renaming to Gammapy default 10 | ==================================================== 11 | 12 | .. image:: ../../_static/asgardpy_target_params_renaming.png 13 | :width: 800px 14 | :align: center 15 | -------------------------------------------------------------------------------- /docs/source/_api_docs/gammapy/read_models.rst: -------------------------------------------------------------------------------- 1 | asgardpy.gammapy.read_models module 2 | =================================== 3 | 4 | .. 
automodule:: asgardpy.gammapy.read_models 5 | :members: create_gal_diffuse_skymodel, create_iso_diffuse_skymodel, create_source_skymodel, read_fermi_xml_models_list, update_aux_info_from_fermi_xml 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/index.rst: -------------------------------------------------------------------------------- 1 | API Docs 2 | ======== 3 | 4 | .. toctree:: 5 | :maxdepth: 1 6 | :glob: 7 | 8 | * 9 | -------------------------------------------------------------------------------- /docs/source/_api_docs/io.rst: -------------------------------------------------------------------------------- 1 | asgardpy.io Main module 2 | ======================= 3 | 4 | .. automodule:: asgardpy.io 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | :glob: 9 | 10 | io/* 11 | -------------------------------------------------------------------------------- /docs/source/_api_docs/io/input_dl3.rst: -------------------------------------------------------------------------------- 1 | asgardpy.io.input_dl3 module 2 | ============================ 3 | 4 | .. automodule:: asgardpy.io.input_dl3 5 | :members: DL3Files, InputDL3Config 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/io/io_dl4.rst: -------------------------------------------------------------------------------- 1 | asgardpy.io.io_dl4 module 2 | ========================= 3 | 4 | .. automodule:: asgardpy.io.io_dl4 5 | :members: DL4Files, DL4BaseConfig, InputDL4Config, get_reco_energy_bins 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/stats.rst: -------------------------------------------------------------------------------- 1 | asgardpy.stats module 2 | ===================== 3 | 4 | .. 
automodule:: asgardpy.stats 5 | 6 | .. toctree:: 7 | :maxdepth: 1 8 | :glob: 9 | 10 | stats/* 11 | -------------------------------------------------------------------------------- /docs/source/_api_docs/stats/stats.rst: -------------------------------------------------------------------------------- 1 | asgardpy.stats.stats module 2 | =========================== 3 | 4 | .. automodule:: asgardpy.stats.stats 5 | :members: check_model_preference_lrt, check_model_preference_aic, get_chi2_sig_pval, get_goodness_of_fit_stats, get_ts_target, fetch_pivot_energy 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_api_docs/stats/utils.rst: -------------------------------------------------------------------------------- 1 | asgardpy.stats.utils module 2 | =========================== 3 | 4 | .. automodule:: asgardpy.stats.utils 5 | :members: fetch_all_analysis_fit_info, get_model_config_files, tabulate_best_fit_stats, copy_target_config 6 | :undoc-members: 7 | :show-inheritance: 8 | -------------------------------------------------------------------------------- /docs/source/_static/SED_mult_instr_Crab.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/docs/source/_static/SED_mult_instr_Crab.png -------------------------------------------------------------------------------- /docs/source/_static/asgardpy_target_params_renaming.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/docs/source/_static/asgardpy_target_params_renaming.png -------------------------------------------------------------------------------- /docs/source/_static/asgardpy_workflow.png: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/docs/source/_static/asgardpy_workflow.png -------------------------------------------------------------------------------- /docs/source/_static/css/custom.css: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/docs/source/_static/css/custom.css -------------------------------------------------------------------------------- /docs/source/_static/favicon.ico: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/docs/source/_static/favicon.ico -------------------------------------------------------------------------------- /docs/source/_templates/template.yaml: -------------------------------------------------------------------------------- 1 | # Configuration file template for Data analysis of 3D+1D DL3 datasets 2 | 3 | # General settings 4 | general: 5 | log: 6 | level: info 7 | filename: filename.log 8 | filemode: a 9 | format: "%(asctime)s - %(message)s" 10 | datefmt: "%d-%b-%y %H:%M:%S" 11 | outdir: . 
12 | n_jobs: 10 13 | parallel_backend: multiprocessing 14 | steps: [datasets-3d, datasets-1d, fit, flux-points] 15 | overwrite: true 16 | stacked_dataset: true 17 | 18 | # Target settings 19 | target: 20 | source_name: PG1553+113 21 | sky_position: &source_pos {frame: icrs, lon: 238.92934976 deg, lat: 11.19010155 deg, radius: 0 deg} 22 | use_uniform_position: true 23 | datasets_with_fov_bkg_model: [] 24 | #models_file: "template_model.yaml" 25 | #use_catalog: 26 | #name: "3fgl" 27 | #selection_radius: 0.4 deg 28 | #exclusion_radius: 0.5 deg 29 | components: 30 | - name: PG1553+113 31 | type: SkyModel 32 | spectral: 33 | type: LogParabolaSpectralModel 34 | parameters: 35 | - name: amplitude 36 | value: 1.0e-05 37 | unit: cm-2 s-1 TeV-1 38 | error: 1.5e-06 39 | min: 1.0e-13 40 | max: 0.01 41 | frozen: false 42 | - name: reference 43 | value: 0.0015 44 | unit: TeV 45 | error: 0.0 46 | min: 0.0001 47 | max: 100.0 48 | frozen: true 49 | - name: alpha 50 | value: 1.5 51 | unit: '' 52 | error: 0.1 53 | min: 0.5 54 | max: 5.0 55 | frozen: false 56 | - name: beta 57 | value: 0.1 58 | unit: '' 59 | error: 0.01 60 | min: 1.0e-6 61 | max: 1.0 62 | frozen: false 63 | ebl_abs: 64 | reference: dominguez 65 | type: EBLAbsorptionNormSpectralModel 66 | redshift: 0.433 67 | alpha_norm: 1.0 68 | covariance: None 69 | from_3d: false 70 | roi_selection: 71 | roi_radius: 5 deg 72 | free_sources: [None] 73 | 74 | # Instrument datasets with 3D info 75 | dataset3d: 76 | type: "3d" 77 | instruments: 78 | - name: Fermi-LAT 79 | input_dl3: 80 | - type: lat 81 | input_dir: "." 82 | glob_pattern: 83 | events: "*MkTime.fits*" 84 | edisp: "*eDRM.fits*" 85 | xml_model: "*out.xml" 86 | exposure: "*BinnedMap.fits*" 87 | psf: "*psf.fits*" 88 | - type: lat-aux 89 | input_dir: "." 
90 | glob_pattern: 91 | gal_diffuse: "gll_iem_v07.fits*" 92 | iso_diffuse: "iso_P8R3_SOURCE_V3_*.txt" 93 | input_dl4: false 94 | dataset_info: 95 | name: Fermi-LAT 96 | key: ["FRONT", "BACK"] 97 | # map_selection: [] 98 | geom: 99 | from_events_file: True 100 | wcs: 101 | binsize: 0.1 deg 102 | proj: TAN 103 | map_frame_shape: 104 | width: 10 deg 105 | height: 10 deg 106 | axes: 107 | - name: energy 108 | axis: 109 | min: "100 MeV" 110 | max: "1 TeV" 111 | nbins: 8 112 | per_decade: true 113 | background: 114 | # method: reflected 115 | # parameters: 116 | exclusion: 117 | target_source: true 118 | regions: 119 | - type: CircleAnnulusSkyRegion 120 | name: None 121 | position: *source_pos 122 | parameters: 123 | rad_0: 8 deg 124 | rad_1: 30 deg 125 | safe_mask: 126 | methods: [] 127 | parameters: 128 | min: 0.001 TeV 129 | max: 1 TeV 130 | on_region: *source_pos 131 | # radius: 0.4 deg 132 | containment_correction: true 133 | dl4_dataset_info: 134 | # dl4_dataset: 135 | # type: MapDatasetOnOff 136 | # input_dir: "." 137 | # glob_pattern: 138 | # dl4_files: "pha*fits" 139 | # dl4_format: "ogip" 140 | spectral_energy_range: 141 | name: energy 142 | axis: 143 | min: "100 MeV" 144 | max: "1 TeV" 145 | nbins: 4 146 | per_decade: true 147 | # axis_custom: 148 | # edges: [0.01, 0.05, 0.1, 0.5, 1, 5, 10] 149 | # unit: "TeV" 150 | # - name: CTA 151 | # ... full description ... 152 | 153 | # Instrument datasets with 1D info 154 | dataset1d: 155 | type: "1d" 156 | instruments: 157 | - name: LST-1 158 | input_dl3: 159 | - type: gadf-dl3 160 | input_dir: "." 
161 | glob_pattern: 162 | dl3_files: "" 163 | input_dl4: false 164 | dataset_info: 165 | name: LST-1 166 | geom: 167 | wcs: 168 | binsize: 0.05 deg 169 | proj: TAN 170 | map_frame_shape: 171 | width: 6.25 deg 172 | height: 6.25 deg 173 | selection: 174 | offset_max: "2.5 deg" 175 | axes: 176 | - name: energy 177 | axis: &lst_energy 178 | min: "10 GeV" 179 | max: "10 TeV" 180 | nbins: 5 181 | per_decade: true 182 | - name: energy_true 183 | axis: *lst_energy 184 | observation: 185 | # obs_ids: [] 186 | # obs_file: None 187 | # obs_time: 188 | required_irfs: ["aeff", "edisp", "rad_max"] 189 | background: 190 | method: reflected 191 | region_finder_method: wobble 192 | parameters: 193 | n_off_regions: 1 194 | # exclusion: 195 | safe_mask: 196 | methods: ["custom-mask"] #["aeff-max", "aeff-default", "edisp-bias", "offset-max", "bkg-peak"] 197 | parameters: 198 | min: "10 GeV" 199 | max: "10 TeV" 200 | aeff_percent: 10 201 | bias_percent: 10 202 | fixed_offset: 0.1 deg 203 | offset_max: 3 deg 204 | position: *source_pos 205 | on_region: *source_pos 206 | containment_correction: false 207 | map_selection: [counts, exposure, edisp] 208 | dl4_dataset_info: 209 | # dl4_dataset: 210 | # type: SpectrumDatasetOnOff 211 | # input_dir: "." 212 | # glob_pattern: 213 | # dl4_files: "obs_*fits" 214 | # dl4_format: "gadf" 215 | spectral_energy_range: 216 | name: energy 217 | axis: 218 | min: "10 GeV" 219 | max: "10 TeV" 220 | nbins: 5 221 | per_decade: true 222 | #axis_custom: 223 | # edges: [0.01, 0.05, 0.1, 0.5, 1, 5, 10] 224 | # unit: "TeV" 225 | # - name: MAGIC 226 | # ... full description ... 
227 | 228 | # Fit parameters 229 | fit_params: 230 | fit_range: 231 | min: "100 MeV" 232 | max: "1 TeV" 233 | backend: minuit 234 | optimize_opts: {} 235 | covariance_opts: {} 236 | confidence_opts: {} 237 | store_trace: true 238 | 239 | flux_points_params: 240 | parameters: 241 | selection_optional: "all" 242 | reoptimize: false 243 | -------------------------------------------------------------------------------- /docs/source/_templates/template_model.yaml: -------------------------------------------------------------------------------- 1 | target: 2 | components: 3 | - type: SkyModel 4 | spectral: 5 | type: LogParabolaSpectralModel 6 | parameters: 7 | - name: amplitude 8 | value: 1.0e-05 9 | unit: cm-2 s-1 TeV-1 10 | error: 1.5e-06 11 | min: 1.0e-13 12 | max: 0.01 13 | frozen: false 14 | - name: reference 15 | value: 0.0015 16 | unit: TeV 17 | error: 0.0 18 | min: 0.0001 19 | max: 100.0 20 | frozen: true 21 | - name: alpha 22 | value: 2 23 | unit: '' 24 | error: 0.1 25 | min: 0.5 26 | max: 5.0 27 | frozen: false 28 | - name: beta 29 | value: 0.1 30 | unit: '' 31 | error: 0.01 32 | min: 0.01 33 | max: 1.0 34 | frozen: false 35 | -------------------------------------------------------------------------------- /docs/source/additional_dataset_input.rst: -------------------------------------------------------------------------------- 1 | Additional Support of Datasets input 2 | ==================================== 3 | 4 | 5 | :ref:`dataset-intro` provides the general overview of the inputs used in asgardpy 6 | for the data reduction of DL3 data to DL4 data, following the Gammapy documentation on 7 | `Data Reduction (DL3 to DL4) `_ 8 | 9 | 10 | Asgardpy can also read from already reduced DL4 datasets, like the examples from gammapy-data, 11 | using the module :class:`~asgardpy.io.io_dl4`. 
This is done by using 12 | :class:`~asgardpy.data.dataset_3d.Dataset3DBaseConfig.input_dl4` ``= True`` 13 | or :class:`~asgardpy.data.dataset_1d.Dataset1DBaseConfig.input_dl4` ``= True`` and filling 14 | :class:`~asgardpy.data.dataset_3d.Dataset3DBaseConfig.dl4_dataset_info` and 15 | :class:`~asgardpy.data.dataset_1d.Dataset1DBaseConfig.dl4_dataset_info` respectively. 16 | 17 | This can be done for an additional compilation of DL4 datasets to run the joint 18 | likelihood analysis using Gammapy. Tests are included for checking an example 19 | of this support. 20 | -------------------------------------------------------------------------------- /docs/source/additional_model_input.rst: -------------------------------------------------------------------------------- 1 | Additional Support of Models input 2 | ================================== 3 | 4 | :ref:`models-intro` gives a brief overview on including the Models to the DL4 datasets. 5 | 6 | The list of associated Models can be provided by - 7 | 8 | #. Using models defined in Gammapy 9 | The list of Models used in Gammapy can be seen in 10 | `Model Gallery `_. 11 | 12 | Additional models following the Gammapy standards, are defined in 13 | :doc:`_api_docs/data/target/data_target_b`, and to use these models, 14 | one needs to invoke that module. 15 | 16 | #. Using a list of models written in a different way than the Gammapy standard 17 | The module :class:`~asgardpy.gammapy.read_models` and 18 | :class:`~asgardpy.gammapy.interoperate_models` are used to read such models, 19 | for e.g. XML model definitions used by Fermi-LAT. 20 | 21 | A test covering all of these models is included in asgardpy, using a test 22 | xml file, included in the additional test data. 23 | 24 | #. Using a Catalog available in Gammapy 25 | This is done by adding information in :class:`~asgardpy.data.target.Target.use_catalog`. 26 | The list of available catalogs in Gammapy is documented at 27 | `Source Catalogs `_ 28 | 29 | #. 
Using FoV Background Model as defined in Gammapy 30 | To add a default Gammapy `FoVBackgroundModel` to the 3D dataset, use 31 | :class:`~asgardpy.data.target.Target.add_fov_bkg_model` ``= True``. 32 | 33 | One can also include it in the config file by defining its spectral and/or 34 | spatial model components. 35 | -------------------------------------------------------------------------------- /docs/source/additional_stats.rst: -------------------------------------------------------------------------------- 1 | Fetching the Goodness of Fit 2 | ============================ 3 | 4 | 5 | The Goodness of Fit is evaluated for the target source region using 6 | :class:`~asgardpy.stats.stats.get_ts_target` and 7 | :class:`~asgardpy.stats.stats.get_goodness_of_fit_stats`. 8 | 9 | The different Fit Statistic function used for the different types of Datasets 10 | input are mentioned in the 11 | `Gammapy documentation `_ 12 | and are: 13 | 14 | #. `cash `_ 15 | Used for dataset containing Poisson data with background model. 16 | 17 | #. `wstat `_ 18 | Used for dataset containing Poisson data with background measurement. 19 | 20 | #. chi2 21 | Used for `FluxPointsDataset` read from a file like in a Gammapy 22 | `example `_ 23 | where the pre-computed flux points are used to perform the likelihood fit, 24 | when no convolution with IRFs are needed. 25 | 26 | :class:`~asgardpy.stats.stats.get_ts_target` uses the above Fit Statistic functions, 27 | to get the test statistic for the best fit and the max fit for the target source 28 | region in the provided joint datasets object. 29 | 30 | For more general information follow the Gammapy 31 | `documentation `_ 32 | -------------------------------------------------------------------------------- /docs/source/changelog.rst: -------------------------------------------------------------------------------- 1 | .. _changelog: 2 | 3 | ========= 4 | Changelog 5 | ========= 6 | 7 | .. towncrier release notes start 8 | 9 | .. 
changelog:: 10 | :towncrier: ../../ 11 | :changelog_file: ../../CHANGES.rst 12 | :towncrier-skip-if-empty: 13 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | import logging 8 | import os 9 | import sys 10 | import tomllib 11 | from datetime import datetime 12 | 13 | from asgardpy.version import __public_version__, __version__ # noqa: E402 14 | 15 | # -- Path setup -------------------------------------------------------------- 16 | 17 | # If extensions (or modules to document with autodoc) are in another directory, 18 | # add these directories to sys.path here. If the directory is relative to the 19 | # documentation root, use os.path.abspath to make it absolute, like shown here. 20 | # 21 | 22 | sys.path.insert(0, os.path.abspath("../../")) 23 | 24 | # -- Project information ----------------------------------------------------- 25 | 26 | work_dir_path = os.path.join(os.path.dirname(__file__), "../../") 27 | 28 | with open(os.path.join(work_dir_path, "pyproject.toml"), "rb") as f: 29 | project_info = tomllib.load(f) 30 | 31 | project = project_info["project"]["name"] 32 | 33 | # Read the list of author names 34 | author = "" 35 | for auth in project_info["project"]["authors"]: 36 | author += auth["name"] + ", " 37 | author = author[:-2] 38 | 39 | copyright = f"{datetime.today().year}, {author}" 40 | 41 | version = __version__ 42 | # The full version contains alpha, beta, rc tags 43 | release = __public_version__ 44 | 45 | 46 | # -- General configuration --------------------------------------------------- 47 | 48 | # Add any Sphinx extension module names here, as strings. 
They can be 49 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 50 | # ones. 51 | extensions = [ 52 | "sphinx.ext.autodoc", 53 | "sphinx.ext.autosummary", 54 | "sphinx.ext.napoleon", 55 | "myst_parser", 56 | "sphinx.ext.intersphinx", 57 | "sphinx.ext.extlinks", 58 | "sphinx.ext.mathjax", 59 | "sphinx.ext.viewcode", 60 | "matplotlib.sphinxext.plot_directive", 61 | "IPython.sphinxext.ipython_directive", 62 | "IPython.sphinxext.ipython_console_highlighting", 63 | "sphinx.ext.doctest", 64 | "sphinx_copybutton", 65 | "sphinx_autodoc_typehints", 66 | "sphinx.ext.inheritance_diagram", 67 | "sphinxcontrib.autodoc_pydantic", 68 | "sphinxcontrib.towncrier.ext", 69 | "sphinx_changelog", 70 | ] 71 | 72 | # Tell myst-parser to assign header anchors for h1-h3. 73 | myst_heading_anchors = 3 74 | 75 | suppress_warnings = ["myst.header"] 76 | 77 | # Add any paths that contain templates here, relative to this directory. 78 | templates_path = ["_templates"] 79 | 80 | # List of patterns, relative to source directory, that match files and 81 | # directories to ignore when looking for source files. 82 | # This pattern also affects html_static_path and html_extra_path. 83 | exclude_patterns = ["_build", "changes"] 84 | 85 | source_suffix = [".rst", ".md"] 86 | 87 | intersphinx_mapping = { 88 | "python": ("https://docs.python.org/3.11/", None), 89 | "gammapy": ("https://docs.gammapy.org/1.3/", None), 90 | "astropy": ("https://docs.astropy.org/en/latest/", None), 91 | # Uncomment these if you use them in your codebase: 92 | # "torch": ("https://pytorch.org/docs/stable", None), 93 | # "datasets": ("https://huggingface.co/docs/datasets/master/en", None), 94 | # "transformers": ("https://huggingface.co/docs/transformers/master/en", None), 95 | } 96 | 97 | # By default, sort documented members by type within classes and modules. 
98 | autodoc_member_order = "groupwise" 99 | 100 | autodoc_pydantic_model_show_json = False 101 | autodoc_pydantic_model_show_config = False 102 | autodoc_pydantic_model_show_config_member = False 103 | autodoc_pydantic_model_show_config_summary = False 104 | autodoc_pydantic_model_show_field_summary = True 105 | 106 | # Options: draft/sphinx-version/sphinx-release 107 | towncrier_draft_autoversion_mode = "draft" 108 | towncrier_draft_include_empty = True 109 | towncrier_draft_working_directory = work_dir_path 110 | # Not yet supported: 111 | # towncrier_draft_config_path = 'pyproject.toml' # relative to cwd 112 | 113 | # Include default values when documenting parameter types. 114 | typehints_defaults = "comma" 115 | 116 | 117 | # Define the json_url for our version switcher. 118 | # json_url = "https://asgardpy.readthedocs.io/en/latest/_static/switcher.json" 119 | 120 | # Define the version we use for matching in the version switcher., 121 | # version_match = os.getenv("READTHEDOCS_VERSION") 122 | # If READTHEDOCS_VERSION doesn't exist, we're not on RTD 123 | # If it is an integer, we're in a PR build and the version isn't correct. 124 | # if not version_match or version_match.isdigit(): 125 | # For local development, infer the version to match from the package. 126 | # if "dev" in release or "rc" in release: 127 | # version_match = "latest" 128 | # else: 129 | # version_match = release 130 | 131 | # We want to keep the relative reference when on a pull request or locally 132 | # json_url = "_static/switcher.json" 133 | 134 | 135 | # -- Options for HTML output ------------------------------------------------- 136 | 137 | # The theme to use for HTML and HTML Help pages. See the documentation for 138 | # a list of builtin themes. 139 | # 140 | html_theme = "furo" 141 | 142 | html_title = f"{project} v{__version__}" 143 | 144 | # Add any paths that contain custom static files (such as style sheets) here, 145 | # relative to this directory. 
They are copied after the builtin static files, 146 | # so a file named "default.css" will overwrite the builtin "default.css". 147 | html_static_path = ["_static"] 148 | 149 | html_css_files = ["css/custom.css"] 150 | 151 | html_favicon = "_static/favicon.ico" 152 | 153 | html_theme_options = { 154 | "footer_icons": [ 155 | { 156 | "name": "GitHub", 157 | "url": "https://github.com/chaimain/asgardpy", 158 | "html": """ 159 | 160 | 161 | 162 | """, # noqa: E501 163 | "class": "", 164 | }, 165 | ], 166 | # "navbar_start": ["navbar-logo", "version-switcher"], 167 | # "switcher": { 168 | # "version_match": version_match, 169 | # "json_url": json_url, 170 | # }, 171 | } 172 | 173 | # -- Hack to get rid of stupid warnings from sphinx_autodoc_typehints -------- 174 | 175 | 176 | class ShutupSphinxAutodocTypehintsFilter(logging.Filter): 177 | def filter(self, record: logging.LogRecord) -> bool: 178 | if "Cannot resolve forward reference" in record.msg: 179 | return False 180 | return True 181 | 182 | 183 | logging.getLogger("sphinx.sphinx_autodoc_typehints").addFilter(ShutupSphinxAutodocTypehintsFilter()) 184 | -------------------------------------------------------------------------------- /docs/source/crab_sed.rst: -------------------------------------------------------------------------------- 1 | Multi-instrument analysis of Crab Nebula 2 | ======================================== 3 | 4 | Here is the more extensive example of a multi-instrument joint-fit analyses of the Crab Nebula. 5 | Only publicly available data are used to show this example, and the contribution of the Crab Pulsar has not been removed from the data. 6 | 7 | Description of the datasets 8 | --------------------------- 9 | 10 | #. Fermi-LAT - **3D DL3** Dataset for the period of 3 years 5 months, generated by `Lea Heckmann `_ 11 | 12 | #. H.E.S.S. - **3D DL3** Dataset available in ``gammapy-datasets`` with livetime of ~1 hr 13 | 14 | #. 
MAGIC - **1D DL3** Dataset available at `DOI: 10.5281/zenodo.13898269 `_ with a total livetime period of ~76.61 hrs. This includes all the data available there, which includes data taken in ``moon`` condition with different NSB levels, and in ``dark`` condition with multiple offsets. 15 | 16 | #. CTAO-LST1 - **1D DL3** Dataset used in C. Priyadarshi's `thesis `_ with a period of ~50 hrs (11/2020 - 03/2022), which includes data taken with zenith up to 52.5 deg 17 | 18 | #. FACT - **1D DL4** Dataset available in ``gammapy-datasets`` and used in `DOI: 10.1051/0004-6361/201834938 `_ with livetime of ~10.13 hrs 19 | 20 | #. VERITAS - **1D DL4** Dataset available in ``gammapy-datasets`` and used in `DOI: 10.1051/0004-6361/201834938 `_ with livetime of ~0.60 hrs 21 | 22 | #. HAWC - Flux Points (**DL5**) dataset available in ``gammapy-datasets`` as taken from `DOI: 10.3847/1538-4357/ab2f7d `_ 23 | 24 | #. LHAASO (WCDA and KM2A) - Flux Points (**DL5**) dataset as taken from `DOI: 10.1126/science.abg5137 `_ 25 | 26 | 27 | .. image:: ./_static/SED_mult_instr_Crab.png 28 | :width: 700px 29 | :align: center 30 | 31 | 32 | The peak energy of the Log Parabola model here is 61.448 +/- 2.718 GeV 33 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. asgardpy documentation master file, created by 2 | sphinx-quickstart on Tue Sep 21 08:07:48 2021. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root toctree directive. 5 | 6 | **asgardpy pipeline** 7 | ===================== 8 | 9 | Gammapy-based pipeline to support high-level analysis for multi-instruments joint datasets. 10 | Follow the `Gammapy v1.3 `_ documentation for understanding the core Gammapy objects. 11 | 12 | The various Data Levels used here follow the descriptions suggested by 13 | `GADF v0.3 `_ and `CTAO `_ Data Model. 
14 | 15 | The pipeline was developed with first testing with Fermi-LAT (`enrico `_ and 16 | `fermipy `_) files and LST-1 (`cta-lstchain `_) 17 | DL3 files (with energy-dependent and global selection cuts) for point-like sources. 18 | 19 | It also allows for a preliminary analysis of HAWC datasets (stored in Gammapy-readable data). The pipeline can be further expanded to support more types of DL3 files of gamma-ray instruments. 20 | See some example notebooks and tests, which use the public data used in Gammapy Tutorials, with an additional Fermi-LAT data. 21 | 22 | See :doc:`need_for_asgardpy` for a brief overview of the pipeline and its future and :doc:`crab_sed` for an extended example of the usage of the pipeline. 23 | 24 | 25 | GitHub Repository: https://github.com/chaimain/asgardpy 26 | 27 | .. _introduction: 28 | 29 | .. toctree:: 30 | :maxdepth: 2 31 | :caption: Introduction 32 | :name: _introduction 33 | 34 | overview 35 | additional_dataset_input 36 | additional_model_input 37 | additional_stats 38 | need_for_asgardpy 39 | crab_sed 40 | installation 41 | 42 | .. _changes: 43 | 44 | .. toctree:: 45 | :maxdepth: 2 46 | :caption: Changelogs 47 | :name: _changes 48 | 49 | changelog 50 | 51 | .. _api: 52 | 53 | .. toctree:: 54 | :maxdepth: 1 55 | :caption: API Documentation 56 | :name: _api 57 | 58 | _api_docs/index 59 | 60 | .. toctree:: 61 | :hidden: 62 | :caption: Development 63 | 64 | License 65 | CODE_OF_CONDUCT 66 | CONTRIBUTING 67 | GitHub Repository 68 | Issue Tracker 69 | 70 | 71 | Team 72 | ---- 73 | 74 | **asgardpy** is developed and maintained by `Chaitanya Priyadarshi `_. 75 | To learn more about who specifically contributed to this codebase, see 76 | `our contributors `_ page. 77 | 78 | 79 | Cite 80 | ---- 81 | 82 | If you use Asgardpy in a publication, please cite the exact version you used from Zenodo *Cite as* https://doi.org/10.5281/zenodo.8106369 83 | 84 | 85 | License 86 | ------- 87 | 88 | **asgardpy** is licensed under `Apache 2.0 `_. 
89 | A full copy of the license can be found on `GitHub `_. 90 | 91 | Indices and tables 92 | ------------------ 93 | 94 | * :ref:`genindex` 95 | * :ref:`modindex` 96 | 97 | 98 | Dependencies 99 | ------------ 100 | 101 | * `astropy `_ managing physical units and astronomical distances; 102 | 103 | * `gammapy `_ for main high-level analysis 104 | -------------------------------------------------------------------------------- /docs/source/installation.md: -------------------------------------------------------------------------------- 1 | Installation 2 | ============ 3 | 4 | **asgardpy** supports Python >= 3.11. 5 | 6 | ## Installing with `pip` 7 | 8 | **asgardpy** is available [on PyPI](https://pypi.org/project/asgardpy/). For the latest version just run 9 | 10 | ```bash 11 | pip install asgardpy 12 | ``` 13 | 14 | and for specific versions, run 15 | 16 | ```bash 17 | pip install asgardpy==VERSION 18 | ``` 19 | 20 | For example, using the Hotfix release of v0.4.4 with extended support for Gammapy v1.1, run 21 | 22 | ```bash 23 | pip install asgardpy==0.4.4 24 | ``` 25 | 26 | ## Installing from source 27 | 28 | To install **asgardpy** from source, first clone [the repository](https://github.com/chaimain/asgardpy): 29 | 30 | ```bash 31 | git clone https://github.com/chaimain/asgardpy.git 32 | cd asgardpy 33 | ``` 34 | 35 | Then for users, run 36 | 37 | ```bash 38 | pip install -e . 39 | ``` 40 | 41 | and for developers, run 42 | 43 | ```bash 44 | pip install -e .[dev] 45 | ``` 46 | 47 | ## Creating conda environment 48 | 49 | In general, for the latest version, one can use 50 | 51 | ```bash 52 | conda env create -f environment.yml 53 | ``` 54 | 55 | and for the Hotfix release, 56 | 57 | ```bash 58 | conda env create -f environment_0.4.4.yml 59 | ``` 60 | 61 | This method was included after v0.5.0, and for earlier (`_, starting from DL3 level data 8 | 9 | #. Using `High Level Interface (HLI) `_ for 1D datasets with energy-dependent RAD_MAX values 10 | 11 | #. 
Additional supporting features while using `HLI `_ 12 | 13 | Unique features 14 | --------------- 15 | 16 | #. While creating a multi-instrument list of DL4 datasets, letting the central spatial coordinate in the ``geom`` objects, be commonly shared 17 | 18 | #. Being able to read Fermi files as produced by `enrico `_ 19 | 20 | #. Distinct function to read Fermi XML file into Gammapy ``Models`` objects with :class:`~asgardpy.gammapy.read_models.read_fermi_xml_models_list` (Example of simple usage with asgardpy can be seen in the `first section of the example notebook `_) 21 | 22 | #. Useful scripts and functions to help with the spectral analyses. 23 | 24 | Reasons that may be added in future Gammapy releases 25 | ---------------------------------------------------- 26 | 27 | The following features may become redundant in asgardpy, after `Gammapy 2.0 `_ - 28 | 29 | #. Being able to read Fermi files as produced by `fermipy `_ 30 | 31 | #. Easily reading existing Fermi XML files into Gammapy ``Models`` objects 32 | 33 | #. Using the `HLI `_ for 1D dataset with energy-dependent ``RAD_MAX`` cuts (see `Workflow module `_) 34 | 35 | #. Having intermediate analysis steps, distinct for DL3 -> DL4 -> DL5 (see `Workflow module `_) 36 | 37 | #. Providing Goodness of Fit estimation for 3D + 1D datasets 38 | 39 | #. Using multiple time interval selection for a given list of observations 40 | -------------------------------------------------------------------------------- /docs/source/overview.rst: -------------------------------------------------------------------------------- 1 | Overview of asgardpy 2 | ==================== 3 | 4 | Main structure 5 | -------------- 6 | 7 | The package is structured in 2 ways - 8 | 9 | #. Creating the AnalysisConfig based on several Config components - :class:`~asgardpy.config` 10 | 11 | #. 
Generating AsgardpyAnalysis based on several Analysis Step components - :class:`~asgardpy.analysis` 12 | 13 | Analysis Steps 14 | -------------- 15 | 16 | The configuration-based pipeline separates the Gammapy-based High-Level Analysis into serialized intermediate steps. 17 | Check :class:`~asgardpy.analysis.analysis_step_base` for more details. 18 | The steps are: 19 | 20 | #. datasets-3d :class:`~asgardpy.data.dataset_3d.Datasets3DAnalysisStep` 21 | 22 | #. datasets-1d :class:`~asgardpy.data.dataset_1d.Datasets1DAnalysisStep` 23 | 24 | #. fit :class:`~asgardpy.data.dl4.FitAnalysisStep` 25 | 26 | #. flux-points :class:`~asgardpy.data.dl4.FluxPointsAnalysisStep` 27 | 28 | The main purpose of this pipeline is accomplished by - 29 | 30 | #. Reducing DL3 data from multiple gamma-ray astronomical instruments to a joint DL4 dataset. 31 | 32 | #. Updating the joint DL4 data with appropriate Gammapy Models object. 33 | 34 | #. Performing DL4 to DL5 (SED only) after performing joint-likelihood fitting. 35 | 36 | 37 | .. image:: ./_static/asgardpy_workflow.png 38 | :width: 600px 39 | :align: center 40 | 41 | .. _dataset-intro: 42 | 43 | DL3 Data component 44 | ------------------ 45 | 46 | The "DL3 level" data files for any instrument are read by providing the path location and a search glob pattern in the Config file. These are read 47 | by the :class:`~asgardpy.io.input_dl3.DL3Files`. 48 | 49 | The main modules dealing with the 2 types of data being read are - 50 | 51 | #. 3D Dataset :class:`~asgardpy.data.dataset_3d` 52 | 53 | #. 1D Dataset :class:`~asgardpy.data.dataset_1d` 54 | 55 | They each build their Config components using classes defined with, 56 | 57 | #. a base in :class:`~asgardpy.base.base`, 58 | 59 | #. from distinct modules - 60 | 61 | #. Base Geometry :class:`~asgardpy.base.geom` 62 | 63 | #. Dataset Reduction :class:`~asgardpy.base.reduction` 64 | 65 | #.
and from their own respective modules 66 | 67 | The processing of Dataset creation is performed by :class:`~asgardpy.data.dataset_3d.Dataset3DGeneration` and :class:`~asgardpy.data.dataset_1d.Dataset1DGeneration` 68 | 69 | For the DL3 files with energy-dependent directional (RAD_MAX) cuts, the ON region is defined by a ``PointSkyRegion`` whereas for files with global cuts, the ON region is defined by a ``CircleSkyRegion``. 70 | The usage of either of these can be generalized by providing the source sky position with a value of radius as 0 deg (by default) or a non-zero angular radius, respectively. 71 | 72 | Following `Gammapy v1.3 `_ we have the usage of parallel processing for DL4 Dataset creation, Flux Points Estimation among others. 73 | For the first two processes, here we have the parameters of ``n_jobs`` and ``parallel_backend`` defined in :class:`~asgardpy.config.generator.GeneralConfig` as can be seen in :class:`~asgardpy.config.generator.AsgardpyConfig`. 74 | 75 | .. _models-intro: 76 | 77 | Models 78 | ------ 79 | 80 | The :doc:`_api_docs/data/target/data_target_b` contains various classes for various Models objects and :doc:`_api_docs/data/target/data_target_f` contains various functions for handling them. 81 | 82 | 83 | The information regarding the model to be used for the target source is given by :class:`~asgardpy.data.target.Target` and the various input options are - 84 | 85 | #. Include the model information in :class:`~asgardpy.data.target.Target.components` 86 | 87 | #. Include the path for a separate model file in :class:`~asgardpy.data.target.Target.models_file` 88 | 89 | #. Use :class:`~asgardpy.data.target.Target.from_3d` ``= True``, if the model is included in the list of Models provided with the 3D Dataset 90 | 91 | 92 | While combining DL4 datasets from multiple instruments, the positions of the target source, included within these data, may not be exactly the same. 
93 | This will cause computation issues for the binned analysis performed with Gammapy. To resolve this issue, use :class:`~asgardpy.data.target.Target.use_uniform_position` ``= True``. 94 | 95 | 96 | The :class:`~asgardpy.data.target.apply_selection_mask_to_models` function is used to apply various selections on the given list of models. 97 | 98 | 99 | High-level Analysis 100 | ------------------- 101 | 102 | The various Config components and Analysis steps for the high-level analysis can be found in :class:`~asgardpy.data.dl4`. 103 | 104 | For the analysis step of flux-points :class:`~asgardpy.data.dl4.FluxPointsAnalysisStep`, the flux points are estimated for each instrument dataset, using the energy ranges 105 | provided in the respective config section of ``spectral_energy_range``. 106 | 107 | For deriving the correct EBL-deabsorbed spectrum, one can use :class:`~asgardpy.analysis.analysis.AsgardpyAnalysis.get_correct_intrinsic_model` and 108 | :class:`~asgardpy.analysis.analysis.AsgardpyAnalysis.get_correct_ebl_deabs_flux_points` after running the flux-points analysis step. 109 | 110 | .. _stats-intro: 111 | 112 | Statistics 113 | ---------- 114 | 115 | The :class:`~asgardpy.stats` module contains various functions to perform some statistics with the fitted DL4 datasets. 116 | One can perform tests on the preference of the assumed spectral model of the target source, by using either :class:`~asgardpy.stats.stats.check_model_preference_lrt` or :class:`~asgardpy.stats.stats.check_model_preference_aic`.
117 | -------------------------------------------------------------------------------- /environment.yml: -------------------------------------------------------------------------------- 1 | # Conda environment for asgardpy 2 | # 3 | # Install: conda env create -f environment.yml 4 | # Update: conda env update -f environment.yml 5 | # Activate: conda activate asgardpy 6 | # Deactivate: conda deactivate 7 | 8 | name: asgardpy 9 | 10 | channels: 11 | - conda-forge 12 | 13 | variables: 14 | PYTHONNOUSERSITE: "1" 15 | 16 | dependencies: 17 | # core dependencies 18 | - python=3.12 19 | - pip 20 | - astropy>=7.0,<8.0 21 | - numpy>2.0 22 | - gammapy~=1.3 23 | - pydantic>=2.5 24 | - ruamel.yaml 25 | - regions>=0.5 26 | - matplotlib>=3.4,<3.10 27 | - scipy>=1.12 28 | - iminuit>=2.8.0 29 | - towncrier>=24.7 30 | - xmltodict 31 | # test dependencies 32 | - pytest>8.0 33 | - pytest-cov 34 | - tqdm 35 | - pre-commit 36 | - tox 37 | # extra dependencies 38 | - ipython 39 | - jupyter 40 | - jupyterlab 41 | # dev dependencies 42 | - ruff 43 | - black 44 | - codespell 45 | - mypy 46 | - types-PyYAML 47 | - isort 48 | - nbsphinx 49 | - numdifftools 50 | - pandoc 51 | - pydocstyle 52 | - pylint 53 | - setuptools_scm 54 | - sphinx<9 55 | - furo 56 | - myst-parser 57 | - sphinx-copybutton>=0.5.0 58 | - sphinx-autobuild>=2021.3.14 59 | - sphinx-autodoc-typehints 60 | - sphinx_changelog 61 | - sphinxcontrib-towncrier 62 | - packaging 63 | - pip: 64 | - pytest-sphinx 65 | - autodoc_pydantic>=2.1 66 | - asgardpy 67 | -------------------------------------------------------------------------------- /environment_0.4.4.yml: -------------------------------------------------------------------------------- 1 | # Conda environment for asgardpy v0.4.4 2 | # 3 | # Install: conda env create -f environment.yml 4 | # Update: conda env update -f environment.yml 5 | # Activate: conda activate asgardpy_0.4.4 6 | # Deactivate: conda deactivate 7 | 8 | name: asgardpy_0.4.4 9 | 10 | channels: 11 | - 
conda-forge 12 | 13 | variables: 14 | PYTHONNOUSERSITE: "1" 15 | 16 | dependencies: 17 | # core dependencies 18 | - python=3.11 19 | - pip 20 | - astropy>=5.1,<6.0 21 | - numpy<2.0 22 | - gammapy~=1.1 23 | - pydantic<2 24 | - ruamel.yaml 25 | - regions>=0.5 26 | - matplotlib>=3.4 27 | - scipy~=1.11.4 28 | - iminuit>=2.8.0 29 | - towncrier<24.7 30 | - xmltodict 31 | # test dependencies 32 | - pytest>7.0 33 | - pytest-cov 34 | - tqdm 35 | - pre-commit 36 | - tox 37 | # extra dependencies 38 | - ipython 39 | - jupyter 40 | - jupyterlab 41 | # dev dependencies 42 | - ruff 43 | - black>=22.10 44 | - codespell 45 | - mypy>=1.2 46 | - isort>=5.10 47 | - nbsphinx 48 | - numdifftools 49 | - pandoc 50 | - pydocstyle 51 | - pylint 52 | - setuptools_scm 53 | - sphinx 54 | - furo>=2022.12.7 55 | - myst-parser>=2.0 56 | - sphinx-copybutton>=0.5.0 57 | - sphinx-autobuild>=2021.3.14 58 | - sphinx-autodoc-typehints 59 | - sphinxcontrib-towncrier 60 | - packaging 61 | - pip: 62 | - pytest-sphinx 63 | - autodoc_pydantic<2 64 | - asgardpy==0.4.4 65 | -------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | # For more information on the content of this file, 2 | # the developer documentation and the PyPA packaging documentation 3 | # * https://packaging.python.org/en/latest/specifications/declaring-project-metadata/#declaring-project-metadata 4 | # * The setuptools and setuptools_scm documentation for the respective sections 5 | [build-system] 6 | requires = ["setuptools>=60", "setuptools-scm[toml]>=8.0"] 7 | build-backend = "setuptools.build_meta" 8 | 9 | [project] 10 | # See https://setuptools.pypa.io/en/latest/userguide/quickstart.html for more project configuration options. 
11 | name = "asgardpy" 12 | 13 | description = "Gammapy-based pipeline for easy joint analysis of different gamma-ray datasets" 14 | 15 | # needed for setuptools_scm, we don't define a static version 16 | dynamic = ["version"] 17 | 18 | readme = "README.md" 19 | authors = [ 20 | {name = "Chaitanya Priyadarshi", email = "chaitanya.p.astrphys@gmail.com"}, 21 | {name = "Mireia Nievas Rosillo", email = "mnievas.work@gmail.com"}, 22 | ] 23 | maintainers = [ 24 | {name = "Chaitanya Priyadarshi", email = "chaitanya.p.astrphys@gmail.com"}, 25 | ] 26 | license = {text = "Apache-2.0"} 27 | # license_files = "LICENSE" For when PEP 639 is approved 28 | 29 | keywords = [ 30 | "Astronomy", 31 | "Gamma-rays", 32 | "Data analysis", 33 | ] 34 | classifiers = [ 35 | "Intended Audience :: Science/Research", 36 | "Development Status :: 3 - Alpha", 37 | "License :: OSI Approved :: Apache Software License", 38 | "Programming Language :: Python :: 3.11", 39 | "Programming Language :: Python :: 3.12", 40 | "Programming Language :: Python :: 3.13", 41 | "Topic :: Scientific/Engineering :: Astronomy", 42 | ] 43 | 44 | requires-python = ">=3.11" 45 | dependencies = [ 46 | "numpy>2.0", 47 | "scipy>=1.12", 48 | "astropy>=7.0,<8.0", 49 | "jupyter", 50 | "gammapy~=1.3", 51 | "regions>0.5", 52 | "matplotlib>=3.4,<3.10", 53 | "seaborn", 54 | "iminuit>=2.8.0", 55 | "ruamel.yaml", 56 | "pre-commit", 57 | "xmltodict", 58 | "tox", 59 | "tqdm", 60 | "pydantic>=2.5", 61 | "towncrier>=24.7", 62 | ] 63 | 64 | [project.urls] 65 | Homepage = "https://github.com/chaimain/asgardpy" 66 | Repository = "https://github.com/chaimain/asgardpy" 67 | Documentation = "https://asgardpy.readthedocs.io/en/latest/" 68 | 69 | [project.optional-dependencies] 70 | # Have more references like doc, test, etc? 
71 | dev = [ 72 | "ruff>=0.9.3", 73 | "codespell>=2.4.0", 74 | "mypy>=1.14", 75 | "isort>=5.13", 76 | "types-PyYAML", 77 | "pytest>8.0", 78 | "pytest-sphinx", 79 | "pytest-cov", 80 | "build", 81 | "setuptools_scm", 82 | "Sphinx>=8.0", 83 | "furo>=2022.12.7", 84 | "myst-parser>=2.0", 85 | "sphinx-copybutton>=0.5.0", 86 | "sphinx-autobuild>=2021.3.14", 87 | "sphinx-autodoc-typehints", 88 | "sphinxcontrib-towncrier", 89 | "sphinx_changelog", 90 | "autodoc_pydantic>=2.1", 91 | "packaging", 92 | ] 93 | 94 | [tool.setuptools.packages.find] 95 | where = ["src"] 96 | exclude = [ 97 | "asgardpy._dev_version", 98 | "tests", 99 | "docs", 100 | "scripts", 101 | ] 102 | 103 | [tool.setuptools] 104 | include-package-data = true 105 | 106 | [tool.setuptools.package-data] 107 | asgardpy = ["py.typed"] 108 | 109 | [tool.setuptools_scm] 110 | write_to = 'src/asgardpy/_version.py' 111 | 112 | [tool.towncrier] 113 | package = "asgardpy" 114 | directory = "docs/changes" 115 | filename = "CHANGES.rst" 116 | template = "docs/changes/template.rst" 117 | # let towncrier create proper links to the merged PR 118 | issue_format = "`#{issue} `__" 119 | 120 | [tool.towncrier.fragment.feature] 121 | name = "New Features" 122 | showcontent = true 123 | 124 | [tool.towncrier.fragment.bugfix] 125 | name = "Bug Fixes" 126 | showcontent = true 127 | 128 | [tool.towncrier.fragment.api] 129 | name = "API Changes" 130 | showcontent = true 131 | 132 | [tool.towncrier.fragment.maintenance] 133 | name = "Maintenance" 134 | showcontent = true 135 | 136 | [[tool.towncrier.section]] 137 | name = "" 138 | path = "" 139 | 140 | 141 | [tool.ruff] 142 | # src = ["src"] # Not needed for v0.6+ 143 | line-length = 115 144 | # whitespace before : E203 145 | exclude = [ 146 | ".venv", 147 | ".git", 148 | "__pycache__", 149 | "docs/build", 150 | "dist", 151 | ".mypy_cache", 152 | "src/asgardpy/version.py", 153 | "src/asgardpy/_version.py" 154 | ] 155 | 156 | [tool.ruff.lint] 157 | ignore = ["E203"] 158 | extend-select 
= [ 159 | "B", # flake8-bugbear 160 | "I", # isort 161 | "UP", # pyupgrade 162 | ] 163 | 164 | [tool.ruff.lint.per-file-ignores] 165 | "__init__.py" = ["F401", "I001"] 166 | "*/**/**/__init__.py" = ["F401", "E501", "I001"] 167 | 168 | [tool.mypy] 169 | ignore_missing_imports = true 170 | no_site_packages = true 171 | strict = false 172 | enable_error_code = ["ignore-without-code", "redundant-expr", "truthy-bool"] 173 | warn_unreachable = true 174 | 175 | [[tool.mypy.overrides]] 176 | module = [ 177 | "tests.*", 178 | "yaml", 179 | ] 180 | strict_optional = false 181 | ignore_missing_imports = true 182 | 183 | [tool.codespell] 184 | skip = '*.ipynb,*.map,*.css,*.js' 185 | ignore-words = "dev/codespell_ignore_words.txt" 186 | count = '' 187 | quiet-level = 3 188 | 189 | [tool.isort] 190 | profile = "black" 191 | multi_line_output = 3 192 | 193 | [tool.pytest.ini_options] 194 | minversion = "8.0" 195 | addopts = ["-ra", "--showlocals", "--strict-markers", "--strict-config"] 196 | xfail_strict = true 197 | testpaths = "tests/" 198 | python_classes = [ 199 | "Test*", 200 | "*Test" 201 | ] 202 | markers = ["test_data"] 203 | filterwarnings = [ 204 | "error::astropy.utils.exceptions.AstropyDeprecationWarning", 205 | "error::gammapy.utils.deprecation.GammapyDeprecationWarning", 206 | ] 207 | log_format = "%(asctime)s - %(levelname)s - %(name)s - %(message)s" 208 | log_level = "INFO" 209 | log_cli_level = "INFO" 210 | -------------------------------------------------------------------------------- /scripts/check_preferred_model.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | from pathlib import Path 4 | 5 | import numpy as np 6 | 7 | from asgardpy.analysis import AsgardpyAnalysis 8 | from asgardpy.config import AsgardpyConfig, write_asgardpy_model_to_file 9 | from asgardpy.stats import ( 10 | check_model_preference_aic, 11 | copy_target_config, 12 | fetch_all_analysis_fit_info, 13 | 
get_model_config_files, 14 | tabulate_best_fit_stats, 15 | ) 16 | 17 | log = logging.getLogger(__name__) 18 | 19 | parser = argparse.ArgumentParser(description="Get preferred best-fit spectral model") 20 | 21 | parser.add_argument( 22 | "--config", 23 | "-c", 24 | help="Path to the config file", 25 | ) 26 | 27 | parser.add_argument("--ebl-scale-factor", "-e", help="Value of EBL Norm Scale Factor", default=1.0, type=float) 28 | 29 | parser.add_argument( 30 | "--ebl-model-name", 31 | "-m", 32 | help="Name of EBL model as used by Gammapy", 33 | default="dominguez", 34 | type=str, 35 | ) 36 | 37 | parser.add_argument( 38 | "--write-config", 39 | help="Boolean to write the best-fit model into a separate file.", 40 | default=True, 41 | type=bool, 42 | ) 43 | 44 | 45 | def fetch_all_analysis_objects(main_config, spec_model_temp_files, ebl_scale_factor, ebl_model_name): 46 | """For a list of spectral models, initiate AsgardpyAnalysis objects.""" 47 | main_analysis_list = {} 48 | spec_models_list = [] 49 | 50 | for temp in spec_model_temp_files: 51 | temp_model = AsgardpyAnalysis(main_config) 52 | temp_model.config.target.models_file = temp 53 | 54 | temp_model_2 = AsgardpyAnalysis(temp_model.config) 55 | 56 | copy_target_config(temp_model, temp_model_2) 57 | 58 | if ebl_scale_factor != 1.0: 59 | temp_model_2.config.target.components[0].spectral.ebl_abs.alpha_norm = ebl_scale_factor 60 | 61 | if ebl_model_name != "dominguez": 62 | temp_model_2.config.target.components[0].spectral.ebl_abs.reference = ebl_model_name.replace("_", "-") 63 | else: 64 | temp_model_2.config.target.components[ 65 | 0 66 | ].spectral.ebl_abs.reference = temp_model.config.target.components[0].spectral.ebl_abs.reference 67 | 68 | spec_tag = temp.name.split(".")[0].split("_")[-1] 69 | spec_models_list.append(spec_tag) 70 | main_analysis_list[spec_tag] = {} 71 | 72 | main_analysis_list[spec_tag]["Analysis"] = temp_model_2 73 | 74 | spec_models_list = np.array(spec_models_list) 75 | 76 | return 
main_analysis_list, spec_models_list 77 | 78 | 79 | def get_best_preferred_model_lrt(best_sp_idx_lrt, pref_over_pl_chi2_list, spec_models_list, PL_idx, log): 80 | """ 81 | From a list of a given spectral model's preference over PL model as per LRT, 82 | get the index of the best spectral model and write appropriate logs. 83 | """ 84 | for idx in best_sp_idx_lrt: 85 | if pref_over_pl_chi2_list[idx] > 5: 86 | sp_idx_lrt = idx 87 | log.info("Best preferred spectral model over PL is %s", spec_models_list[idx]) 88 | else: 89 | sp_idx_lrt = PL_idx 90 | log.info("No other model preferred over PL") 91 | return sp_idx_lrt, log 92 | 93 | 94 | def get_best_preferred_model_aic(best_sp_idx_aic, list_rel_p, spec_models_list, fit_success_list, PL_idx, log): 95 | """ 96 | From a list of a given spectral model's relative p-value from a list of 97 | spectral models, as per AIC, get the index of the best spectral model and 98 | write appropriate logs. 99 | """ 100 | for idx in best_sp_idx_aic: 101 | if list_rel_p[idx] > 0.95: 102 | sp_idx_aic = idx 103 | log.info("Best preferred spectral model is %s", spec_models_list[fit_success_list][idx]) 104 | else: 105 | sp_idx_aic = PL_idx 106 | log.info("No other model preferred, hence PL is selected") 107 | 108 | return sp_idx_aic, log 109 | 110 | 111 | def main(): 112 | args = parser.parse_args() 113 | 114 | main_config = AsgardpyConfig.read(args.config) 115 | config_path = Path(args.config) 116 | config_path_file_name = config_path.name.split(".")[0] 117 | target_source_name = main_config.target.source_name 118 | 119 | steps_list = [] 120 | for s in main_config.general.steps: 121 | if s != "flux-points": 122 | steps_list.append(s) 123 | log.info("Target source is: %s", target_source_name) 124 | 125 | spec_model_temp_files = get_model_config_files(["lp", "bpl", "ecpl", "pl", "eclp", "sbpl"]) 126 | 127 | main_analysis_list, spec_models_list = fetch_all_analysis_objects( 128 | main_config, spec_model_temp_files, args.ebl_scale_factor, 
args.ebl_model_name 129 | ) 130 | 131 | # Run Analysis Steps till Fit 132 | PL_idx = 0 133 | 134 | for i, tag in enumerate(spec_models_list): 135 | log.info("Spectral model being tested: %s", tag) 136 | 137 | main_analysis_list[tag]["Analysis"].run(steps_list) 138 | 139 | if tag == "pl": 140 | PL_idx = i 141 | 142 | fit_success_list, stat_list, dof_list, pref_over_pl_chi2_list = fetch_all_analysis_fit_info( 143 | main_analysis_list, spec_models_list 144 | ) 145 | 146 | # If any spectral model has at least 5 sigmas preference over PL 147 | best_sp_idx_lrt = np.nonzero(pref_over_pl_chi2_list == np.nanmax(pref_over_pl_chi2_list))[0] 148 | sp_idx_lrt, log = get_best_preferred_model_lrt( 149 | best_sp_idx_lrt, 150 | pref_over_pl_chi2_list, 151 | spec_models_list, 152 | PL_idx, 153 | log, 154 | ) 155 | 156 | list_rel_p = check_model_preference_aic(stat_list, dof_list) 157 | 158 | best_sp_idx_aic = np.nonzero(list_rel_p == np.nanmax(list_rel_p))[0] 159 | 160 | sp_idx_aic, log = get_best_preferred_model_aic( 161 | best_sp_idx_aic, 162 | list_rel_p, 163 | spec_models_list, 164 | fit_success_list, 165 | PL_idx, 166 | log, 167 | ) 168 | 169 | stats_table = tabulate_best_fit_stats(spec_models_list, fit_success_list, main_analysis_list, list_rel_p) 170 | 171 | stats_table.meta["Target source name"] = target_source_name 172 | stats_table.meta["EBL model"] = args.ebl_model_name 173 | stats_table.meta["EBL scale factor"] = args.ebl_scale_factor 174 | 175 | file_name = f"{config_path_file_name}_{args.ebl_model_name}_{args.ebl_scale_factor}_fit_stats.ecsv" 176 | stats_table.write( 177 | main_config.general.outdir / file_name, 178 | format="ascii.ecsv", 179 | overwrite=True, 180 | ) 181 | 182 | if args.write_config: 183 | log.info("Write the spectral model") 184 | 185 | for idx, name in zip([sp_idx_lrt, sp_idx_aic], ["lrt", "aic"], strict=False): 186 | tag = spec_models_list[fit_success_list][idx] 187 | 188 | path = config_path.parent / 
f"{config_path_file_name}_model_most_pref_{name}.yaml" 189 | 190 | write_asgardpy_model_to_file( 191 | gammapy_model=main_analysis_list[tag]["Analysis"].final_model[0], output_file=path 192 | ) 193 | 194 | 195 | if __name__ == "__main__": 196 | main() 197 | -------------------------------------------------------------------------------- /scripts/download_asgardpy_data.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | 3 | set -eu 4 | 5 | GAMMAPY_DATA=${GAMMAPY_DATA:-""} 6 | 7 | # Used "zip -r9 fermipy_crab_zipped.zip ./" to zip the files 8 | OUT_ZIP="dev/fermipy_crab_zipped.zip" 9 | 10 | echo $GAMMAPY_DATA" is the path to Gammapy datasets" 11 | 12 | OUT_DIR=$GAMMAPY_DATA"fermipy-crab/" 13 | 14 | mkdir -p $OUT_DIR 15 | 16 | unzip \ 17 | -u \ 18 | ${OUT_ZIP} \ 19 | -d $OUT_DIR 20 | 21 | # Extra CTA-LST1 Crab Nebula data, from https://zenodo.org/records/11445184 22 | OUT_ZIP="dev/lst1_crab_dl4.zip" 23 | 24 | OUT_DIR=$GAMMAPY_DATA"cta-lst1/" 25 | 26 | mkdir -p $OUT_DIR 27 | 28 | unzip \ 29 | -u \ 30 | ${OUT_ZIP} \ 31 | -d $OUT_DIR 32 | -------------------------------------------------------------------------------- /scripts/run_asgardpy_full.py: -------------------------------------------------------------------------------- 1 | import argparse 2 | import logging 3 | 4 | from asgardpy.analysis import AsgardpyAnalysis 5 | from asgardpy.config import AsgardpyConfig 6 | 7 | log = logging.getLogger(__name__) 8 | 9 | parser = argparse.ArgumentParser(description="Run Asgardpy") 10 | 11 | parser.add_argument( 12 | "--config", 13 | "-c", 14 | help="Path to the Config file", 15 | ) 16 | 17 | 18 | def main(): 19 | args = parser.parse_args() 20 | base_config = AsgardpyConfig() 21 | 22 | main_config = base_config.read(args.config) 23 | log.info(f"Analysis steps mentioned in the config file: {main_config.general.steps}") 24 | log.info(f"Target source is: {main_config.target.source_name}") 25 | 26 | analysis = 
AsgardpyAnalysis(main_config)
    # Route the analysis object's log output through this script's logger.
    analysis.log = log

    # No explicit step list: run all steps declared in the config.
    analysis.run()


if __name__ == "__main__":
    main()
--------------------------------------------------------------------------------
/src/asgardpy/__init__.py:
--------------------------------------------------------------------------------
"""
asgardpy - Gammapy-based pipeline for easy joint analysis of different gamma-ray datasets

Licensed under `Apache 2.0 `_.
See `License `_.
"""

from . import analysis, base, config, data, gammapy, io, stats
from .version import __public_version__, __version__

__all__ = [
    "analysis",
    "base",
    "config",
    "data",
    "gammapy",
    "io",
    "stats",
    "__public_version__",
    "__version__",
]
--------------------------------------------------------------------------------
/src/asgardpy/_dev_version/__init__.py:
--------------------------------------------------------------------------------
# Try to use setuptools_scm to get the current version; this is only used
# in development installations from the git repository.
# see ctapipe/version.py for details
try:
    from setuptools_scm import get_version

    version = get_version(root="../..", relative_to=__file__)  # pragma: no cover
except Exception as e:  # pragma: no cover
    raise ImportError(f"setuptools_scm broken or not installed: {e}") from e
--------------------------------------------------------------------------------
/src/asgardpy/analysis/__init__.py:
--------------------------------------------------------------------------------
"""
Main Analysis Module

isort:skip_file
"""

from asgardpy.analysis.analysis import AsgardpyAnalysis
from asgardpy.analysis.step import AnalysisStep
from asgardpy.analysis.step_base import AnalysisStepBase, AnalysisStepEnum

__all__ = [
    "AsgardpyAnalysis",
    "AnalysisStep",
    "AnalysisStepBase",
    "AnalysisStepEnum",
]
--------------------------------------------------------------------------------
/src/asgardpy/analysis/step.py:
--------------------------------------------------------------------------------
"""
Main Class for the intermediate High-level Analysis Steps
"""

from gammapy.utils.registry import Registry

from asgardpy.data import (
    Datasets1DAnalysisStep,
    Datasets3DAnalysisStep,
    FitAnalysisStep,
    FluxPointsAnalysisStep,
)

__all__ = ["AnalysisStep"]

# Registry mapping each step's ``tag`` to its implementing class.
ANALYSIS_STEP_REGISTRY = Registry(
    [
        Datasets1DAnalysisStep,
        Datasets3DAnalysisStep,
        FitAnalysisStep,
        FluxPointsAnalysisStep,
    ]
)


class AnalysisStep:
    """
    Base class for creating Asgardpy Analysis Steps.
    """

    @staticmethod
    def create(tag, config, **kwargs):
        """
        Create one of the Analysis Step class listed in the Registry.

        Factory method: ``tag`` selects the step class (e.g. "datasets-1d",
        "fit"); ``config`` and ``kwargs`` are forwarded to its constructor.
        """
        cls = ANALYSIS_STEP_REGISTRY.get_cls(tag)
        return cls(config, **kwargs)
--------------------------------------------------------------------------------
/src/asgardpy/analysis/step_base.py:
--------------------------------------------------------------------------------
"""
Base Classes for creating the intermediate High-level Analysis Steps
"""

import abc
import logging
from enum import Enum

__all__ = [
    "AnalysisStepBase",
    "AnalysisStepEnum",
]


class AnalysisStepBase(abc.ABC):
    """Config section for creating a basic AsgardpyAnalysis Step."""

    tag = "analysis-step"

    def __init__(self, config, log=None, overwrite=True):
        self.config = config
        self.overwrite = overwrite

        # Populated later by run(); subclasses read these in _run().
        self.datasets = None
        self.instrument_spectral_info = None

        # Fall back to a module-level logger when the caller supplies none.
        if log is None:
            log = logging.getLogger(__name__)
        self.log = log

    def run(self, datasets=None, instrument_spectral_info=None):
        """
        One can provide datasets and instrument_spectral_info to be used,
        especially for the High-level Analysis steps.
        """
        self.datasets = datasets
        self.instrument_spectral_info = instrument_spectral_info

        # Delegate the actual work to the subclass implementation.
        final_product = self._run()
        self.log.info("Analysis Step %s completed", self.tag)

        return final_product


class AnalysisStepEnum(str, Enum):
    """Config section for list of Analysis Steps."""

    datasets_1d = "datasets-1d"
    datasets_3d = "datasets-3d"
    fit = "fit"
    flux_points = "flux-points"
--------------------------------------------------------------------------------
/src/asgardpy/analysis/tests/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/src/asgardpy/analysis/tests/__init__.py
--------------------------------------------------------------------------------
/src/asgardpy/analysis/tests/test_analysis_steps.py:
--------------------------------------------------------------------------------
import pytest

from asgardpy.analysis import AsgardpyAnalysis


def test_joint_3d_1d(base_config):
    """
    Test to run a major Fermi (3D) + HESS (1D) + MAGIC (1D) joint analysis.
    """

    analysis = AsgardpyAnalysis(base_config)

    # Exercise update_config by merging a modified copy of the base config.
    extra_config = base_config.model_copy()
    extra_config.general.n_jobs = 33
    analysis.update_config(extra_config)

    analysis.run(["datasets-3d"])
    analysis.run(["datasets-1d"])
    analysis.run_fit()

    assert analysis.config.general.n_jobs == 33
    assert len(analysis.datasets) == 4
    assert len(analysis.models) == 11
    assert analysis.fit_result.success is True


def test_analysis_basics(gammapy_data_path, base_config):
    """Testing some basic analysis functions."""

    other_config_path_1 = f"{gammapy_data_path}fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml"

    base_config.target.models_file = other_config_path_1

    analysis_1 = AsgardpyAnalysis(base_config)

    # Accessing models before the dataset steps have run must raise.
    with pytest.raises(RuntimeError):
        print(analysis_1.models)

    spec_model_name = analysis_1.config.target.components[0].spectral.type

    assert spec_model_name == "LogParabolaSpectralModel"

    other_config_path_2 = f"{gammapy_data_path}fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml"

    base_config.target.models_file = other_config_path_2

    # A datasets file is not a valid models file.
    with pytest.raises(TypeError):
        AsgardpyAnalysis(base_config)

    # Assigning a plain dict to .config should be merged into the config.
    config_dict = {"general": {"n_jobs": 111}}
    analysis_1.config = config_dict

    assert analysis_1.config.general.n_jobs == 111

    wrong_config = []
    with pytest.raises(TypeError):
        analysis_1.config(wrong_config)


def test_ebl_deabsorbed(gammapy_data_path, ebl_hess_pks):
    """Testing generation of EBL-deabsorbed Flux points."""
    from asgardpy.config.generator import write_asgardpy_model_to_file

    analysis = AsgardpyAnalysis(ebl_hess_pks)

    analysis.run()

    analysis.get_correct_ebl_deabs_flux_points()

    assert len(analysis.model_deabs.parameters) == 3
    assert analysis.flux_points_deabs

    # output_file=None exercises the "no file written" branch.
    write_asgardpy_model_to_file(
        gammapy_model=analysis.final_model,
        output_file=None,
    )
--------------------------------------------------------------------------------
/src/asgardpy/base/__init__.py:
--------------------------------------------------------------------------------
"""
Base Module

isort:skip_file
"""

from asgardpy.base.base import (
    AngleType,
    BaseConfig,
    EnergyRangeConfig,
    EnergyType,
    FrameEnum,
    PathType,
    TimeFormatEnum,
    TimeInterval,
)
from asgardpy.base.reduction import (
    ObservationsConfig,
    ReductionTypeEnum,
    RegionsConfig,
    RequiredHDUEnum,
    BackgroundConfig,
    BackgroundMethodEnum,
    ExclusionRegionsConfig,
    MapSelectionEnum,
    SafeMaskConfig,
    SafeMaskMethodsEnum,
    generate_dl4_dataset,
    get_bkg_maker,
    get_dataset_maker,
    get_dataset_reference,
    get_exclusion_region_mask,
    get_filtered_observations,
    get_safe_mask_maker,
)
from asgardpy.base.geom import (
    MapAxesConfig,
    MapFrameShapeConfig,
    ProjectionEnum,
    SelectionConfig,
    SkyPositionConfig,
    EnergyAxisConfig,
    EnergyEdgesCustomConfig,
    GeomConfig,
    WcsConfig,
    create_counts_map,
    generate_geom,
    get_energy_axis,
    get_source_position,
)

__all__ = [
    "AngleType",
    "BackgroundConfig",
    "BackgroundMethodEnum",
    "BaseConfig",
    "EnergyAxisConfig",
    "EnergyEdgesCustomConfig",
    "EnergyRangeConfig",
    "EnergyType",
    "ExclusionRegionsConfig",
    "FrameEnum",
    "GeomConfig",
    "MapAxesConfig",
    "MapFrameShapeConfig",
    "MapSelectionEnum",
    "ObservationsConfig",
    "PathType",
    "ProjectionEnum",
    "ReductionTypeEnum",
    "RegionsConfig",
    "RequiredHDUEnum",
    "SafeMaskConfig",
    "SafeMaskMethodsEnum",
    "SelectionConfig",
    "SkyPositionConfig",
    "TimeFormatEnum",
    "TimeInterval",
    "WcsConfig",
"create_counts_map", 81 | "generate_dl4_dataset", 82 | "generate_geom", 83 | "get_bkg_maker", 84 | "get_dataset_maker", 85 | "get_dataset_reference", 86 | "get_energy_axis", 87 | "get_exclusion_region_mask", 88 | "get_filtered_observations", 89 | "get_safe_mask_maker", 90 | "get_source_position", 91 | ] 92 | -------------------------------------------------------------------------------- /src/asgardpy/base/base.py: -------------------------------------------------------------------------------- 1 | """ 2 | Classes containing the Base for the Analysis steps and some Basic Config types. 3 | """ 4 | 5 | import html 6 | from dataclasses import dataclass 7 | from enum import Enum 8 | from pathlib import Path 9 | from typing import Annotated 10 | 11 | from astropy import units as u 12 | from astropy.time import Time 13 | from pydantic import ( 14 | BaseModel, 15 | BeforeValidator, 16 | ConfigDict, 17 | GetCoreSchemaHandler, 18 | PlainSerializer, 19 | ) 20 | from pydantic_core import core_schema 21 | 22 | __all__ = [ 23 | "AngleType", 24 | "BaseConfig", 25 | "EnergyRangeConfig", 26 | "EnergyType", 27 | "FrameEnum", 28 | "PathType", 29 | "TimeFormatEnum", 30 | "TimeInterval", 31 | ] 32 | 33 | 34 | # Base Angle Type Quantity 35 | def validate_angle_type(v: str) -> u.Quantity: 36 | """Validation for Base Angle Type Quantity""" 37 | if isinstance(v, u.Quantity): 38 | v_ = v 39 | elif isinstance(v, str): 40 | v_ = u.Quantity(v) 41 | if v_.unit.physical_type != "angle": 42 | raise ValueError(f"Invalid unit for angle: {v_.unit!r}") 43 | else: 44 | return v_ 45 | 46 | 47 | AngleType = Annotated[ 48 | str | u.Quantity, 49 | BeforeValidator(validate_angle_type), 50 | PlainSerializer(lambda x: f"{x.value} {x.unit}", when_used="json-unless-none", return_type=str), 51 | ] 52 | 53 | 54 | # Base Energy Type Quantity 55 | def validate_energy_type(v: str) -> u.Quantity: 56 | """Validation for Base Energy Type Quantity""" 57 | if isinstance(v, u.Quantity): 58 | v_ = v 59 | elif 
isinstance(v, str): 60 | v_ = u.Quantity(v) 61 | if v_.unit.physical_type != "energy": 62 | raise ValueError(f"Invalid unit for energy: {v_.unit!r}") 63 | else: 64 | return v_ 65 | 66 | 67 | EnergyType = Annotated[ 68 | str | u.Quantity, 69 | BeforeValidator(validate_energy_type), 70 | PlainSerializer(lambda x: f"{x.value} {x.unit}", when_used="json-unless-none", return_type=str), 71 | ] 72 | 73 | 74 | # Base Path Type Quantity 75 | def validate_path_type(v: str) -> Path: 76 | """Validation for Base Path Type Quantity""" 77 | if v == "None": 78 | return Path(".") 79 | else: 80 | path_ = Path(v).resolve() 81 | # Only check if the file location or directory path exists 82 | if path_.is_file(): 83 | path_ = path_.parent 84 | 85 | if path_.exists(): 86 | return Path(v) 87 | else: 88 | raise ValueError(f"Path {v} does not exist") 89 | 90 | 91 | PathType = Annotated[ 92 | str | Path, 93 | BeforeValidator(validate_path_type), 94 | PlainSerializer(lambda x: Path(x), when_used="json-unless-none", return_type=Path), 95 | ] 96 | 97 | 98 | class FrameEnum(str, Enum): 99 | """Config section for list of frames on creating a SkyCoord object.""" 100 | 101 | icrs = "icrs" 102 | galactic = "galactic" 103 | 104 | 105 | class TimeFormatEnum(str, Enum): 106 | """Config section for list of formats for creating a Time object.""" 107 | 108 | datetime = "datetime" 109 | fits = "fits" 110 | iso = "iso" 111 | isot = "isot" 112 | jd = "jd" 113 | mjd = "mjd" 114 | unix = "unix" 115 | 116 | 117 | @dataclass 118 | class TimeInterval: 119 | """ 120 | Config section for getting main information for creating a Time Interval 121 | object. 
122 | """ 123 | 124 | interval: dict[str, str | float] 125 | 126 | def build(self) -> dict: 127 | value_dict = {} 128 | value_dict["format"] = Time(self.interval["start"]).format 129 | 130 | value_dict["start"] = str(self.interval["start"]) 131 | 132 | value_dict["stop"] = str(self.interval["stop"]) 133 | 134 | return value_dict 135 | 136 | @classmethod 137 | def __get_pydantic_core_schema__( 138 | cls, source: type[dict], handler: GetCoreSchemaHandler 139 | ) -> core_schema.CoreSchema: 140 | assert source is TimeInterval 141 | return core_schema.no_info_after_validator_function( 142 | cls._validate, 143 | core_schema.dict_schema(keys_schema=core_schema.str_schema(), values_schema=core_schema.str_schema()), 144 | serialization=core_schema.plain_serializer_function_ser_schema( 145 | cls._serialize, 146 | info_arg=False, 147 | return_schema=core_schema.dict_schema( 148 | keys_schema=core_schema.str_schema(), values_schema=core_schema.str_schema() 149 | ), 150 | ), 151 | ) 152 | 153 | @staticmethod 154 | def _validate(value: dict) -> "TimeInterval": 155 | inv_dict: dict[str, str | float] = {} 156 | 157 | inv_dict["format"] = value["format"] 158 | 159 | # Read all values as string 160 | value["start"] = str(value["start"]) 161 | value["stop"] = str(value["stop"]) 162 | 163 | inv_dict["start"] = Time(value["start"], format=value["format"]) 164 | inv_dict["stop"] = Time(value["stop"], format=value["format"]) 165 | 166 | return TimeInterval(inv_dict) 167 | 168 | @staticmethod 169 | def _serialize(value: "TimeInterval") -> dict: 170 | return value.build() 171 | 172 | 173 | class BaseConfig(BaseModel): 174 | """ 175 | Base Config class for creating other Config sections with specific encoders. 
176 | """ 177 | 178 | model_config = ConfigDict( 179 | arbitrary_types_allowed=True, 180 | validate_assignment=True, 181 | extra="forbid", 182 | validate_default=True, 183 | use_enum_values=True, 184 | ) 185 | 186 | def _repr_html_(self): # pragma: no cover 187 | try: 188 | return self.to_html() 189 | except AttributeError: 190 | return f"
{html.escape(str(self))}
" 191 | 192 | 193 | # Basic Quantity ranges Type for building the Config 194 | class EnergyRangeConfig(BaseConfig): 195 | """ 196 | Config section for getting a energy range information for creating an 197 | Energy type Quantity object. 198 | """ 199 | 200 | min: EnergyType = 1 * u.GeV 201 | max: EnergyType = 1 * u.TeV 202 | -------------------------------------------------------------------------------- /src/asgardpy/config/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Configuration Module 3 | """ 4 | 5 | from asgardpy.config.generator import ( 6 | AsgardpyConfig, 7 | GeneralConfig, 8 | gammapy_model_to_asgardpy_model_config, 9 | write_asgardpy_model_to_file, 10 | ) 11 | from asgardpy.config.operations import ( 12 | all_model_templates, 13 | compound_model_dict_converstion, 14 | deep_update, 15 | get_model_template, 16 | recursive_merge_dicts, 17 | ) 18 | 19 | __all__ = [ 20 | "all_model_templates", 21 | "compound_model_dict_converstion", 22 | "deep_update", 23 | "AsgardpyConfig", 24 | "GeneralConfig", 25 | "gammapy_model_to_asgardpy_model_config", 26 | "get_model_template", 27 | "recursive_merge_dicts", 28 | "write_asgardpy_model_to_file", 29 | ] 30 | -------------------------------------------------------------------------------- /src/asgardpy/config/model_templates/model_template_bpl.yaml: -------------------------------------------------------------------------------- 1 | target: 2 | components: 3 | - type: SkyModel 4 | spectral: 5 | type: BrokenPowerLawSpectralModel 6 | parameters: 7 | - name: amplitude 8 | value: 1.0e-05 9 | unit: cm-2 s-1 TeV-1 10 | error: 1.5e-06 11 | min: 1.0e-13 12 | max: 0.01 13 | frozen: false 14 | - name: ebreak 15 | value: 0.02 16 | unit: TeV 17 | error: 0.0 18 | min: 0.0001 19 | max: 100.0 20 | frozen: true 21 | - name: index1 22 | value: 2.0 23 | unit: '' 24 | error: 0.1 25 | min: 0.5 26 | max: 5.0 27 | frozen: false 28 | - name: index2 29 | value: 3.0 30 | unit: '' 31 | 
        error: 0.01
        min: 0.5
        max: 5.0
        frozen: false

--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_bpl2.yaml:
--------------------------------------------------------------------------------
# Template: BrokenPowerLaw2 spectral model ("bpl2" tag).
target:
  components:
  - type: SkyModel
    spectral:
      type: BrokenPowerLaw2SpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-06
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: ebreak
        value: 0.02
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: index1
        value: 2.0
        unit: ''
        error: 0.1
        min: 0.5
        max: 5.0
        frozen: false
      - name: index_diff
        value: 1.0
        unit: ''
        error: 0.001
        min: 0.001
        max: 3.0
        frozen: false

--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_eclp.yaml:
--------------------------------------------------------------------------------
# Template: ExpCutoffLogParabola spectral model ("eclp" tag).
target:
  components:
  - type: SkyModel
    spectral:
      type: ExpCutoffLogParabolaSpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-06
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: reference
        value: 0.0015
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: alpha_1
        value: 1.5
        unit: ''
        error: 0.1
        min: 0.5
        max: 5.0
        frozen: false
      - name: beta
        value: 0.1
        unit: ''
        error: 0.01
        min: 0.01
        max: 1.0
        frozen: false
      - name: alpha_2
        value: 1
        unit: ''
        error: 0.1
        min: 0.1
        max: 5.0
        frozen: true
      - name: lambda_
        value: 0.05
        unit: TeV-1
        error: 0.01
        min: 1.0e-2
        max: 1.0e6
        frozen: false

--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_ecpl-3fgl.yaml:
--------------------------------------------------------------------------------
# Template: ExpCutoffPowerLaw (3FGL parametrization) spectral model.
target:
  components:
  - spectral:
      type: ExpCutoffPowerLaw3FGLSpectralModel
      parameters:
      - {name: index, value: 1.5}
      - {name: amplitude, value: 1.0e-12, unit: TeV-1 s-1 cm-2}
      - {name: reference, unit: TeV}
      - {name: ecut, value: 10.0, unit: TeV}
--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_ecpl.yaml:
--------------------------------------------------------------------------------
# Template: ExpCutoffPowerLaw spectral model ("ecpl" tag, alpha frozen).
target:
  components:
  - type: SkyModel
    spectral:
      type: ExpCutoffPowerLawSpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-06
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: reference
        value: 0.0015
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: index
        value: 1.7
        unit: ''
        error: 0.1
        min: 0.1
        max: 5.0
        frozen: false
      - name: lambda_
        value: 8
        unit: 'TeV-1'
        error: 0.01
        min: 1.0e-3
        max: 1.0e3
        frozen: false
      - name: alpha
        value: 1.0
        unit: ''
        error: 0.1
        min: 0.5
        max: 5.0
        frozen: true

--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_ecpl2.yaml:
--------------------------------------------------------------------------------
# Template: ExpCutoffPowerLaw spectral model ("ecpl2" tag, alpha free).
target:
  components:
  - type: SkyModel
    spectral:
      type: ExpCutoffPowerLawSpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-06
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: reference
        value: 0.0015
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: index
        value: 1.7
        unit: ''
        error: 0.1
        min: 0.1
        max: 5.0
        frozen: false
      - name: lambda_
        value: 8
        unit: 'TeV-1'
        error: 0.01
        min: 1.0e-3
        max: 1.0e3
        frozen: false
      - name: alpha
        value: 1.0
        unit: ''
        error: 0.1
        min: 0.5
        max: 5.0
        frozen: false

--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_fov.yaml:
--------------------------------------------------------------------------------
# Template: Field-of-View background model ("fov" tag).
target:
  components:
  - type: FoVBackgroundModel
    spectral:
      type: PowerLawNormSpectralModel
      parameters:
      - name: norm
        value: 1.3
        unit: ''
        min: 0.0
        max: .nan
        frozen: false
        error: 0.075
      - name: tilt
        value: 0.0
        unit: ''
        min: .nan
        max: .nan
        frozen: true
        error: 0.0
      - name: reference
        value: 1.0
        unit: TeV
        min: .nan
        max: .nan
        frozen: true
        error: 0.0
    spatial:
      type: ConstantSpatialModel
      parameters:
      - value: 10
        unit: "sr-1"
--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_lp.yaml:
--------------------------------------------------------------------------------
# Template: LogParabola spectral model ("lp" tag).
target:
  components:
  - type: SkyModel
    spectral:
      type: LogParabolaSpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-06
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: reference
        value: 0.0015
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: alpha
        value: 2
        unit: ''
        error: 0.1
        min: 0.5
        max: 5.0
        frozen: false
      - name: beta
        value: 0.1
        unit: ''
        error: 0.01
        min: 0.01
        max: 1.0
        frozen: false
--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_pl.yaml:
--------------------------------------------------------------------------------
# Template: PowerLaw spectral model ("pl" tag).
target:
  components:
  - type: SkyModel
    spectral:
      type: PowerLawSpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-06
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: reference
        value: 0.0015
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: index
        value: 2
        unit: ''
        error: 0.1
        min: 0.1
        max: 5.0
        frozen: false
--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_pl_ebl.yaml:
--------------------------------------------------------------------------------
# Template: PowerLaw with EBL absorption ("ebl" tag).
# NOTE(review): the filename below is an absolute, machine-specific path --
# confirm it is overridden at runtime.
target:
  components:
  - spectral:
      type: PowerLawSpectralModel
      parameters:
      - {name: index, value: 2.5549376930151024, error: 0.29823835157332484,
        min: 0.5, max: 5.0}
      - {name: amplitude, value: 1.2975798577708647e-11, unit: TeV-1 s-1 cm-2,
        error: 1.947125474966853e-12, min: 1.0e-13, max: 0.01}
      - {name: reference, unit: TeV, min: 0.0001, max: 100.0}
      ebl_abs: {filename: /home/chaitanya/software/gammapy-datasets/1.3/ebl/ebl_dominguez11.fits.gz,
        redshift: 0.11599999999999999}
--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_sbpl.yaml:
--------------------------------------------------------------------------------
# Template: SmoothBrokenPowerLaw spectral model ("sbpl" tag).
target:
  components:
  - type: SkyModel
    spectral:
      type: SmoothBrokenPowerLawSpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-06
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: reference
        value: 0.0015
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: ebreak
        value: 0.02
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: false
      - name: index1
        value: 2.0
        unit: ''
        error: 0.1
        min: 0.5
        max: 5.0
        frozen: false
      - name: index2
        value: 3.0
        unit: ''
        error: 0.01
        min: 0.5
        max: 5.0
        frozen: false
      - name: beta
        value: 0.01
        unit: ''
        error: 0.001
        min: 0.0
        max: 5
        frozen: false
--------------------------------------------------------------------------------
/src/asgardpy/config/model_templates/model_template_secpl.yaml:
--------------------------------------------------------------------------------
# Template: SuperExpCutoffPowerLaw (4FGL-DR3 parametrization, "secpl" tag).
target:
  components:
  - type: SkyModel
    spectral:
      type: SuperExpCutoffPowerLaw4FGLDR3SpectralModel
      parameters:
      - name: amplitude
        value: 1.0e-05
        unit: cm-2 s-1 TeV-1
        error: 1.5e-08
        min: 1.0e-13
        max: 0.01
        frozen: false
      - name: reference
        value: 0.0015
        unit: TeV
        error: 0.0
        min: 0.0001
        max: 100.0
        frozen: true
      - name: index_1
        value: 1.5
        unit: ''
        error: 0.1
        min: 0.1
        max: 5.0
        frozen: false
      - name: index_2
        value: 2.0
        unit: ''
        error: 0.01
        min: 0.5
        max: 5.0
        frozen: false
      - name: expfactor
        value: 0.001
        unit: ''
        error: 0.001
        min: 1e-5
        max: 0.1
        frozen: false
--------------------------------------------------------------------------------
/src/asgardpy/config/operations.py:
--------------------------------------------------------------------------------
"""
Main AsgardpyConfig Operations Module
"""

import logging
from collections.abc import Mapping
from pathlib import Path

import numpy as np
from gammapy.modeling.models
def check_gammapy_model(gammapy_model):
    """
    For a given object type, try to read it as a Gammapy Models object.

    Parameters
    ----------
    gammapy_model : `~gammapy.modeling.models.Models`, `~gammapy.modeling.models.SkyModel` or path-like
        Either an existing Gammapy model object, or a path to a serialized
        models file readable by ``Models.read``.

    Returns
    -------
    models_gpy : `~gammapy.modeling.models.Models`
        The input wrapped (or read) as a Models object.

    Raises
    ------
    TypeError
        If the given file cannot be read by Gammapy Models.
    """
    if isinstance(gammapy_model, Models | SkyModel):
        models_gpy = Models(gammapy_model)
    else:
        try:
            models_gpy = Models.read(gammapy_model)
        except KeyError as exc:
            # Bug fix: the message was previously built logging-style
            # ("%s", arg) so the filename was never interpolated into it,
            # and the exception was chained "from KeyError" (the class)
            # instead of the caught instance.
            raise TypeError(f"{gammapy_model} File cannot be read by Gammapy Models") from exc

    return models_gpy
def recursive_merge_dicts(base_config, extra_config):
    """
    Recursively merge two dictionaries, with ``extra_config`` entries taking
    precedence over ``base_config`` entries.

    Unlike the built-in ``dict.update``, nested dictionaries are merged
    key-by-key rather than replaced wholesale. Lists (e.g. lists of model
    parameter dicts) are merged element-wise via ``recursive_merge_lists``;
    extra trailing items from ``extra_config`` are appended to the result.

    Combined here are 2 options from SO.

    See:
    http://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth/3233356#3233356
    and also
    https://stackoverflow.com/questions/3232943/update-value-of-a-nested-dictionary-of-varying-depth/18394648#18394648

    Parameters
    ----------
    base_config : dict
        dictionary to be merged
    extra_config : dict
        dictionary to be merged

    Returns
    -------
    merged : dict
        merged dict
    """
    merged = base_config.copy()

    for key, new_value in extra_config.items():
        if key in merged and isinstance(merged[key], list):
            # Element-wise merge for lists of dicts
            merged[key] = recursive_merge_lists(merged[key], extra_config[key], new_value)
        elif key in merged and isinstance(merged[key], dict):
            # Recurse into nested sections
            merged[key] = recursive_merge_dicts(merged[key] or {}, new_value)
        else:
            # New key, or a plain value override
            merged[key] = new_value

    return merged
147 | """ 148 | ebl_abs = dict["model2"] 149 | ebl_abs["alpha_norm"] = ebl_abs["parameters"][0]["value"] 150 | ebl_abs["redshift"] = ebl_abs["parameters"][1]["value"] 151 | ebl_abs.pop("parameters", None) 152 | 153 | dict["type"] = dict["model1"]["type"] 154 | dict["parameters"] = dict["model1"]["parameters"] 155 | dict["ebl_abs"] = ebl_abs 156 | 157 | dict.pop("model1", None) 158 | dict.pop("model2", None) 159 | dict.pop("operator", None) 160 | 161 | return dict 162 | -------------------------------------------------------------------------------- /src/asgardpy/config/template_model.yaml: -------------------------------------------------------------------------------- 1 | target: 2 | components: 3 | - type: SkyModel 4 | spectral: 5 | type: LogParabolaSpectralModel 6 | parameters: 7 | - name: amplitude 8 | value: 1.0e-05 9 | unit: cm-2 s-1 TeV-1 10 | error: 1.5e-06 11 | min: 1.0e-13 12 | max: 0.01 13 | frozen: false 14 | - name: reference 15 | value: 0.0015 16 | unit: TeV 17 | error: 0.0 18 | min: 0.0001 19 | max: 100.0 20 | frozen: true 21 | - name: alpha 22 | value: 2 23 | unit: '' 24 | error: 0.1 25 | min: 0.5 26 | max: 5.0 27 | frozen: false 28 | - name: beta 29 | value: 0.1 30 | unit: '' 31 | error: 0.01 32 | min: 0.01 33 | max: 1.0 34 | frozen: false 35 | -------------------------------------------------------------------------------- /src/asgardpy/config/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/src/asgardpy/config/tests/__init__.py -------------------------------------------------------------------------------- /src/asgardpy/config/tests/test_config.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from asgardpy.config import AsgardpyConfig 4 | 5 | 6 | def test_config_basic(): 7 | """Test on basic Config features.""" 8 | 9 | config = 
def test_get_model_template():
    """Test for reading a model template by a given tag."""

    from asgardpy.config.operations import get_model_template

    # "eclp" resolves to the ExpCutoffLogParabola template file shipped
    # under config/model_templates/
    new_model = get_model_template("eclp")

    new_config = AsgardpyConfig.read(new_model)

    assert new_config.target.components[0].spectral.type == "ExpCutoffLogParabolaSpectralModel"

    # Writing over an existing template file without overwrite=True must fail
    with pytest.raises(IOError):
        new_config.write(new_model, overwrite=False)
def test_create_config_from_dict():
    """Test to create AsgardpyConfig from a simple dict."""

    # Nested dict is coerced into the corresponding config sections
    gen_dict = {"general": {"log": {"level": "warning"}}}
    config = AsgardpyConfig(**gen_dict)

    assert config.general.log.level == "warning"
def test_config_update():
    """Tests to update target model config from other AsgardpyConfig file."""

    from asgardpy.config.operations import get_model_template

    main_config = AsgardpyConfig()

    # Template files for BrokenPowerLaw and SmoothBrokenPowerLaw models
    spec_model_template_file_1 = get_model_template("bpl")
    spec_model_template_file_2 = get_model_template("sbpl")

    other_config_1 = AsgardpyConfig.read(spec_model_template_file_1)
    other_config_2 = AsgardpyConfig.read(spec_model_template_file_2)

    main_config = main_config.update(other_config_1)
    new_spectral_model_name_1 = main_config.target.components[0].spectral.type

    # update() also accepts a raw YAML string
    new_config_str = """
    general:
      n_jobs: 100
    """
    main_config_2 = main_config.update(new_config_str)

    # Recursive merge: sbpl has more parameters than bpl, extras are appended
    main_config = main_config.update(other_config_2, merge_recursive=True)

    new_spectral_model_name_2 = main_config.target.components[0].spectral.type
    spectral_model_params = main_config.target.components[0].spectral.parameters

    assert new_spectral_model_name_1 == "BrokenPowerLawSpectralModel"
    assert main_config_2.general.n_jobs == 100
    # Non-config types are rejected
    with pytest.raises(TypeError):
        main_config.update(5)
    assert new_spectral_model_name_2 == "SmoothBrokenPowerLawSpectralModel"
    # sbpl template defines 6 spectral parameters
    assert len(spectral_model_params) == 6
# add a marker for the tests that need private data and don't run them
# by default
def pytest_configure(config):
    """Append ``not test_data`` to the mark expression unless the user asked for those tests."""
    markexpr = config.option.markexpr
    if "test_data" not in markexpr:  # pragma: no cover
        joiner = " and " if markexpr else ""
        config.option.markexpr = f"{markexpr}{joiner}not test_data"
@pytest.fixture  # (scope="session")
def gammapy_data_path():
    """Save a copy of path of gammapy-data for easy and general use."""

    # Path layout used by the CI test runner
    ci_data_path = "./gammapy-datasets/1.3/"

    if os.path.exists(ci_data_path):
        # Update the environ for builtin EBL models
        os.environ["GAMMAPY_DATA"] = ci_data_path
        return ci_data_path

    # Using the saved path in the environ for users
    return os.environ.get("GAMMAPY_DATA", "not set")
@pytest.fixture  # (scope="session")
def base_config_1d(base_config):
    """Define base config for only 1D analysis."""

    # NOTE(review): this aliases (not copies) the base_config fixture object,
    # so the mutations below also affect base_config within the same test.
    base_config_1d = base_config
    base_config_1d.target.source_name = "Crab Nebula"

    # Update model parameters
    # parameters[0..2]: presumably amplitude, reference and index/alpha of the
    # template spectral model — confirm against config_test_base.yaml
    base_config_1d.target.components[0].spectral.parameters[0].value = 1.0e-9
    base_config_1d.target.components[0].spectral.parameters[1].value = 0.4
    base_config_1d.target.components[0].spectral.parameters[2].value = 2.0

    # Restrict the fit to energies above 100 GeV
    base_config_1d.fit_params.fit_range.min = "100 GeV"

    return base_config_1d
@pytest.fixture  # (scope="session")
def ebl_hess_pks(ebl_deabs_path, gammapy_data_path):
    """Define the config for HESS PKS 2155-304 data."""

    from asgardpy.config import AsgardpyConfig

    config = AsgardpyConfig().read(ebl_deabs_path)

    # Update DL4 file path to the steady-state PKS 2155-304 dataset in gammapy-data
    config.dataset1d.instruments[0].dl4_dataset_info.dl4_dataset.input_dir = f"{gammapy_data_path}PKS2155-steady/"

    return config
| ) 12 | from asgardpy.data.dataset_3d import ( 13 | Dataset3DConfig, 14 | Dataset3DGeneration, 15 | Datasets3DAnalysisStep, 16 | ) 17 | from asgardpy.data.dl4 import ( 18 | FitAnalysisStep, 19 | FitConfig, 20 | FluxPointsAnalysisStep, 21 | FluxPointsConfig, 22 | ) 23 | from asgardpy.data.target import ( 24 | BrokenPowerLaw2SpectralModel, 25 | ExpCutoffLogParabolaSpectralModel, 26 | Target, 27 | add_ebl_model_from_config, 28 | apply_selection_mask_to_models, 29 | config_to_dict, 30 | get_models_from_catalog, 31 | set_models, 32 | ) 33 | 34 | __all__ = [ 35 | "BrokenPowerLaw2SpectralModel", 36 | "Dataset1DConfig", 37 | "Dataset1DGeneration", 38 | "Dataset3DConfig", 39 | "Dataset3DGeneration", 40 | "Datasets1DAnalysisStep", 41 | "Datasets3DAnalysisStep", 42 | "ExpCutoffLogParabolaSpectralModel", 43 | "FitAnalysisStep", 44 | "FitConfig", 45 | "FluxPointsAnalysisStep", 46 | "FluxPointsConfig", 47 | "Target", 48 | "add_ebl_model_from_config", 49 | "apply_selection_mask_to_models", 50 | "config_to_dict", 51 | "get_models_from_catalog", 52 | "set_models", 53 | ] 54 | -------------------------------------------------------------------------------- /src/asgardpy/data/dl4.py: -------------------------------------------------------------------------------- 1 | """ 2 | Main classes to define High-level Analysis Config and the Analysis Steps. 
# Defining various components of High-level Analysis Config
class BackendEnum(str, Enum):
    """Config section for a list Fitting backend methods."""

    # Backends accepted by gammapy.modeling.Fit
    minuit = "minuit"
    scipy = "scipy"


class FitConfig(BaseConfig):
    """Config section for parameters to use for Fit function."""

    # Energy range used to build the fit mask (see FitAnalysisStep._set_datasets)
    fit_range: EnergyRangeConfig = EnergyRangeConfig()
    # Fitting backend; defaults to minuit
    backend: BackendEnum = BackendEnum.minuit
    # Keyword options forwarded verbatim to the Gammapy Fit object
    optimize_opts: dict = {}
    covariance_opts: dict = {}
    confidence_opts: dict = {}
    # Keep the parameter trace of the optimization
    store_trace: bool = True


class FluxPointsConfig(BaseConfig):
    """Config section for parameters to use for FluxPointsEstimator function."""

    # Extra keyword arguments forwarded to FluxPointsEstimator
    parameters: dict = {"selection_optional": "all"}
    # Re-optimize nuisance parameters per energy bin if True
    reoptimize: bool = False
    tag = "fit"

    def _run(self):
        """Configure the Fit object, mask datasets to the fit range and run the fit."""
        self.fit_params = self.config.fit_params

        self._setup_fit()
        final_dataset = self._set_datasets()
        # Joint fit of the models over all (masked) datasets
        self.fit_result = self.fit.run(datasets=final_dataset)

        self.log.info(self.fit_result)

    def _setup_fit(self):
        """
        Setup the Gammapy Fit function with all the provided parameters from
        the config.
        """
        self.fit = Fit(
            backend=self.fit_params.backend,
            optimize_opts=self.fit_params.optimize_opts,
            covariance_opts=self.fit_params.covariance_opts,
            confidence_opts=self.fit_params.confidence_opts,
            store_trace=self.fit_params.store_trace,
        )

    def _set_datasets(self):
        """
        Prepare each dataset for running the Fit function, by setting the
        energy range.

        Returns
        -------
        final_dataset : `gammapy.datasets.Datasets`
            All input datasets; counts-based datasets get ``mask_fit`` set
            from the configured fit range, FluxPointsDataset entries are
            appended unmasked.
        """
        en_min = u.Quantity(self.fit_params.fit_range.min)
        en_max = u.Quantity(self.fit_params.fit_range.max)

        final_dataset = Datasets()
        for data in self.datasets:
            # FluxPointsDataset has no counts geometry, so no mask is applied
            if not isinstance(data, FluxPointsDataset):
                geom = data.counts.geom
                data.mask_fit = geom.energy_mask(en_min, en_max)
            final_dataset.append(data)

        return final_dataset
    tag = "flux-points"

    def _run(self):
        """Estimate flux points per instrument, using instrument-specific energy edges."""
        self.flux_points = []
        datasets, energy_edges = self._sort_datasets_info()

        # One FluxPointsEstimator run per instrument group of datasets
        for dataset, energy_edge in zip(datasets, energy_edges, strict=True):
            self._set_fpe(energy_edge)
            flux_points = self.fpe.run(datasets=dataset)
            flux_points.name = dataset.names

            # Record provenance metadata, tagging the Asgardpy version
            flux_points.meta["creation"] = CreatorMetaData()
            flux_points.meta["creation"].creator += f", Asgardpy {__public_version__}"
            flux_points.meta["optional"] = {
                "instrument": flux_points.name,
            }

            self.flux_points.append(flux_points)

    def _set_fpe(self, energy_bin_edges):
        """
        Setup the Gammapy FluxPointsEstimator function with all the
        provided parameters.
        """
        fpe_settings = self.config.flux_points_params.parameters

        self.fpe = FluxPointsEstimator(
            energy_edges=energy_bin_edges,
            source=self.config.target.source_name,
            n_jobs=self.config.general.n_jobs,
            parallel_backend=self.config.general.parallel_backend,
            reoptimize=self.config.flux_points_params.reoptimize,
            **fpe_settings,
        )

    def _sort_datasets_info(self):
        """
        The given list of datasets may contain sub-instrument level datasets.
        With the help of the dict information for instrument specific name and
        spectral energy edges, this function, sorts the datasets and returns
        them to be passed to the Flux Points Estimator function.

        Returns
        -------
        sorted_datasets: List of Datasets object.
        sorted_energy_edges: List of energy edges for flux points estimation
            for respective instruments' datasets
        """
        dataset_name_list = self.datasets.names
        sorted_datasets = []
        sorted_energy_edges = []

        for i, name in enumerate(self.instrument_spectral_info["name"]):
            dataset_list = []
            # Group datasets whose name contains the instrument name
            # (substring match, so sub-instrument datasets are included)
            for j, dataset_names in enumerate(dataset_name_list):
                if name in dataset_names:
                    dataset_list.append(self.datasets[j])
            # Only keep instruments that matched at least one dataset,
            # so the two returned lists stay aligned
            if len(dataset_list) != 0:
                sorted_energy_edges.append(self.instrument_spectral_info["spectral_energy_ranges"][i])
                dataset_list = Datasets(dataset_list)
                sorted_datasets.append(dataset_list)

        return sorted_datasets, sorted_energy_edges
def test_dataset1d(base_config_1d):
    """Test for creating 1D stacked DL4 dataset."""

    from gammapy.datasets import SpectrumDatasetOnOff

    analysis = AsgardpyAnalysis(base_config_1d)

    # Use a custom safe-energy mask of 200 GeV - 10 TeV
    analysis.config.dataset1d.instruments[0].dataset_info.safe_mask.methods = ["custom-mask"]
    analysis.config.dataset1d.instruments[0].dataset_info.safe_mask.parameters = {}
    analysis.config.dataset1d.instruments[0].dataset_info.safe_mask.parameters["min"] = "200 GeV"
    analysis.config.dataset1d.instruments[0].dataset_info.safe_mask.parameters["max"] = "10 TeV"

    analysis.get_1d_datasets()

    assert isinstance(analysis.datasets[0], SpectrumDatasetOnOff)
    # Upper edge of the stacked dataset's energy range matches the custom mask max
    assert round(analysis.datasets[0].energy_range[-1].data[0][0]) == 10

    analysis_1 = AsgardpyAnalysis(base_config_1d)
    # Replace the default DL3 glob pattern with an explicit one
    analysis_1.config.dataset1d.instruments[0].input_dl3[0].glob_pattern.pop("dl3_files", None)
    analysis_1.config.dataset1d.instruments[0].input_dl3[0].glob_pattern["dl3_files"] = "data/hess_*fits.gz"
    analysis_1.run(["datasets-1d"])

    assert len(analysis_1.datasets) == 2
34 | analysis.config.general.stacked_dataset = False 35 | analysis.config.dataset1d.instruments[0].dataset_info.background.region_finder_method = "reflected" 36 | analysis.config.dataset1d.instruments[0].dataset_info.background.parameters = {} 37 | analysis.config.dataset1d.instruments[0].dataset_info.safe_mask.methods = [] 38 | 39 | analysis.get_1d_datasets() 40 | 41 | assert len(analysis.datasets) == 4 42 | assert int(round(analysis.datasets[0].energy_range[-1].data[0][0])) == 100 43 | -------------------------------------------------------------------------------- /src/asgardpy/data/tests/test_dataset3d.py: -------------------------------------------------------------------------------- 1 | import pytest 2 | 3 | from asgardpy.analysis import AsgardpyAnalysis 4 | 5 | 6 | def test_dataset3d(base_config, caplog): 7 | """Test for creating 3D DL4 dataset.""" 8 | 9 | from gammapy.datasets import MapDataset 10 | 11 | base_config.dataset3d.instruments[0].input_dl3[1].glob_pattern["iso_diffuse"] = "" 12 | base_config.dataset3d.instruments[0].input_dl3[1].glob_pattern["gal_diffuse"] = "" 13 | 14 | base_config.dataset3d.instruments[0].dataset_info.background.exclusion.regions = [] 15 | 16 | analysis = AsgardpyAnalysis(base_config) 17 | 18 | analysis.get_3d_datasets() 19 | 20 | assert len(analysis.datasets) == 2 21 | assert isinstance(analysis.datasets[0], MapDataset) 22 | assert analysis.final_model[0].spectral_model.parameters[1].value == 0.015 23 | assert analysis.datasets[0].counts.geom.npix == (222, 222) 24 | assert caplog.record_tuples[-3][2] == "Using counts_map to create safe mask" 25 | 26 | 27 | def test_dataset3d_different_config(base_config, caplog): 28 | """Test for creating 3D DL4 dataset with target model info from DL3 files.""" 29 | 30 | analysis_0 = AsgardpyAnalysis(base_config) 31 | 32 | analysis_0.config.target.from_3d = True 33 | analysis_0.config.dataset3d.instruments[0].dataset_info.geom.from_events_file = False 34 | 35 | analysis_0.get_3d_datasets() 36 
def test_fermi_fits_file(gammapy_data_path):
    """Basic test on I/O of Fermi-LAT Fits files."""

    from astropy.io import fits

    from asgardpy.base.geom import get_source_position
    from asgardpy.config import AsgardpyConfig

    config = AsgardpyConfig()
    fits_file = f"{gammapy_data_path}fermipy-crab/ft1_test.fits"
    # Header of the first FITS extension — presumably the events table; confirm
    fits_header = fits.open(fits_file)[1].header

    source_pos = get_source_position(config.target.sky_position, fits_header)

    # Default target sky position in the config is RA = 83.633 deg (Crab)
    assert source_pos["center"].ra.deg == 83.633
from regions import PointSkyRegion

from asgardpy.analysis import AsgardpyAnalysis


def test_gpy_mwl(gpy_mwl_config, gammapy_data_path):
    """
    Test for running the 3D+1D joint analysis tutorial example from Gammapy.
    """

    from gammapy.datasets import FluxPointsDataset
    from gammapy.estimators import FluxPoints
    from gammapy.modeling.models import create_crab_spectral_model

    from asgardpy.data.target import set_models

    analysis = AsgardpyAnalysis(gpy_mwl_config)

    # Short aliases for the parameter lists of the target components.
    spec_pars = analysis.config.target.components[0].spectral.parameters
    spat_pars = analysis.config.target.components[0].spatial.parameters
    bkg_pars = analysis.config.target.components[1].spectral.parameters

    # Update model parameters
    # LP-amplitude
    spec_pars[0].value /= 1e4
    spec_pars[0].min = 1.0e-13
    spec_pars[0].max = 0.01
    spec_pars[0].frozen = False

    # LP-reference
    spec_pars[1].value *= 1e3
    spec_pars[1].min = 0.001
    spec_pars[1].max = 100

    # LP-alpha
    spec_pars[2].min = 0.5
    spec_pars[2].max = 5.0
    spec_pars[2].frozen = False

    # LP-beta
    spec_pars[3].min = 0.001
    spec_pars[3].max = 1.0
    spec_pars[3].frozen = False

    # Spatial-lon
    spat_pars[0].error = 1.0e-6
    spat_pars[0].min = 83.0
    spat_pars[0].max = 84.0

    # Spatial-lat
    spat_pars[1].error = 1.0e-6
    spat_pars[1].min = -90
    spat_pars[1].max = +90

    # FoV-bkg-Norm - Not being read exactly
    bkg_pars[0].min = 0.0
    bkg_pars[0].max = 10.0
    bkg_pars[0].frozen = False

    analysis.run(["datasets-3d"])
    analysis.run(["datasets-1d"])

    # Include HAWC Flux Points
    # Read to Gammapy objects
    filename = f"{gammapy_data_path}hawc_crab/HAWC19_flux_points.fits"
    fp_hawc = FluxPoints.read(filename, reference_model=create_crab_spectral_model("meyer"))
    fpd_hawc = FluxPointsDataset(data=fp_hawc, name="HAWC")

    analysis.datasets.append(fpd_hawc)

    # Update other dataset info
    analysis.dataset_name_list.append("HAWC")

    """
    # FPE to only run for Fermi and HESS datasets, as HAWC is already estimated.
    analysis.instrument_spectral_info["name"].append("HAWC")

    hawc_en = np.array([1, 1.78, 3.16, 5.62, 10.0, 17.8, 31.6, 56.2, 100, 177, 316]) * u.TeV
    analysis.instrument_spectral_info["spectral_energy_ranges"].append(hawc_en)
    analysis.instrument_spectral_info["en_bins"] += 10
    analysis.instrument_spectral_info["DoF"] += 10
    """

    # Reset models to the updated dataset
    analysis.datasets, analysis.final_model = set_models(
        analysis.config.target,
        analysis.datasets,
        analysis.dataset_name_list,
        models=analysis.final_model,
    )

    # Update Fit energy range
    analysis.config.fit_params.fit_range.max = "300 TeV"

    analysis.run(["fit"])
    analysis.get_flux_points()

    assert analysis.fit_result.success is True
    assert len(analysis.datasets) == 3
    assert len(analysis.flux_points) == 2
    assert analysis.datasets[1].counts.geom.region is None


def test_3d_hess_1d_magic(gpy_hess_magic):
    """Test for running HESS (3D) + MAGIC (1D) joint analysis."""

    analysis = AsgardpyAnalysis(gpy_hess_magic)

    # Switch the 3D background maker to the ring method.
    bkg_info = analysis.config.dataset3d.instruments[0].dataset_info.background
    bkg_info.method = "ring"
    bkg_info.parameters = {
        "r_in": "1 deg",
        "width": "3 deg",
    }

    analysis.run(["datasets-3d", "datasets-1d", "fit"])

    assert int(analysis.datasets[0].gti.time_sum.value) == 5056
    assert isinstance(analysis.datasets[1].counts.geom.region, PointSkyRegion)


def test_gpy_mwl_failsafe(gpy_hess_magic):
    """Test for checking some failsafe options."""
    import pytest

    analysis_1 = AsgardpyAnalysis(gpy_hess_magic)
    with pytest.raises(ValueError):
        # Invalid exclusion-region type must raise.
        excl_regions = analysis_1.config.dataset1d.instruments[0].dataset_info.background.exclusion.regions
        excl_regions[0].type = "a"
        analysis_1.run(["datasets-1d"])

    analysis_2 = AsgardpyAnalysis(gpy_hess_magic)
    with pytest.raises(ValueError):
        # Empty component name with a valid region type must also raise.
        analysis_2.config.target.components[0].name = ""
        excl_regions = analysis_2.config.dataset1d.instruments[0].dataset_info.background.exclusion.regions
        excl_regions[0].type = "CircleSkyRegion"
        analysis_2.run(["datasets-1d"])

    analysis_3 = AsgardpyAnalysis(gpy_hess_magic)
    analysis_3.config.target.source_name = ""
    analysis_3.config.target.components[0].name = "Crab Nebula"
    analysis_3.run(["datasets-3d", "datasets-1d"])

    assert analysis_3.final_model.names[0] == ""
import pytest


def test_models_from_config():
    """Test reading models from asgardpy config."""

    from asgardpy.config import AsgardpyConfig, get_model_template
    from asgardpy.data.target import read_models_from_asgardpy_config, set_models

    # Read each spectral-model template, blank out its EBL reference, and
    # build the corresponding Gammapy models.
    models = {}
    for tag in ["eclp", "bpl2", "lp", "fov"]:
        config = AsgardpyConfig.read(get_model_template(tag))
        config.target.components[0].spectral.ebl_abs.reference = ""
        models[tag] = read_models_from_asgardpy_config(config.target)

    assert models["eclp"][0].spectral_model.tag[0] == "ExpCutoffLogParabolaSpectralModel"
    assert models["bpl2"][0].spectral_model.tag[0] == "BrokenPowerLaw2SpectralModel"
    assert models["lp"][0].spectral_model.tag[0] == "LogParabolaSpectralModel"
    assert models["fov"][0].spectral_model.tag[0] == "PowerLawNormSpectralModel"
    assert models["fov"][0].spatial_model.tag[0] == "ConstantSpatialModel"

    # Exception for empty models information in config.
    with pytest.raises(TypeError):
        _, _ = set_models()


def test_set_models(base_config, gammapy_data_path):
    """Test non-standard components of Target module."""

    from asgardpy.analysis import AsgardpyAnalysis
    from asgardpy.data.target import set_models

    ebl_file_name = "ebl_franceschini_2017.fits.gz"
    ebl_file = f"{gammapy_data_path}ebl/{ebl_file_name}"
    model_file_0 = f"{gammapy_data_path}fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml"
    model_file_1 = f"{gammapy_data_path}fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml"

    base_config.target.components[0].spectral.ebl_abs.filename = ebl_file

    analysis_0 = AsgardpyAnalysis(base_config)
    analysis_1 = AsgardpyAnalysis(base_config)
    analysis_2 = AsgardpyAnalysis(base_config)

    # Check when using create_source_skymodel function
    analysis_0.config.target.from_3d = True
    analysis_2.config.target.from_3d = True

    analysis_0.run(["datasets-3d"])
    analysis_2.run(["datasets-3d"])
    # Check when using read_models_from_asgardpy_config
    analysis_1.run(["datasets-1d"])

    analysis_1.config.target.source_name = "4FGL J0534.5+2201i"
    analysis_0.config.target.source_name = "Crab Nebula"

    def _apply_models(analysis, **kwargs):
        # Small shorthand for the repeated set_models call pattern.
        return set_models(
            analysis.config.target,
            analysis.datasets,
            datasets_name_list=None,
            **kwargs,
        )

    data_0, model_0 = _apply_models(analysis_0, models=model_file_0)
    data_1, model_1 = _apply_models(analysis_0)
    data_2, model_2 = _apply_models(analysis_1)

    # Check when not providing target source name for creating the center of ROI in the exclusion mask
    analysis_2.config.target.source_name = ""
    data_3, model_3 = _apply_models(analysis_2)

    with pytest.raises(KeyError):
        _, _ = _apply_models(analysis_0, models=model_file_1)
    with pytest.raises(TypeError):
        _, _ = _apply_models(analysis_0, models=1)

    assert model_0[0].datasets_names == ["Fermi-LAT_00", "Fermi-LAT_01"]
    assert model_1[0].spectral_model.model2.filename.name == ebl_file_name
    assert model_2[0].spectral_model.model2.filename.name == ebl_file_name
    assert analysis_0.final_model[0].spectral_model.model2.filename.name == ebl_file_name
    assert model_3[0].name == ""
"update_aux_info_from_fermi_xml", 27 | ] 28 | -------------------------------------------------------------------------------- /src/asgardpy/gammapy/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/src/asgardpy/gammapy/tests/__init__.py -------------------------------------------------------------------------------- /src/asgardpy/gammapy/tests/test_xml_model.py: -------------------------------------------------------------------------------- 1 | from pathlib import Path 2 | 3 | from gammapy.modeling.models import Models 4 | 5 | from asgardpy.gammapy.read_models import read_fermi_xml_models_list 6 | 7 | 8 | def test_xml_only_source_models(gammapy_data_path): 9 | """Test reading various different Fermi-XML source models.""" 10 | from asgardpy.config import AsgardpyConfig 11 | 12 | config = AsgardpyConfig() 13 | list_source_models = [] 14 | 15 | dl3_aux_path = Path(f"{gammapy_data_path}fermipy-crab/") 16 | 17 | diffuse_models = {} 18 | 19 | xml_file = dl3_aux_path / "test_fermi_xml_models.xml" 20 | config.target.source_name = "4FGL J0534.5+2201i" 21 | config.target.from_3d = True 22 | 23 | list_source_models, _ = read_fermi_xml_models_list( 24 | list_source_models, dl3_aux_path, xml_file, diffuse_models, config.target 25 | ) 26 | list_source_models = Models(list_source_models) 27 | 28 | assert list_source_models[0].spatial_model.tag[0] == "GaussianSpatialModel" 29 | assert list_source_models[0].spectral_model.tag[1] == "lp" 30 | assert list_source_models[1].spatial_model.tag[0] == "TemplateSpatialModel" 31 | assert list_source_models[2].spatial_model.tag[0] == "PointSpatialModel" 32 | assert list_source_models[2].spectral_model.tag[1] == "ecpl" 33 | assert list_source_models[3].spectral_model.tag[1] == "pl" 34 | assert list_source_models[4].spectral_model.tag[1] == "ecpl" 35 | assert list_source_models[5].spectral_model.tag[1] 
== "secpl-4fgl-dr3" 36 | assert list_source_models[6].spectral_model.tag[1] == "bpl" 37 | assert list_source_models[7].spectral_model.tag[1] == "sbpl" 38 | assert list_source_models[8].spectral_model.tag[1] == "pl" 39 | assert list_source_models[8].spatial_model.tag[0] == "GaussianSpatialModel" 40 | assert list_source_models[-1].spectral_model.tag[1] == "pl-2" 41 | 42 | 43 | def test_xml_with_diffuse_models(gammapy_data_path): 44 | """Test reading Fermi-XML models with diffuse models included.""" 45 | 46 | list_source_models = [] 47 | diffuse_models = {} 48 | 49 | dl3_aux_path = Path(f"{gammapy_data_path}fermipy-crab/") 50 | 51 | xml_file = dl3_aux_path / "srcmdl_00.xml" 52 | diffuse_models["gal_diffuse"] = dl3_aux_path / "gll_iem_v07_cutout.fits" 53 | diffuse_models["iso_diffuse"] = dl3_aux_path / "iso_P8R3_SOURCE_V3_FRONT_v1.txt" 54 | diffuse_models["key_name"] = None 55 | 56 | list_source_models, diffuse_models = read_fermi_xml_models_list( 57 | list_source_models, dl3_aux_path, xml_file, diffuse_models 58 | ) 59 | list_source_models = Models(list_source_models) 60 | 61 | assert list_source_models[1].name == "4FGL J0534.5+2201s" 62 | assert list_source_models[-1].name == "diffuse-iem" 63 | assert list_source_models[-2].name == "fermi-diffuse-iso" 64 | -------------------------------------------------------------------------------- /src/asgardpy/io/__init__.py: -------------------------------------------------------------------------------- 1 | """ 2 | Input/Output Module 3 | 4 | isort:skip_file 5 | """ 6 | 7 | from asgardpy.io.input_dl3 import InputDL3Config, DL3Files, DL3InputFilePatterns 8 | from asgardpy.io.io_dl4 import ( 9 | InputDL4Config, 10 | DL4Files, 11 | DL4InputFilePatterns, 12 | DL4BaseConfig, 13 | get_reco_energy_bins, 14 | ) 15 | 16 | __all__ = [ 17 | "InputDL3Config", 18 | "DL3Files", 19 | "DL3InputFilePatterns", 20 | "DL4Files", 21 | "DL4InputFilePatterns", 22 | "InputDL4Config", 23 | "DL4BaseConfig", 24 | "get_reco_energy_bins", 25 | ] 26 | 
-------------------------------------------------------------------------------- /src/asgardpy/io/input_dl3.py: -------------------------------------------------------------------------------- 1 | """ 2 | Basic classes defining Input Config for DL3 files and some functions to 3 | retrieve the DL3 files information. 4 | 5 | Currently supporting files following enrico/fermipy for Fermi-LAT data for 3D 6 | Dataset and DL3 files that follow GADF v0.3 and can be directly read by Gammapy, 7 | for 1D Dataset. 8 | """ 9 | 10 | import logging 11 | 12 | from asgardpy.base import BaseConfig, PathType 13 | 14 | __all__ = ["InputDL3Config", "DL3Files", "DL3InputFilePatterns"] 15 | 16 | EXPECTED_DL3_RANGE = ["gadf-dl3", "lat", "lat-aux", "hawc"] 17 | 18 | 19 | # Basic Components for the DL3 Input Config 20 | class DL3InputFilePatterns(BaseConfig): 21 | """ 22 | Config section for list of file patterns to use for fetching relevant DL3 23 | files. 24 | """ 25 | 26 | events: str = "*events.fits*" 27 | edisp: str = "*DRM.fits*" 28 | exposure: str = "*BinnedMap.fits*" 29 | xml_model: str = "*out.xml" 30 | psf: str = "*psf.fits*" 31 | 32 | dl3_files: str = "dl3*fits" 33 | 34 | gal_diffuse: str = "gll_iem_v*.fits*" 35 | iso_diffuse: str = "iso_P8R3_SOURCE_V*_*.txt" 36 | 37 | en_est: str = "*NN*fits.gz" 38 | transit: str = "TransitsMap*fits.gz" 39 | 40 | 41 | class InputDL3Config(BaseConfig): 42 | """ 43 | Config section for main information on getting the relevant DL3 files. 44 | """ 45 | 46 | type: str = "type" 47 | input_dir: PathType = "None" 48 | glob_pattern: dict = {} 49 | 50 | 51 | # Main Classes for I/O 52 | class DL3Files: 53 | """ 54 | A general class to retrieve information from given DL3 files, along with 55 | other auxiliary files for neighbouring sources, if provided. 
56 | """ 57 | 58 | def __init__(self, dir_dict, log=None): 59 | if not log: 60 | self._set_logging() 61 | else: 62 | self.log = log 63 | 64 | self.dl3_path = dir_dict.input_dir 65 | 66 | self.dl3_type = dir_dict.type 67 | self._check_dl3_type() 68 | 69 | self.glob_dict = dir_dict.glob_pattern 70 | 71 | self.events_files = None 72 | self.edrm_files = None 73 | self.xml_files = None 74 | self.expmap_files = None 75 | self.psf_files = None 76 | self.gal_diff_files = None 77 | self.iso_diff_files = None 78 | self.dl3_index_files = None 79 | self.transit = None 80 | 81 | self.xml_f = None 82 | self.gal_diff_f = None 83 | 84 | def _set_logging(self): 85 | self.log = logging.getLogger(__name__) 86 | self.log.setLevel(logging.INFO) 87 | 88 | def _check_dl3_type(self): 89 | if self.dl3_type.lower() not in EXPECTED_DL3_RANGE: 90 | self.log.error("%s is not in the expected range for DL3 files", self.dl3_type) 91 | 92 | def prepare_lat_files(self, key, file_list): 93 | """ 94 | Prepare a list of LAT files following a particular key. If there are no 95 | distinct key types of files, the value is None. 96 | """ 97 | # Try to combine LAT and LAT-AUX files 98 | self.list_dl3_files() 99 | file_list = self.select_unique_files(key, file_list) 100 | 101 | return file_list 102 | 103 | def list_dl3_files(self): 104 | """ 105 | From a given DL3 files path, categorize the different types of DL3 106 | files, to be used for further analysis. 107 | 108 | The dl3_type of 'gadf-dl3' is used for all GADF v0.3 following DL3 109 | files that can be directly read by Gammapy, for 1D Datasets. 
110 | """ 111 | match self.dl3_type.lower(): 112 | case "lat": 113 | self.events_files = sorted(list(self.dl3_path.glob(self.glob_dict["events"]))) 114 | self.edrm_files = sorted(list(self.dl3_path.glob(self.glob_dict["edisp"]))) 115 | self.xml_files = sorted(list(self.dl3_path.glob(self.glob_dict["xml_model"]))) 116 | self.expmap_files = sorted(list(self.dl3_path.glob(self.glob_dict["exposure"]))) 117 | self.psf_files = sorted(list(self.dl3_path.glob(self.glob_dict["psf"]))) 118 | 119 | case "lat-aux": 120 | self.gal_diff_files = sorted(list(self.dl3_path.glob(self.glob_dict["gal_diffuse"]))) 121 | self.iso_diff_files = sorted(list(self.dl3_path.glob(self.glob_dict["iso_diffuse"]))) 122 | 123 | case "hawc": 124 | self.transit = sorted(list(self.dl3_path.glob(self.glob_dict["transit"]))) 125 | # All DL3 index files for a given energy estimator type 126 | self.dl3_index_files = sorted(list(self.dl3_path.glob(self.glob_dict["en_est"]))) 127 | 128 | def select_unique_files(self, key, file_list): 129 | """ 130 | Select Unique files from all of the provided LAT files, as per the 131 | given key. If there are no distinct key types of files, the value is None. 
132 | """ 133 | if self.dl3_type.lower() in ["lat"]: 134 | var_list = [ 135 | "events_files", 136 | "edrm_files", 137 | "expmap_files", 138 | "psf_files", 139 | ] 140 | file_list["xml_file"] = self.xml_files[0] 141 | 142 | if self.dl3_type.lower() == "lat-aux": 143 | var_list = [] 144 | if key: 145 | if "0" not in key: # For fermipy files, the diffuse files are already unique 146 | var_list = [ 147 | "iso_diff_files", 148 | ] 149 | file_list["iso_diff_file"] = self.iso_diff_files[0] 150 | file_list["gal_diff_file"] = self.gal_diff_files[0] 151 | 152 | if len(var_list) > 0: 153 | for _v in var_list: 154 | if key is not None: 155 | filtered = [K for K in getattr(self, _v) if key in str(K.name)] 156 | if len(filtered) == 1: 157 | self.log.info("Selecting the file with name containing %s", key) 158 | setattr(self, _v.replace("_files", "_f"), filtered[0]) 159 | else: 160 | raise ValueError( 161 | "Variable {%s} does not contain one element after filtering by {%s}", 162 | getattr(self, _v), 163 | key, 164 | ) 165 | else: 166 | self.log.info("No distinct key provided, selecting the first file in the list") 167 | setattr(self, _v.replace("_files", "_f"), getattr(self, _v)[0]) 168 | 169 | file_list[_v.replace("files", "file")] = getattr(self, _v.replace("_files", "_f")) 170 | 171 | return file_list 172 | -------------------------------------------------------------------------------- /src/asgardpy/io/io_dl4.py: -------------------------------------------------------------------------------- 1 | """ 2 | Basic classes defining Input Config for DL4 files and some functions to 3 | retrieve information for the DL4 to DL5 processes. 
4 | """ 5 | 6 | import logging 7 | import re 8 | from enum import Enum 9 | from pathlib import Path 10 | 11 | from gammapy.datasets import DATASET_REGISTRY, Datasets 12 | from gammapy.modeling.models import Models 13 | 14 | from asgardpy.base.base import BaseConfig, PathType 15 | from asgardpy.base.geom import MapAxesConfig, get_energy_axis 16 | 17 | __all__ = [ 18 | "InputDL4Config", 19 | "DL4Files", 20 | "DL4InputFilePatterns", 21 | "DL4BaseConfig", 22 | "get_reco_energy_bins", 23 | ] 24 | 25 | 26 | class DatasetTypeEnum(str, Enum): 27 | """ 28 | Config section for list of Dataset types in Gammapy. 29 | """ 30 | 31 | MapDataset = "MapDataset" 32 | MapDatasetOnOff = "MapDatasetOnOff" 33 | SpectrumDataset = "SpectrumDataset" 34 | SpectrumDatasetOnOff = "SpectrumDatasetOnOff" 35 | FluxPointsDataset = "FluxPointsDataset" 36 | 37 | 38 | class DL4FormatEnum(str, Enum): 39 | """ 40 | Config section for list of formats for Datasets in Gammapy. 41 | """ 42 | 43 | ogip = "ogip" 44 | ogip_sherpa = "ogip-sherpa" 45 | gadf = "gadf" 46 | gadf_sed = "gadf-sed" 47 | 48 | 49 | class DL4InputFilePatterns(BaseConfig): 50 | """ 51 | Config section for list of file patterns to use for fetching relevant DL4 52 | files. 53 | """ 54 | 55 | dl4_files: str = "pha*.fits*" 56 | dl4_model_files: str = "model*yaml" 57 | 58 | 59 | class InputDL4Config(BaseConfig): 60 | """ 61 | Config section for main information on getting the relevant DL4 files. 
62 | """ 63 | 64 | type: DatasetTypeEnum = DatasetTypeEnum.MapDataset 65 | input_dir: PathType = "None" 66 | # Can be OGIP format (Stacked or unstacked obs) or fits format (stacked obs) 67 | glob_pattern: dict = {} 68 | dl4_format: DL4FormatEnum = DL4FormatEnum.gadf 69 | 70 | 71 | class DL4BaseConfig(BaseConfig): 72 | """Config section for DL4 Dataset for a given instrument.""" 73 | 74 | dl4_dataset: InputDL4Config = InputDL4Config() 75 | spectral_energy_range: MapAxesConfig = MapAxesConfig() 76 | 77 | 78 | # Main class for DL4 I/O 79 | class DL4Files: 80 | """ 81 | A general class to retrieve information from given DL4 files. 82 | """ 83 | 84 | def __init__(self, dl4_dataset_info, log=None): 85 | self.dl4_dataset_info = dl4_dataset_info 86 | self.dl4_dataset = dl4_dataset_info.dl4_dataset 87 | self.dl4_type = self.dl4_dataset.type 88 | self.dl4_path = None 89 | self.dl4_file = None 90 | self.dl4_model = None 91 | 92 | if Path(self.dl4_dataset.input_dir).is_file(): 93 | self.dl4_file = Path(self.dl4_dataset.input_dir) 94 | else: 95 | self.dl4_path = Path(self.dl4_dataset.input_dir) 96 | 97 | if not log: 98 | self._set_logging() 99 | else: 100 | self.log = log 101 | 102 | def _set_logging(self): 103 | self.log = logging.getLogger(__name__) 104 | self.log.setLevel(logging.INFO) 105 | 106 | def fetch_dl4_files_by_filenames(self, all_dl4_files, obs_ids): 107 | """ 108 | Assuming a simple nomenclature from gammapy on storing DL4 datasets 109 | names as pha_obs[OBS_ID].fits or obs_[OBS_ID].fits i.e. a single integer 110 | in the filename, being the OBS_ID or the DL4 dataset name. 111 | """ 112 | dl4_file_list = [] 113 | for dl4_files in all_dl4_files: 114 | obs_num = int(re.findall(r"\d+", dl4_files.name)[0]) 115 | if obs_num in obs_ids: 116 | dl4_file_list.append(dl4_files) 117 | return dl4_file_list 118 | 119 | def read_dl4_file(self, filename): 120 | """ 121 | Read a single file, which may be serialized in FITS or yaml format. 
122 | """ 123 | if str(filename)[-4:] == "yaml": 124 | return Datasets.read(filename=filename) 125 | elif str(filename)[-4:] in ["fits", "s.gz"]: 126 | dataset_ = DATASET_REGISTRY.get_cls(self.dl4_type)().read( 127 | filename=filename, format=self.dl4_dataset.dl4_format 128 | ) 129 | return Datasets(dataset_) 130 | else: 131 | return None 132 | 133 | def get_dl4_files(self, observation_config): 134 | """ 135 | Fetch the required DL4 files from the given directory path, file glob 136 | search and possible list of observation ids to select the dataset files 137 | from the full list in the directory. 138 | 139 | If Model files are also given, fetch them as well 140 | """ 141 | dl4_model_files = [] 142 | 143 | all_dl4_files = sorted(list(self.dl4_path.glob(self.dl4_dataset.glob_pattern["dl4_files"]))) 144 | # Get model files as well 145 | if "dl4_model_files" in self.dl4_dataset.glob_pattern.keys(): 146 | dl4_model_files = sorted(list(self.dl4_path.glob(self.dl4_dataset.glob_pattern["dl4_model_files"]))) 147 | 148 | if len(all_dl4_files) == 0: 149 | self.log.error("No datasets found in %s", self.dl4_path) 150 | 151 | obs_ids = observation_config.obs_ids 152 | if len(obs_ids) == 0: 153 | # No filtering required based on observation ids 154 | dl4_file_list = all_dl4_files 155 | else: 156 | dl4_file_list = self.fetch_dl4_files_by_filenames(all_dl4_files, obs_ids) 157 | 158 | self.log.info("List of DL4 files are: %s", dl4_file_list) 159 | 160 | return dl4_file_list, dl4_model_files 161 | 162 | def get_dl4_dataset(self, observation_config=None): 163 | """ 164 | Read the corresponding DL4 dataset with the list of files provided, 165 | along with the dataset format and stack them in a Datasets object. 
166 | """ 167 | if self.dl4_file: 168 | datasets = Datasets.read(filename=self.dl4_file) 169 | 170 | elif self.dl4_path: 171 | dl4_file_list, dl4_model_files = self.get_dl4_files(observation_config) 172 | 173 | if len(dl4_model_files) == 0: 174 | datasets = Datasets() 175 | for dl4_file in dl4_file_list: 176 | dataset = self.read_dl4_file(dl4_file) 177 | datasets.append(dataset[0]) 178 | else: 179 | # Assuming a single DL4 file and model 180 | datasets = self.read_dl4_file(dl4_file_list[0]) 181 | datasets.models = Models.read(dl4_model_files[0]) 182 | 183 | return datasets 184 | 185 | def get_spectral_energies(self): 186 | """ 187 | Get the spectral energy information for each Instrument Dataset. 188 | """ 189 | energy_axes = self.dl4_dataset_info.spectral_energy_range 190 | 191 | if len(energy_axes.axis_custom.edges) > 0: 192 | energy_bin_edges = get_energy_axis(energy_axes, only_edges=True, custom_range=True) 193 | else: 194 | energy_bin_edges = get_energy_axis( 195 | energy_axes, 196 | only_edges=True, 197 | ) 198 | 199 | return energy_bin_edges 200 | 201 | 202 | def get_reco_energy_bins(dataset, en_bins): 203 | """ 204 | Calculate the total number of fit reco energy bins in the given dataset 205 | and add to the total value. 
import pytest

from asgardpy.config import AsgardpyConfig


def test_input_dl3(caplog):
    """Testing basic fail safes for DL3Files class."""

    from asgardpy.io.input_dl3 import DL3Files

    # The default config carries an unknown DL3 "type", which must be logged.
    dl3_config = AsgardpyConfig().dataset3d.instruments[0].input_dl3[0]
    dl3_files = DL3Files(dir_dict=dl3_config, log=None)

    assert dl3_files.log.name == "asgardpy.io.input_dl3"
    assert dl3_files.log.level == 20
    assert caplog.record_tuples[0][2] == "type is not in the expected range for DL3 files"


def test_io_dl4(caplog):
    """Testing basic fail safes for DL4Files class."""

    from asgardpy.io.io_dl4 import DL4Files

    config = AsgardpyConfig()
    dl4_info = config.dataset1d.instruments[0].dl4_dataset_info
    dl4_info.dl4_dataset.glob_pattern["dl4_files"] = "dl4*"
    obs_config = config.dataset1d.instruments[0].dataset_info.observation

    dl4_files = DL4Files(dl4_dataset_info=dl4_info, log=None)
    found_files, _ = dl4_files.get_dl4_files(obs_config)

    assert dl4_files.log.name == "asgardpy.io.io_dl4"
    assert dl4_files.log.level == 20
    assert caplog.record_tuples[0][2] == "No datasets found in ."
    # No files found: dividing by the list length must raise.
    with pytest.raises(ZeroDivisionError):
        1 / len(found_files)


def test_io_dl4_w_models(gammapy_data_path):
    """Testing some more IO options when DL4 have associated models."""

    from asgardpy.io.io_dl4 import DL4Files

    config = AsgardpyConfig()
    dl4_info = config.dataset1d.instruments[0].dl4_dataset_info
    obs_config = config.dataset1d.instruments[0].dataset_info.observation

    dl4_info.dl4_dataset.input_dir = f"{gammapy_data_path}fermi-3fhl-crab/"
    dl4_info.dl4_dataset.glob_pattern["dl4_files"] = "Fermi*data*fits"
    dl4_info.dl4_dataset.glob_pattern["dl4_model_files"] = "Fermi*models.yaml"

    # FITS dataset with an associated model file.
    dl4_files = DL4Files(dl4_dataset_info=dl4_info, log=None)
    datasets = dl4_files.get_dl4_dataset(obs_config)

    assert len(datasets) == 1

    # Same dataset serialized as YAML.
    dl4_info.dl4_dataset.glob_pattern["dl4_files"] = "Fermi*data*yaml"
    dl4_files = DL4Files(dl4_dataset_info=dl4_info, log=None)
    datasets = dl4_files.get_dl4_dataset(obs_config)

    assert datasets[0].tag == "MapDataset"

    # Unknown extensions are rejected with None.
    assert dl4_files.read_dl4_file("random.txt") is None
import numpy as np

from asgardpy.analysis import AsgardpyAnalysis
from asgardpy.stats import (
    check_model_preference_aic,
    check_model_preference_lrt,
    copy_target_config,
    fetch_all_analysis_fit_info,
    get_model_config_files,
    tabulate_best_fit_stats,
)


def test_preferred_model(base_config_1d):
    """
    Testing the script code of checking the preferred spectral model.
    """
    select_model_tags = ["lp", "bpl2", "ecpl", "pl", "eclp"]
    spec_model_temp_files = get_model_config_files(select_model_tags)

    main_analysis_list = {}
    spec_models_list = []

    # Build one analysis object per spectral-model template.
    for template in spec_model_temp_files:
        base_analysis = AsgardpyAnalysis(base_config_1d)
        base_analysis.config.fit_params.fit_range.min = "100 GeV"

        base_analysis.config.target.models_file = template

        analysis = AsgardpyAnalysis(base_analysis.config)

        copy_target_config(base_analysis, analysis)

        spec_tag = template.name.split(".")[0].split("_")[-1]
        spec_models_list.append(spec_tag)
        main_analysis_list[spec_tag] = {}

        main_analysis_list[spec_tag]["Analysis"] = analysis

    spec_models_list = np.array(spec_models_list)

    # Run Analysis Steps till Fit
    for tag in spec_models_list:
        main_analysis_list[tag]["Analysis"].run(["datasets-1d", "fit"])

    fit_success_list, stat_list, dof_list, pref_over_pl_chi2_list = fetch_all_analysis_fit_info(
        main_analysis_list, spec_models_list
    )

    # If any spectral model has at least 5 sigmas preference over PL
    best_sp_idx_lrt = np.nonzero(pref_over_pl_chi2_list == np.nanmax(pref_over_pl_chi2_list))[0]
    for idx in best_sp_idx_lrt:
        if pref_over_pl_chi2_list[idx] > 5:
            lrt_best_model = spec_models_list[idx]

    list_rel_p = check_model_preference_aic(stat_list, dof_list)

    best_sp_idx_aic = np.nonzero(list_rel_p == np.nanmax(list_rel_p))[0]

    aic_best_model = select_model_tags[best_sp_idx_aic[0]]

    stats_table = tabulate_best_fit_stats(spec_models_list, fit_success_list, main_analysis_list, list_rel_p)

    assert lrt_best_model == "lp"
    assert aic_best_model == "lp"
    assert len(stats_table.colnames) == 11

    # Check for bad comparisons, same dof
    p_val_0, g_sig_0, dof_0 = check_model_preference_lrt(4.4, 2.2, 2, 2)

    assert np.isnan(p_val_0)
np.isnan(p_val_0) 73 | -------------------------------------------------------------------------------- /src/asgardpy/stats/tests/test_pivot_energy.py: -------------------------------------------------------------------------------- 1 | from asgardpy.analysis import AsgardpyAnalysis 2 | from asgardpy.stats import fetch_pivot_energy 3 | 4 | 5 | def test_get_pivot_energy(gpy_hess_magic): 6 | """ 7 | Check the pivot energy for given fit model on a dataset. 8 | """ 9 | analysis = AsgardpyAnalysis(gpy_hess_magic) 10 | 11 | analysis.run(["datasets-3d", "datasets-1d"]) 12 | 13 | e_ref = fetch_pivot_energy(analysis) 14 | 15 | assert e_ref.value == 0.20085434771049843 16 | 17 | 18 | def test_get_pivot_energy_from_start(gpy_hess_magic): 19 | """ 20 | Check the pivot energy for given fit model on a dataset from the start of 21 | the AsgardpyAnalysis object. Test using the SpectralModel of ECPL 22 | and without any associated EBL absorption model. 23 | """ 24 | from asgardpy.config.operations import get_model_template 25 | 26 | new_model = get_model_template("ecpl2") 27 | gpy_hess_magic.target.models_file = new_model 28 | gpy_hess_magic.target.components[0].spectral.ebl_abs.reference = "" 29 | 30 | analysis = AsgardpyAnalysis(gpy_hess_magic) 31 | 32 | e_ref = fetch_pivot_energy(analysis) 33 | 34 | assert e_ref.value == 0.030128153004345924 35 | -------------------------------------------------------------------------------- /src/asgardpy/stats/utils.py: -------------------------------------------------------------------------------- 1 | """ 2 | Module containing additional utility functions for selecting a preferred model. 
3 | """ 4 | 5 | import numpy as np 6 | from astropy.table import QTable 7 | 8 | from asgardpy.config.operations import all_model_templates 9 | from asgardpy.stats.stats import check_model_preference_lrt 10 | 11 | __all__ = [ 12 | "fetch_all_analysis_fit_info", 13 | "get_model_config_files", 14 | "tabulate_best_fit_stats", 15 | "copy_target_config", 16 | ] 17 | 18 | 19 | def get_model_config_files(select_model_tags): 20 | """From the default model templates, select some.""" 21 | 22 | all_tags, template_files = all_model_templates() 23 | 24 | spec_model_temp_files = [] 25 | for tag in select_model_tags: 26 | spec_model_temp_files.append(template_files[np.where(all_tags == tag)[0][0]]) 27 | 28 | spec_model_temp_files = np.array(spec_model_temp_files) 29 | 30 | return spec_model_temp_files 31 | 32 | 33 | def get_spec_params_indices(aa_config): 34 | """ 35 | For copying the spectral flux amplitude and flux normalization energy, 36 | from one config to another, find the correct parameter indices within a 37 | given config. 
38 | """ 39 | par_names = [] 40 | for p in aa_config.config.target.components[0].spectral.parameters: 41 | par_names.append(p.name) 42 | par_names = np.array(par_names) 43 | 44 | amp_idx = None 45 | # For models without this parameter, name has not yet been included or 46 | # checked with Asgardpy 47 | if "amplitude" in par_names: 48 | amp_idx = np.where(par_names == "amplitude")[0][0] 49 | 50 | if "reference" in par_names: 51 | eref_idx = np.where(par_names == "reference")[0][0] 52 | else: 53 | eref_idx = np.where(par_names == "ebreak")[0][0] 54 | 55 | return amp_idx, eref_idx 56 | 57 | 58 | def copy_target_config(aa_config_1, aa_config_2): 59 | """From aa_config_1 update information in aa_config_2.""" 60 | 61 | amp_idx_1, eref_idx_1 = get_spec_params_indices(aa_config_1) 62 | amp_idx_2, eref_idx_2 = get_spec_params_indices(aa_config_2) 63 | 64 | # Have the same value of amplitude 65 | aa_config_2.config.target.components[0].spectral.parameters[amp_idx_2].value = ( 66 | aa_config_1.config.target.components[0].spectral.parameters[amp_idx_1].value 67 | ) 68 | # Have the same value of reference/e_break energy 69 | aa_config_2.config.target.components[0].spectral.parameters[eref_idx_2].value = ( 70 | aa_config_1.config.target.components[0].spectral.parameters[eref_idx_1].value 71 | ) 72 | # Have the same value of redshift value and EBL reference model 73 | aa_config_2.config.target.components[0].spectral.ebl_abs.redshift = aa_config_1.config.target.components[ 74 | 0 75 | ].spectral.ebl_abs.redshift 76 | 77 | # Make sure the source names are the same 78 | aa_config_2.config.target.source_name = aa_config_1.config.target.source_name 79 | aa_config_2.config.target.components[0].name = aa_config_1.config.target.components[0].name 80 | 81 | return aa_config_2 82 | 83 | 84 | def fetch_all_analysis_fit_info(main_analysis_list, spec_models_list): 85 | """ 86 | For a list of spectral models, with the AsgardpyAnalysis run till the fit 87 | step, get the relevant information 
for testing the model preference. 88 | """ 89 | fit_success_list = [] 90 | pref_over_pl_chi2_list = [] 91 | stat_list = [] 92 | dof_list = [] 93 | 94 | for tag in spec_models_list: 95 | dict_tag = main_analysis_list[tag]["Analysis"].instrument_spectral_info 96 | dict_pl = main_analysis_list["pl"]["Analysis"].instrument_spectral_info 97 | 98 | # Collect parameters for AIC check 99 | stat = dict_tag["best_fit_stat"] 100 | dof = dict_tag["DoF"] 101 | 102 | fit_success = main_analysis_list[tag]["Analysis"].fit_result.success 103 | 104 | fit_success_list.append(fit_success) 105 | stat_list.append(stat) 106 | dof_list.append(dof) 107 | 108 | # Checking the preference of a "nested" spectral model (observed), 109 | # over Power Law. 110 | if tag == "pl": 111 | main_analysis_list[tag]["Pref_over_pl_chi2"] = 0 112 | main_analysis_list[tag]["Pref_over_pl_pval"] = 0 113 | main_analysis_list[tag]["DoF_over_pl"] = 0 114 | pref_over_pl_chi2_list.append(0) 115 | continue 116 | 117 | p_pl_x, g_pl_x, ndof_pl_x = check_model_preference_lrt( 118 | dict_pl["best_fit_stat"], 119 | dict_tag["best_fit_stat"], 120 | dict_pl["DoF"], 121 | dict_tag["DoF"], 122 | ) 123 | 124 | main_analysis_list[tag]["Pref_over_pl_chi2"] = g_pl_x 125 | pref_over_pl_chi2_list.append(g_pl_x) 126 | main_analysis_list[tag]["Pref_over_pl_pval"] = p_pl_x 127 | main_analysis_list[tag]["DoF_over_pl"] = ndof_pl_x 128 | 129 | fit_success_list = np.array(fit_success_list) 130 | 131 | # Only select fit results that were successful for comparisons 132 | stat_list = np.array(stat_list)[fit_success_list] 133 | dof_list = np.array(dof_list)[fit_success_list] 134 | pref_over_pl_chi2_list = np.array(pref_over_pl_chi2_list)[fit_success_list] 135 | 136 | return fit_success_list, stat_list, dof_list, pref_over_pl_chi2_list 137 | 138 | 139 | def tabulate_best_fit_stats(spec_models_list, fit_success_list, main_analysis_list, list_rel_p): 140 | """For a list of spectral models, tabulate the best fit information.""" 141 | 142 | 
fit_stats_table = [] 143 | 144 | for i, tag in enumerate(spec_models_list[fit_success_list]): 145 | info_ = main_analysis_list[tag]["Analysis"].instrument_spectral_info 146 | 147 | t = main_analysis_list[tag] 148 | 149 | ts_gof = round(info_["best_fit_stat"] - info_["max_fit_stat"], 3) 150 | t_fits = { 151 | "Spectral Model": tag.upper(), 152 | "TS of Best Fit": round(info_["best_fit_stat"], 3), 153 | "TS of Max Fit": round(info_["max_fit_stat"], 3), 154 | "TS of Goodness of Fit": ts_gof, 155 | "DoF of Fit": info_["DoF"], 156 | r"Significance ($\sigma$) of Goodness of Fit": round(info_["fit_chi2_sig"], 3), 157 | "p-value of Goodness of Fit": float(f"{info_['fit_pval']:.4g}"), 158 | "Pref over PL (chi2)": round(t["Pref_over_pl_chi2"], 3), 159 | "Pref over PL (p-value)": float(f"{t['Pref_over_pl_pval']:.4g}"), 160 | "Pref over PL (DoF)": t["DoF_over_pl"], 161 | "Relative p-value (AIC)": float(f"{list_rel_p[i]:.4g}"), 162 | } 163 | fit_stats_table.append(t_fits) 164 | stats_table = QTable(fit_stats_table) 165 | 166 | return stats_table 167 | -------------------------------------------------------------------------------- /src/asgardpy/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/chaimain/asgardpy/18dd8ae7e0a92a6ae71ae0a3adbd3ecc8ad1f81f/src/asgardpy/tests/__init__.py -------------------------------------------------------------------------------- /src/asgardpy/tests/config_gpy_mwl.yaml: -------------------------------------------------------------------------------- 1 | # Configuration file template for testing Fermi-LAT (3D, from DL4 dataset) + 2 | # HESS (1D, from DL4 dataset) joint analysis. 3 | 4 | # General settings 5 | general: 6 | log: 7 | level: info 8 | # filename: ../test_log.log 9 | # filemode: a 10 | # format: "%(asctime)s - %(message)s" 11 | # datefmt: "%d-%b-%y %H:%M:%S" 12 | outdir: . 
13 | n_jobs: 10 14 | parallel_backend: multiprocessing 15 | steps: [datasets-3d, datasets-1d, fit, flux-points] 16 | overwrite: true 17 | stacked_dataset: true 18 | 19 | # Target settings 20 | target: 21 | source_name: &source_name "Crab Nebula" 22 | sky_position: &source_pos {frame: icrs, lon: 83.633 deg, lat: 22.02 deg} 23 | use_uniform_position: true 24 | models_file: "." #"../gammapy-datasets/1.3/fermi-3fhl-crab/Fermi-LAT-3FHL_models.yaml" # 25 | # use_catalog: 4fgl 26 | datasets_with_fov_bkg_model: [] 27 | components: 28 | - name: *source_name 29 | type: SkyModel 30 | spectral: 31 | type: LogParabolaSpectralModel 32 | ebl_abs: 33 | reference: dominguez 34 | type: EBLAbsorptionNormSpectralModel 35 | redshift: 0.0 36 | alpha_norm: 1.0 37 | 38 | covariance: None 39 | from_3d: false 40 | 41 | # Instrument datasets with 3D info 42 | dataset3d: 43 | type: "3d" 44 | instruments: 45 | - name: Fermi-LAT 46 | input_dl4: true 47 | dl4_dataset_info: 48 | dl4_dataset: 49 | # type: SpectrumDatasetOnOff 50 | input_dir: "." # ../gammapy-datasets/1.3/fermi-3fhl-crab/Fermi-LAT-3FHL_datasets.yaml 51 | # glob_pattern: 52 | # dl4_files: "obs_*fits" 53 | dl4_format: "gadf" 54 | spectral_energy_range: 55 | name: energy 56 | axis: 57 | min: "10 GeV" 58 | max: "2 TeV" 59 | nbins: 5 60 | per_decade: false 61 | 62 | # Instrument datasets with 1D info 63 | dataset1d: 64 | type: "1d" 65 | instruments: 66 | - name: HESS 67 | input_dl4: true 68 | dataset_info: 69 | observation: 70 | obs_ids: [23523, 23526] 71 | dl4_dataset_info: 72 | dl4_dataset: 73 | type: SpectrumDatasetOnOff 74 | input_dir: "." #../gammapy-datasets/1.3/joint-crab/spectra/hess/ 75 | glob_pattern: 76 | dl4_files: "pha_obs*fits" 77 | dl4_format: "ogip" 78 | spectral_energy_range: 79 | name: energy 80 | axis: 81 | min: "1 TeV" 82 | max: "10 TeV" 83 | nbins: 5 84 | per_decade: true 85 | axis_custom: 86 | unit: TeV 87 | edges: [1.0, 1.5704178, 2.46621207, 3.87298335, 6.082202, 9.55159829, 15.] 
88 | 89 | # Fit parameters 90 | fit_params: 91 | fit_range: 92 | min: "10 GeV" 93 | max: "10 TeV" 94 | backend: minuit 95 | optimize_opts: {} 96 | covariance_opts: {} 97 | confidence_opts: {} 98 | store_trace: true 99 | 100 | flux_points_params: 101 | parameters: 102 | selection_optional: "all" 103 | reoptimize: false 104 | -------------------------------------------------------------------------------- /src/asgardpy/tests/config_hawc.yaml: -------------------------------------------------------------------------------- 1 | # Configuration file template for testing HAWC (3D DL3 files) 2 | 3 | # General settings 4 | general: 5 | log: 6 | level: info 7 | # filename: ../test_log.log 8 | # filemode: a 9 | # format: "%(asctime)s - %(message)s" 10 | # datefmt: "%d-%b-%y %H:%M:%S" 11 | outdir: ../ 12 | n_jobs: 10 13 | parallel_backend: multiprocessing 14 | steps: [datasets-3d, fit, flux-points] 15 | overwrite: true 16 | stacked_dataset: true 17 | 18 | # Target settings 19 | target: 20 | source_name: &source_name "4FGL J0534.5+2201i" 21 | sky_position: &source_pos {frame: icrs, lon: 83.6338333 deg, lat: 22.0145 deg} 22 | use_uniform_position: true 23 | # models_file: 24 | # use_catalog: 4fgl 25 | datasets_with_fov_bkg_model: [] 26 | components: 27 | - name: *source_name 28 | type: SkyModel 29 | spectral: 30 | type: LogParabolaSpectralModel 31 | parameters: 32 | - name: amplitude 33 | value: 1.0e-13 34 | unit: cm-2 s-1 TeV-1 35 | error: 1.5e-14 36 | min: 1.0e-15 37 | max: 0.01 38 | frozen: false 39 | - name: reference 40 | value: 9 41 | unit: TeV 42 | error: 0.0 43 | min: 0.0001 44 | max: 100.0 45 | frozen: true 46 | - name: alpha 47 | value: 2.7 48 | unit: '' 49 | error: 0.1 50 | min: 0.5 51 | max: 5.0 52 | frozen: false 53 | - name: beta 54 | value: 0.1 55 | unit: '' 56 | error: 0.001 57 | min: 1.0e-6 58 | max: 1.0 59 | frozen: false 60 | ebl_abs: 61 | reference: dominguez 62 | type: EBLAbsorptionNormSpectralModel 63 | redshift: 0.0 64 | alpha_norm: 1.0 65 | spatial: 66 | 
type: PointSpatialModel 67 | frame: icrs 68 | parameters: 69 | - name: lon_0 70 | value: 83.6338333 71 | unit: deg 72 | - name: lat_0 73 | value: 22.0145 74 | unit: deg 75 | covariance: None 76 | from_3d: false 77 | # roi_selection: 78 | # roi_radius: 5 deg 79 | # free_sources: ["4FGL J0521.7+2112", "4FGL J0528.3+1817", "4FGL J0536.2+1733", "4FGL J0534.5+2200"] 80 | 81 | # Instrument datasets with 3D info 82 | dataset3d: 83 | type: "3d" 84 | instruments: 85 | - name: HAWC 86 | input_dl3: 87 | - type: hawc 88 | input_dir: "." #"/home/chaitanya/software/gammapy-datasets/1.3/hawc/crab_events_pass4/" 89 | glob_pattern: 90 | en_est: "*NN*fits.gz" 91 | transit: "irfs/TransitsMap*fits.gz" 92 | dataset_info: 93 | name: HAWC 94 | # map_selection: [] 95 | geom: 96 | wcs: 97 | binsize: 0.02 deg 98 | proj: TAN 99 | map_frame_shape: 100 | width: 6 deg 101 | height: 6 deg 102 | reco_psf: True 103 | axes: 104 | - name: energy 105 | axis: 106 | min: "3.16 TeV" 107 | max: "31.6 TeV" 108 | nbins: 4 109 | per_decade: true 110 | axis_custom: 111 | edges: [3.16, 5.62, 10.0, 17.8, 31.6] 112 | unit: "TeV" 113 | - name: energy_true 114 | axis: 115 | min: "1 TeV" 116 | max: "1e3 TeV" 117 | nbins: 8 118 | per_decade: true 119 | observation: 120 | event_type: [5, 6, 7, 8, 9] 121 | background: 122 | # method: reflected 123 | # parameters: 124 | exclusion: 125 | target_source: true 126 | regions: 127 | - type: CircleAnnulusSkyRegion 128 | name: *source_name 129 | position: *source_pos 130 | parameters: 131 | rad_0: 1 deg 132 | rad_1: 30 deg 133 | - type: CircleSkyRegion 134 | name: "4FGL J0521.7+2112" 135 | parameters: 136 | region_radius: 0.5 deg 137 | safe_mask: 138 | methods: ["aeff-max"] 139 | parameters: 140 | aeff_percent: 10 141 | # min: 1 GeV 142 | # max: 1 TeV 143 | on_region: *source_pos 144 | # radius: 0.4 deg # Have to figure how to use this later 145 | containment_correction: true 146 | map_selection: [counts, background, exposure, edisp, psf] 147 | dl4_dataset_info: 148 | # 
dl4_dataset: 149 | # type: SpectrumDatasetOnOff 150 | # input_dir: "." 151 | # glob_pattern: "obs_*fits" 152 | # dl4_format: "gadf" 153 | spectral_energy_range: 154 | name: energy 155 | axis: 156 | min: "3.16 TeV" 157 | max: "31.6 TeV" 158 | nbins: 4 159 | per_decade: true 160 | 161 | # Fit parameters 162 | fit_params: 163 | fit_range: 164 | min: "3.16 TeV" 165 | max: "31.6 TeV" 166 | backend: minuit 167 | optimize_opts: {} 168 | covariance_opts: {} 169 | confidence_opts: {} 170 | store_trace: true 171 | 172 | flux_points_params: 173 | parameters: 174 | selection_optional: "all" 175 | reoptimize: false 176 | -------------------------------------------------------------------------------- /src/asgardpy/tests/config_test_ebl.yaml: -------------------------------------------------------------------------------- 1 | # Configuration file template for testing EBL absorption using 2 | # HESS (1D) data of PKS 2155-304 3 | 4 | # General settings 5 | general: 6 | log: 7 | level: info 8 | # filename: ../test_log.log 9 | # filemode: a 10 | # format: "%(asctime)s - %(message)s" 11 | # datefmt: "%d-%b-%y %H:%M:%S" 12 | outdir: . 
13 | n_jobs: 10 14 | parallel_backend: multiprocessing 15 | steps: [datasets-1d, fit, flux-points] 16 | overwrite: true 17 | stacked_dataset: true 18 | 19 | # Target settings 20 | target: 21 | source_name: &source_name "PKS 2155-304" 22 | sky_position: &source_pos {frame: icrs, lon: 329.71693844 deg, lat: -30.22558846 deg} 23 | use_uniform_position: true 24 | # models_file: 25 | components: 26 | - name: *source_name 27 | type: SkyModel 28 | spectral: 29 | type: PowerLawSpectralModel 30 | parameters: 31 | - name: amplitude 32 | value: 1.2e-11 33 | unit: cm-2 s-1 TeV-1 34 | error: 1.2e-12 35 | min: 1.0e-13 36 | max: 0.01 37 | frozen: false 38 | - name: reference 39 | value: 1.0 40 | unit: TeV 41 | error: 0.0 42 | min: 0.0001 43 | max: 100.0 44 | frozen: true 45 | - name: index 46 | value: 2.5 47 | unit: '' 48 | error: 1.0e-02 49 | min: 0.5 50 | max: 5.0 51 | frozen: false 52 | ebl_abs: 53 | reference: dominguez 54 | type: EBLAbsorptionNormSpectralModel 55 | redshift: 0.116 56 | alpha_norm: 1.0 57 | covariance: None 58 | from_3d: false 59 | 60 | # Instrument datasets with 1D info 61 | dataset1d: 62 | type: "1d" 63 | instruments: 64 | - name: HESS 65 | dataset_info: 66 | name: HESS 67 | input_dl4: true 68 | dl4_dataset_info: 69 | dl4_dataset: 70 | type: SpectrumDatasetOnOff 71 | input_dir: "." 
#../gammapy-datasets/1.3/PKS2155-steady/ 72 | glob_pattern: 73 | dl4_files: "pks2155-304_steady.fits.gz" 74 | dl4_format: "ogip" 75 | spectral_energy_range: 76 | name: energy 77 | axis: 78 | min: "0.2 TeV" 79 | max: "20 TeV" 80 | nbins: 5 81 | per_decade: true 82 | 83 | # Fit parameters 84 | fit_params: 85 | fit_range: 86 | min: "200 GeV" 87 | max: "20 TeV" 88 | 89 | flux_points_params: 90 | parameters: 91 | selection_optional: "all" 92 | reoptimize: false 93 | -------------------------------------------------------------------------------- /src/asgardpy/version.py: -------------------------------------------------------------------------------- 1 | """ 2 | Asgardpy version 3 | """ 4 | 5 | try: 6 | try: 7 | from ._dev_version import version 8 | except Exception: 9 | from ._version import version 10 | except Exception: # pragma: no cover 11 | import warnings 12 | 13 | warnings.warn( 14 | "Could not determine version; this indicates a broken installation." 15 | " Install from PyPI or from a local git repository." 16 | " Installing github's autogenerated source release tarballs " 17 | " does not include version information and should be avoided." 
18 | ) 19 | del warnings 20 | version = "0.0.0" 21 | 22 | __version__ = version 23 | 24 | # Save the public version for creating Tags for release 25 | from packaging.version import Version 26 | 27 | public_version = Version(__version__).public 28 | __public_version__ = public_version 29 | -------------------------------------------------------------------------------- /tox.ini: -------------------------------------------------------------------------------- 1 | [tox] 2 | minversion = 2.0 3 | envlist = lint, type, py{311,312,313}-test{-std,-quick} 4 | requires = 5 | tox >= 4 6 | setuptools >= 60 7 | pip >= 19.3.1 8 | 9 | [testenv] 10 | description = 11 | run tests 12 | quick: in parallel 13 | 14 | deps = 15 | pytest >= 7 16 | pytest-sugar 17 | pytest-xdist 18 | coverage 19 | pytest-cov 20 | quick: tqdm 21 | quick: requests 22 | commands = 23 | pip freeze 24 | std: pytest -v -m 'test_data or not test_data' --cov --cov-append --cov-report xml --pyargs src/asgardpy {posargs} 25 | quick: pytest -v -m 'test_data or not test_data' --cov --cov-append --cov-report xml --pyargs src/asgardpy {posargs} -n auto --dist loadscope 26 | 27 | # Pass through the following environment variables which may be needed for the CI 28 | passenv = 29 | HOME 30 | WINDIR 31 | LC_ALL 32 | LC_CTYPE 33 | CC 34 | CI 35 | TRAVIS 36 | GAMMAPY_DATA 37 | PKG_CONFIG_PATH 38 | 39 | # tox environments are constructed with so-called 'factors' (or terms) 40 | # separated by hyphens, e.g. test-devdeps-cov. Lines below starting with factor: 41 | # will only take effect if that factor is included in the environment name. 
To 42 | # see a list of example environments that can be run, along with a description, 43 | # run: 44 | # 45 | # tox -l -v 46 | 47 | [testenv:lint] 48 | description = run linters 49 | skip_install = true 50 | deps = 51 | # update commands for all the dev tools 52 | ruff >= 0.9.3 53 | isort >= 5.13.0 54 | commands = 55 | ruff check {posargs:src/asgardpy} 56 | isort {posargs:src/asgardpy} 57 | 58 | [testenv:type] 59 | description = run type checks 60 | deps = 61 | mypy > 1.14 62 | commands = 63 | mypy {posargs:src/asgardpy} 64 | --------------------------------------------------------------------------------