├── .gitattributes ├── .gitconfig ├── .github ├── dependabot.yml └── workflows │ ├── documentation.yml │ ├── python_publish.yml │ └── pythonpackage.yml ├── .gitignore ├── CHANGELOG.txt ├── LICENSE.txt ├── README.md ├── doc ├── Makefile ├── biblio.bib ├── logo │ ├── logo.odg │ ├── logo2.svg │ ├── logo2_without_text.svg │ ├── logo3.pdf │ ├── logo3.svg │ ├── logo3_white.svg │ ├── logo3_without_text.svg │ ├── logo3_without_text_white.svg │ ├── logo_circle_black.svg │ ├── logo_circle_white.svg │ ├── logo_square_black.svg │ └── logo_square_white.svg ├── scatches │ ├── LLHRatio_classes.odg │ ├── analysis_classes.odg │ ├── parameter_handling.odg │ └── parameter_handling.svg ├── sphinx │ ├── Makefile │ ├── _assets │ │ └── gh-pages-redirect.html │ ├── _templates │ │ └── versions.html │ ├── concepts │ │ ├── config.ipynb │ │ ├── datafields.ipynb │ │ ├── dataset.ipynb │ │ ├── detsigyield.ipynb │ │ ├── flux_definition.ipynb │ │ ├── index.rst │ │ ├── parameters.ipynb │ │ ├── pdf.ipynb │ │ └── source_definition.ipynb │ ├── conf.py │ ├── dev_docs │ │ ├── logging.ipynb │ │ └── unit_tests.rst │ ├── examples │ │ ├── index.rst │ │ └── timepdf.ipynb │ ├── faq │ │ ├── index.rst │ │ └── signal_generator.ipynb │ ├── index.rst │ ├── installation.rst │ ├── notes.rst │ ├── requirements.txt │ └── tutorials │ │ ├── getting_started.ipynb │ │ ├── index.rst │ │ ├── publicdata_ps.ipynb │ │ └── publicdata_ps_timedep.ipynb ├── user_manual.pdf └── user_manual.tex ├── examples ├── multiproc.py └── scrambling.py ├── pyproject.toml ├── requirements.txt ├── setup.cfg ├── setup.py ├── skyllh ├── __init__.py ├── _version.py ├── analyses │ ├── __init__.py │ └── i3 │ │ ├── __init__.py │ │ └── publicdata_ps │ │ ├── __init__.py │ │ ├── aeff.py │ │ ├── backgroundpdf.py │ │ ├── bkg_flux.py │ │ ├── detsigyield.py │ │ ├── mcbkg_ps.py │ │ ├── pdfratio.py │ │ ├── scripts │ │ └── mceq_atm_bkg.py │ │ ├── signal_generator.py │ │ ├── signalpdf.py │ │ ├── smearing_matrix.py │ │ ├── time_dependent_ps.py │ │ ├── time_integrated_ps.py │ │ ├── time_integrated_ps_function_energy_spectrum.py │ │ └── utils.py ├── cluster │ ├── __init__.py │ ├── commands.py │ ├── compute_node.py │ ├── master_node.py │ └── srvclt.py ├── core │ ├── README.txt │ ├── __init__.py │ ├── analysis.py │ ├── background_generation.py │ ├── background_generator.py │ ├── backgroundpdf.py │ ├── binning.py │ ├── catalog.py │ ├── config.py │ ├── datafields.py │ ├── dataset.py │ ├── debugging.py │ ├── detsigyield.py │ ├── display.py │ ├── event_selection.py │ ├── expectation_maximization.py │ ├── flux_model.py │ ├── interpolate.py │ ├── livetime.py │ ├── llhratio.py │ ├── math.py │ ├── minimizer.py │ ├── minimizers │ │ ├── __init__.py │ │ ├── crs.py │ │ └── iminuit.py │ ├── model.py │ ├── multiproc.py │ ├── parameters.py │ ├── pdf.py │ ├── pdfratio.py │ ├── pdfratio_fill.py │ ├── progressbar.py │ ├── py.py │ ├── random.py │ ├── scrambling.py │ ├── services.py │ ├── session.py │ ├── signal_generation.py │ ├── signal_generator.py │ ├── signalpdf.py │ ├── smoothing.py │ ├── source_hypo_grouping.py │ ├── source_model.py │ ├── storage.py │ ├── test_statistic.py │ ├── times.py │ ├── timing.py │ ├── tool.py │ ├── trialdata.py │ ├── types.py │ └── utils │ │ ├── __init__.py │ │ ├── analysis.py │ │ ├── coords.py │ │ ├── flux_model.py │ │ ├── multidimgridpdf.py │ │ ├── spline.py │ │ ├── tdm.py │ │ └── trials.py ├── datasets │ ├── __init__.py │ └── i3 │ │ ├── PublicData_10y_ps.py │ │ ├── PublicData_10y_ps_wMC.py │ │ ├── TestData.py │ │ └── __init__.py ├── i3 │ ├── __init__.py │ ├── 
background_generation.py │ ├── backgroundpdf.py │ ├── config.py │ ├── dataset.py │ ├── detsigyield.py │ ├── livetime.py │ ├── pdf.py │ ├── pdfratio.py │ ├── scrambling.py │ ├── signal_generation.py │ ├── signalpdf.py │ └── utils │ │ ├── __init__.py │ │ ├── analysis.py │ │ └── coords.py ├── plotting │ ├── __init__.py │ ├── core │ │ ├── __init__.py │ │ ├── pdfratio.py │ │ └── signalpdf.py │ ├── i3 │ │ ├── __init__.py │ │ ├── backgroundpdf.py │ │ ├── pdf.py │ │ └── pdfratio.py │ └── utils │ │ ├── __init__.py │ │ └── trials.py └── scripting │ ├── __init__.py │ ├── argparser.py │ └── logging.py ├── tests ├── core │ ├── __init__.py │ ├── test_datafields.py │ ├── test_dataset.py │ ├── test_event_selection.py │ ├── test_flux_model.py │ ├── test_interpolate.py │ ├── test_model.py │ ├── test_parameters.py │ ├── test_py.py │ ├── test_random.py │ ├── test_signal_generator.py │ ├── test_signalpdf.py │ ├── test_source_model.py │ ├── test_storage.py │ ├── test_weights.py │ └── testdata │ │ ├── __init__.py │ │ ├── exp_testdata.npy │ │ ├── livetime_testdata.npy │ │ ├── mc_testdata.npy │ │ └── testdata_generator.py ├── i3 │ ├── __init__.py │ ├── test_background_generation.py │ ├── test_coords.py │ ├── test_livetime.py │ ├── test_scrambling.py │ └── testdata │ │ ├── __init__.py │ │ ├── exp_testdata.npy │ │ ├── grl_testdata.npy │ │ ├── mc_testdata.npy │ │ └── testdata_generator.py └── run.sh └── versioneer.py /.gitattributes: -------------------------------------------------------------------------------- 1 | skyllh/_version.py export-subst 2 | -------------------------------------------------------------------------------- /.gitconfig: -------------------------------------------------------------------------------- 1 | [rebase] 2 | autosquash = true 3 | -------------------------------------------------------------------------------- /.github/dependabot.yml: -------------------------------------------------------------------------------- 1 | # Set update schedule for GitHub Actions 2 | 3 | version: 2 4 | updates: 5 | 6 | - package-ecosystem: "github-actions" 7 | directory: "/" 8 | schedule: 9 | # Check for updates to GitHub Actions every week 10 | interval: "weekly" 11 | ignore: 12 | - dependency-name: "*" 13 | update-types: ["version-update:semver-minor", "version-update:semver-patch"] 14 | -------------------------------------------------------------------------------- /.github/workflows/documentation.yml: -------------------------------------------------------------------------------- 1 | # This workflow builds sphinx documentation and deploys static html files to the gh-pages branch. 
2 | # Based on https://github.com/marketplace/actions/github-pages-action#%EF%B8%8F-static-site-generators-with-python 3 | 4 | name: Documentation 5 | 6 | on: 7 | push: 8 | branches: 9 | - master 10 | - docs 11 | pull_request: 12 | branches: 13 | - master 14 | 15 | jobs: 16 | deploy: 17 | runs-on: ubuntu-latest 18 | steps: 19 | - uses: actions/checkout@v4 20 | with: 21 | fetch-depth: 0 22 | 23 | - name: Setup Python 24 | uses: actions/setup-python@v5 25 | with: 26 | python-version: '3.x' 27 | 28 | - name: Upgrade pip 29 | run: | 30 | # install pip>=20.1 to use "pip cache dir" 31 | python3 -m pip install --upgrade pip 32 | 33 | - name: Get pip cache dir 34 | id: pip-cache 35 | run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT 36 | 37 | - name: Cache dependencies 38 | uses: actions/cache@v4 39 | with: 40 | path: ${{ steps.pip-cache.outputs.dir }} 41 | key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} 42 | restore-keys: | 43 | ${{ runner.os }}-pip- 44 | 45 | - name: Install dependencies 46 | run: python3 -m pip install -r ./doc/sphinx/requirements.txt 47 | 48 | - name: Install pandoc 49 | run: sudo apt-get -y install pandoc 50 | 51 | - name: Build documentation 52 | run: | 53 | cd ./doc/sphinx 54 | make html-multiversion 55 | cp _assets/gh-pages-redirect.html _build/index.html 56 | 57 | - name: Deploy 58 | uses: peaceiris/actions-gh-pages@v4 59 | with: 60 | github_token: ${{ secrets.GITHUB_TOKEN }} 61 | publish_dir: ./doc/sphinx/_build 62 | force_orphan: true 63 |
-------------------------------------------------------------------------------- /.github/workflows/python_publish.yml: -------------------------------------------------------------------------------- 1 | # This workflow will upload a Python Package using Twine when a release is created 2 | # For more information see: https://docs.github.com/en/actions/automating-builds-and-tests/building-and-testing-python#publishing-to-package-registries 3 | 4 | name: Upload Python Package 5 | 6 | on: 7 | release: 8 | types: [published] 9 | 10 | jobs: 11 | deploy: 12 | runs-on: ubuntu-latest 13 | steps: 14 | - uses: actions/checkout@v4 15 | - name: Set up Python 16 | uses: actions/setup-python@v5 17 | with: 18 | python-version: '3.x' 19 | - name: Install dependencies 20 | run: | 21 | python -m pip install --upgrade pip 22 | pip install setuptools wheel twine 23 | - name: Build and publish package 24 | env: 25 | TWINE_USERNAME: __token__ 26 | TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} 27 | run: | 28 | python setup.py sdist bdist_wheel 29 | twine upload dist/* 30 |
-------------------------------------------------------------------------------- /.github/workflows/pythonpackage.yml: -------------------------------------------------------------------------------- 1 | # This workflow will install Python dependencies, run tests and lint with a variety of Python versions 2 | # For more information see: https://help.github.com/actions/language-and-framework-guides/using-python-with-github-actions 3 | 4 | name: Unit tests 5 | 6 | on: 7 | push: 8 | branches: [ master ] 9 | pull_request: 10 | branches: [ master ] 11 | 12 | jobs: 13 | build: 14 | 15 | runs-on: ubuntu-latest 16 | strategy: 17 | matrix: 18 | python-version: ["3.9", "3.10", "3.11", "3.12", "3.13"] 19 | 20 | steps: 21 | - uses: actions/checkout@v4 22 | - name: Set up Python ${{ matrix.python-version }} 23 | uses: actions/setup-python@v5 24 | with: 25 | python-version: ${{ matrix.python-version }} 26 | - name: Install dependencies 27 | run: | 28 | python -m pip install
--upgrade pip 29 | pip install -r requirements.txt 30 | - name: Lint with flake8 31 | run: | 32 | pip install flake8 33 | # stop the build if there are Python syntax errors or undefined names 34 | flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics 35 | # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide 36 | flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics 37 | - name: Run tests with unittest 38 | env: 39 | ICECUBE_PASSWORD: ${{ secrets.ICECUBE_PASSWORD }} 40 | run: | 41 | ./tests/run.sh 42 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | build/ 12 | develop-eggs/ 13 | dist/ 14 | downloads/ 15 | eggs/ 16 | .eggs/ 17 | lib/ 18 | lib64/ 19 | parts/ 20 | sdist/ 21 | var/ 22 | wheels/ 23 | pip-wheel-metadata/ 24 | share/python-wheels/ 25 | *.egg-info/ 26 | .installed.cfg 27 | *.egg 28 | MANIFEST 29 | 30 | # PyInstaller 31 | # Usually these files are written by a python script from a template 32 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 33 | *.manifest 34 | *.spec 35 | 36 | # Installer logs 37 | pip-log.txt 38 | pip-delete-this-directory.txt 39 | 40 | # Unit test / coverage reports 41 | htmlcov/ 42 | .tox/ 43 | .nox/ 44 | .coverage 45 | .coverage.* 46 | .cache 47 | nosetests.xml 48 | coverage.xml 49 | *.cover 50 | *.py,cover 51 | .hypothesis/ 52 | .pytest_cache/ 53 | 54 | # Translations 55 | *.mo 56 | *.pot 57 | 58 | # Django stuff: 59 | *.log 60 | local_settings.py 61 | db.sqlite3 62 | db.sqlite3-journal 63 | 64 | # Flask stuff: 65 | instance/ 66 | .webassets-cache 67 | 68 | # Scrapy stuff: 69 | .scrapy 70 | 71 | # Sphinx documentation 72 | doc/sphinx/_build 73 | doc/sphinx/reference 74 | 75 | # PyBuilder 76 | target/ 77 | 78 | # Jupyter Notebook 79 | .ipynb_checkpoints 80 | 81 | # IPython 82 | profile_default/ 83 | ipython_config.py 84 | 85 | # pyenv 86 | .python-version 87 | 88 | # pipenv 89 | # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. 90 | # However, in case of collaboration, if having platform-specific dependencies or dependencies 91 | # having no cross-platform support, pipenv may install dependencies that don't work, or not 92 | # install all needed dependencies. 93 | #Pipfile.lock 94 | 95 | # PEP 582; used by e.g. 
github.com/David-OConnor/pyflow 96 | __pypackages__/ 97 | 98 | # Celery stuff 99 | celerybeat-schedule 100 | celerybeat.pid 101 | 102 | # SageMath parsed files 103 | *.sage.py 104 | 105 | # Environments 106 | .env 107 | .venv 108 | env/ 109 | venv/ 110 | ENV/ 111 | env.bak/ 112 | venv.bak/ 113 | 114 | # Spyder project settings 115 | .spyderproject 116 | .spyproject 117 | 118 | # Rope project settings 119 | .ropeproject 120 | 121 | # mkdocs documentation 122 | /site 123 | 124 | # mypy 125 | .mypy_cache/ 126 | .dmypy.json 127 | dmypy.json 128 | 129 | # Pyre type checker 130 | .pyre/ 131 | 132 | # VisualCode IDE 133 | .vscode 134 | 135 | # Scalene profiling files 136 | profile.json 137 | profile.html 138 | 139 | # Latex compilation files 140 | *.aux 141 | *.bbl 142 | *.blg 143 | 144 | # Local temporary data repositories 145 | .repository 146 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # SkyLLH 2 | 3 | [![Tests](https://github.com/icecube/skyllh/actions/workflows/pythonpackage.yml/badge.svg)](#) 4 | [![Docs](https://github.com/icecube/skyllh/actions/workflows/documentation.yml/badge.svg)](https://icecube.github.io/skyllh/) 5 | [![License: GPL-3.0](https://img.shields.io/badge/License-GPLv3-blue.svg)](https://opensource.org/licenses/GPL-3.0) 6 | [![PyPI - Version](https://img.shields.io/pypi/v/skyllh)](https://pypi.org/project/skyllh/) 7 | [![conda-forge](https://anaconda.org/conda-forge/skyllh/badges/version.svg)](https://anaconda.org/conda-forge/skyllh) 8 | 9 | [[Full documentation]](https://icecube.github.io/skyllh/). 10 | 11 | The SkyLLH framework is an open-source Python3-based package licensed under the 12 | GPLv3 license. It provides a modular framework for implementing custom 13 | likelihood functions and executing log-likelihood ratio hypothesis tests. 14 | The idea is to provide a class structure tied to the mathematical objects of the 15 | likelihood functions, rather than to entire abstract likelihood models. 16 | 17 | The math formalism used in SkyLLH is described in the 18 | [[math formalism document]](https://github.com/icecube/skyllh/blob/master/doc/user_manual.pdf). 19 | 20 | # Installation 21 | 22 | ## Using pip 23 | 24 | The latest `skyllh` release can be installed from 25 | [PyPI](https://pypi.org/project/skyllh/) repository: 26 | ```bash 27 | pip install skyllh 28 | ``` 29 | 30 | The current development version can be installed using pip: 31 | ```bash 32 | pip install git+https://github.com/icecube/skyllh.git#egg=skyllh 33 | ``` 34 | 35 | Optionally, the editable package version with a specified reference can be 36 | installed by: 37 | ```bash 38 | pip install -e git+https://github.com/icecube/skyllh.git@[ref]#egg=skyllh 39 | ``` 40 | where 41 | - `-e` is an editable flag 42 | - `[ref]` is an optional argument containing a specific commit hash, branch name 43 | or tag 44 | 45 | ## Cloning from GitHub 46 | 47 | The `skyllh` (and an optional private [i3skyllh](#i3skyllh)) package can be 48 | installed by cloning the GitHub repository and adding it to the Python path: 49 | 50 | ```python 51 | import sys 52 | 53 | sys.path.insert(0, '/path/to/skyllh') 54 | sys.path.insert(0, '/path/to/i3skyllh') # optional 55 | ``` 56 | 57 | # Publications 58 | 59 | Several publications about the SkyLLH software are available: 60 | 61 | - IceCube Collaboration, C. Bellenghi, M. Karl, M. Wolf, et al. 
PoS ICRC2023 (2023) 1061 62 | [DOI](https://doi.org/10.22323/1.444.1061) 63 | - IceCube Collaboration, T. Kontrimas, M. Wolf, et al. PoS ICRC2021 (2022) 1073 64 | [DOI](http://doi.org/10.22323/1.395.1073) 65 | - IceCube Collaboration, M. Wolf, et al. PoS ICRC2019 (2020) 1035 66 | [DOI](https://doi.org/10.22323/1.358.1035) 67 | 68 | # Developer Guidelines 69 | 70 | These guidelines should help new developers of SkyLLH to join the development 71 | process easily. 72 | 73 | ## Code style 74 | 75 | - The code follows the PEP8 coding style guidelines as closely as possible. 76 | 77 | - Code lines are at most 80 characters wide. 78 | 79 | - 4 spaces are used as one indentation level. 80 | 81 | ## Branching 82 | 83 | - When implementing a new feature / change, first an issue must be created 84 | describing the new feature / change. Then a branch must be created referring 85 | to this issue. We recommend the branch name `fix<issue number>`, where 86 | `<issue number>` is the number of the created issue for this feature / change. 87 | 88 | - In cases when SkyLLH needs to be updated because of a change in the i3skyllh 89 | package (see below), we recommend the branch name `i3skyllh_<issue number>`, 90 | where `<issue number>` is the number of the issue created in the i3skyllh 91 | repository. That way the *analysis unit tests* workflow will be able to find 92 | the correct skyllh branch corresponding to the i3skyllh change automatically. 93 | 94 | ## Releases and Versioning 95 | 96 | - Release version numbers follow the format `v<yy>.<major>.<minor>`, where 97 | `<yy>` is the current year, `<major>` and `<minor>` are the major and minor 98 | version numbers of type integer. Example: `v23.2.0`. 99 | 100 | - Release candidates follow the same format as releases, but have the additional 101 | suffix `.rc<n>`, where `<n>` is an integer starting with 1. 102 | Example: `v23.2.0.rc1`. 103 | 104 | - Before creating the release on GitHub, the version number needs to be updated 105 | in the Sphinx documentation: `doc/sphinx/conf.py`. 106 | 107 | # i3skyllh 108 | 109 | The [`i3skyllh`](https://github.com/icecube/i3skyllh) package provides 110 | complementary pre-defined common analyses and datasets for the 111 | [IceCube Neutrino Observatory](https://icecube.wisc.edu) detector in a private 112 | [repository](https://github.com/icecube/i3skyllh). 
113 | 114 | # Contributors 115 | 116 | - [Martin Wolf](https://github.com/martwo) - [mail@martin-wolf.org](mailto:mail@martin-wolf.org) 117 | - [Tomas Kontrimas](https://github.com/tomaskontrimas) - [tomas.kontrimas@tum.de](mailto:tomas.kontrimas@tum.de) 118 | - [Chiara Bellenghi](https://github.com/chiarabellenghi) - [chiara.bellenghi@tum.de](mailto:chiara.bellenghi@tum.de) 119 | - [Martina Karl](https://github.com/mskarl) - [martina.karl@eso.org](mailto:martina.karl@eso.org) 120 | -------------------------------------------------------------------------------- /doc/Makefile: -------------------------------------------------------------------------------- 1 | all: docs 2 | 3 | html: 4 | cd sphinx && make html 5 | 6 | user_manual: 7 | pdflatex user_manual 8 | bibtex user_manual 9 | pdflatex user_manual 10 | pdflatex user_manual 11 | 12 | docs: user_manual html 13 | 14 | clean: 15 | rm -f *.aux 16 | rm -f *.dvi 17 | rm -f *.log 18 | rm -f *.toc 19 | rm -f *.blg 20 | rm -f *.bbl 21 | rm -f *.out 22 | rm -f *.pdf 23 | cd sphinx && make clean 24 | -------------------------------------------------------------------------------- /doc/biblio.bib: -------------------------------------------------------------------------------- 1 | @article{TimeDepPSSearchMethods2010, 2 | author = "{Jim Braun, Mike Baker, Jon Dumm, Chad Finley, Albrecht Karle, Teresa Montaruli}", 3 | title = "{Time-Dependent Point Source Search Methods in High Energy Neutrino Astronomy}", 4 | journal = "Astropart.Phys.", 5 | volume = "33", 6 | pages = "175-181", 7 | year = "2010", 8 | eprint = "0912.1572", 9 | archivePrefix = "arXiv" 10 | } -------------------------------------------------------------------------------- /doc/logo/logo.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/doc/logo/logo.odg -------------------------------------------------------------------------------- /doc/logo/logo3.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/doc/logo/logo3.pdf -------------------------------------------------------------------------------- /doc/scatches/LLHRatio_classes.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/doc/scatches/LLHRatio_classes.odg -------------------------------------------------------------------------------- /doc/scatches/analysis_classes.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/doc/scatches/analysis_classes.odg -------------------------------------------------------------------------------- /doc/scatches/parameter_handling.odg: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/doc/scatches/parameter_handling.odg -------------------------------------------------------------------------------- /doc/sphinx/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 
5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SOURCEDIR = ./ 8 | BUILDDIR = _build 9 | 10 | # Put it first so that "make" without argument is like "make help". 11 | help: 12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 13 | 14 | .PHONY: help Makefile html clean 15 | 16 | # Build html documentation. 17 | html: 18 | @$(SPHINXBUILD) -M html "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 19 | 20 | # Build html documentation for each branch separately. 21 | html-multiversion: 22 | sphinx-multiversion "$(SOURCEDIR)" "$(BUILDDIR)" 23 | 24 | # Prepend clean target to remove skyllh API reference. 25 | clean: 26 | rm -rf $(SOURCEDIR)/reference 27 | @$(SPHINXBUILD) -M clean "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 28 | 29 | # Catch-all target: route all unknown targets to Sphinx using the new 30 | # "make mode" option. 31 | %: Makefile 32 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) 33 | -------------------------------------------------------------------------------- /doc/sphinx/_assets/gh-pages-redirect.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | Redirecting to master branch 5 | 6 | 7 | 8 | 9 | -------------------------------------------------------------------------------- /doc/sphinx/_templates/versions.html: -------------------------------------------------------------------------------- 1 | {%- if current_version %} 2 |
<div class="rst-versions" data-toggle="rst-versions" role="note" aria-label="versions"> 3 | <span class="rst-current-version" data-toggle="rst-current-version"> 4 | <span class="fa fa-book"> Other Versions</span> 5 | v: {{ current_version.name }} 6 | <span class="fa fa-caret-down"></span> 7 | </span> 8 | <div class="rst-other-versions"> 9 | {%- if versions.branches %} 10 | <dl> 11 | <dt>Branches</dt> 12 | {%- for item in versions.branches %} 13 | <dd><a href="{{ item.url }}">{{ item.name }}</a></dd> 14 | {%- endfor %} 15 | </dl> 16 | {%- endif %} 17 | {%- if versions.tags %} 18 | <dl> 19 | <dt>Tags</dt> 20 | {%- for item in versions.tags %} 21 | <dd><a href="{{ item.url }}">{{ item.name }}</a></dd> 22 | {%- endfor %} 23 | </dl> 24 | {%- endif %} 25 | </div> 26 | </div>
27 | {%- endif %} -------------------------------------------------------------------------------- /doc/sphinx/concepts/config.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": {}, 5 | "cell_type": "markdown", 6 | "metadata": {}, 7 | "source": [ 8 | "# Configuration" 9 | ] 10 | }, 11 | { 12 | "cell_type": "raw", 13 | "metadata": { 14 | "raw_mimetype": "text/restructuredtext" 15 | }, 16 | "source": [ 17 | "An analysis requires a (local) configuration. Such a configuration defines for\n", 18 | "instance the internal units used for the calculations, or defines the project's\n", 19 | "working directory.\n", 20 | "\n", 21 | "The :py:mod:`skyllh.core.config` module provides the \n", 22 | ":py:class:`~skyllh.core.config.Config` class. Instantiating it creates a \n", 23 | "configuration with the default base configuration. This base configuration\n", 24 | "can be updated using a *yaml* file via the \n", 25 | ":py:meth:`~skyllh.core.config.Config.from_yaml` class method or a Python dictionary\n", 26 | "via the :py:meth:`~skyllh.core.config.Config.from_dict` class method. " 27 | ] 28 | }, 29 | { 30 | "cell_type": "code", 31 | "execution_count": 3, 32 | "metadata": {}, 33 | "outputs": [], 34 | "source": [ 35 | "from skyllh.core.config import Config\n", 36 | "\n", 37 | "# Create default configuration.\n", 38 | "cfg = Config()" 39 | ] 40 | }, 41 | { 42 | "cell_type": "raw", 43 | "metadata": { 44 | "raw_mimetype": "text/restructuredtext" 45 | }, 46 | "source": [ 47 | "The configuration instance will be passed to the \n", 48 | ":py:class:`skyllh.core.analysis.Analysis` class and all other classes that need\n", 49 | "access to the configuration. The ``cfg`` instance is local and could be created\n", 50 | "several times, but should be created only once for an analysis. " 51 | ] 52 | } 53 | ], 54 | "metadata": { 55 | "kernelspec": { 56 | "display_name": "Python 3", 57 | "language": "python", 58 | "name": "python3" 59 | }, 60 | "language_info": { 61 | "codemirror_mode": { 62 | "name": "ipython", 63 | "version": 3 64 | }, 65 | "file_extension": ".py", 66 | "mimetype": "text/x-python", 67 | "name": "python", 68 | "nbconvert_exporter": "python", 69 | "pygments_lexer": "ipython3", 70 | "version": "3.10.6" 71 | }, 72 | "orig_nbformat": 4 73 | }, 74 | "nbformat": 4, 75 | "nbformat_minor": 2 76 | } 77 |
-------------------------------------------------------------------------------- /doc/sphinx/concepts/datafields.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": {}, 5 | "cell_type": "markdown", 6 | "metadata": {}, 7 | "source": [ 8 | "# Loading data fields from data files" 9 | ] 10 | }, 11 | { 12 | "cell_type": "raw", 13 | "metadata": { 14 | "raw_mimetype": "text/restructuredtext" 15 | }, 16 | "source": [ 17 | "An analysis will need to load a set of data fields from a data file. \n", 18 | "Which fields these are is defined in the ``['datafields']`` section of the\n", 19 | ":py:class:`~skyllh.core.config.Config` dictionary instance. Each field has a stage\n", 20 | "assigned which states at what stage the data field is required. There are\n", 21 | "two main stages: data preparation, and analysis. Since data fields\n", 22 | "can exist either in an experimental data file or a monte-carlo data file, these\n", 23 | "two main stages are divided into EXP and MC. 
Hence, the following stages \n", 24 | "exist::\n", 25 | "\n", 26 | " DATAPREPARATION_EXP\n", 27 | " DATAPREPARATION_MC\n", 28 | " ANALYSIS_EXP\n", 29 | " ANALYSIS_MC\n", 30 | "\n", 31 | "All stages are defined in the :py:class:`skyllh.core.datafields.DataFieldStages`\n", 32 | "class." 33 | ] 34 | }, 35 | { 36 | "attachments": {}, 37 | "cell_type": "raw", 38 | "metadata": { 39 | "raw_mimetype": "text/restructuredtext" 40 | }, 41 | "source": [ 42 | "After loading the data of a :py:class:`~skyllh.core.dataset.Dataset` instance, \n", 43 | "only data fields with the stage ``ANALYSIS_EXP`` and ``ANALYSIS_MC`` will be\n", 44 | "left to use in the analysis. Data fields marked with stage \n", 45 | "``DATAPREPARATION_EXP`` or ``DATAPREPARATION_MC`` will be available for the data\n", 46 | "preparation stage. " 47 | ] 48 | }, 49 | { 50 | "attachments": {}, 51 | "cell_type": "raw", 52 | "metadata": { 53 | "raw_mimetype": "text/restructuredtext" 54 | }, 55 | "source": [ 56 | "The following code shows how to define the data fields ``my_exp_field`` and \n", 57 | "``my_mc_field`` that should be loaded from the experimental and monte-carlo data \n", 58 | "files, respectively." 59 | ] 60 | }, 61 | { 62 | "cell_type": "code", 63 | "execution_count": 5, 64 | "metadata": {}, 65 | "outputs": [], 66 | "source": [ 67 | "from skyllh.core.config import Config\n", 68 | "from skyllh.core.datafields import DataFieldStages as DFS" 69 | ] 70 | }, 71 | { 72 | "cell_type": "code", 73 | "execution_count": 6, 74 | "metadata": {}, 75 | "outputs": [], 76 | "source": [ 77 | "cfg = Config()\n", 78 | "cfg['datafields']['my_exp_field'] = DFS.ANALYSIS_EXP\n", 79 | "cfg['datafields']['my_mc_field'] = DFS.DATAPREPARATION_MC" 80 | ] 81 | }, 82 | { 83 | "attachments": {}, 84 | "cell_type": "raw", 85 | "metadata": { 86 | "raw_mimetype": "text/restructuredtext" 87 | }, 88 | "source": [ 89 | "The ``my_exp_field`` will be available after the data files have been loaded\n", 90 | "and the data has been prepared by optional data preparation functions, whereas\n", 91 | "the ``my_mc_field`` will be available only at the data preparation stage and not\n", 92 | "at the analysis stage.\n", 93 | "\n", 94 | ".. note::\n", 95 | "\n", 96 | " Everything after the \n", 97 | " :py:meth:`skyllh.core.dataset.Dataset.load_and_prepare_data` call is \n", 98 | " referred to as the analysis stage. \n" 99 | ] 100 | }, 101 | { 102 | "attachments": {}, 103 | "cell_type": "raw", 104 | "metadata": { 105 | "raw_mimetype": "text/restructuredtext" 106 | }, 107 | "source": [ 108 | "Datasets can define their own required data fields by setting the \n", 109 | ":py:attr:`skyllh.core.dataset.Dataset.datafields` property in the same way as \n", 110 | "in the configuration."
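As an illustration, such a dataset-specific field requirement could look like the following minimal sketch (``ds`` is assumed to be an already constructed :py:class:`~skyllh.core.dataset.Dataset` instance, and the field name is hypothetical):

```python
from skyllh.core.datafields import DataFieldStages as DFS

# `ds` is assumed to be an existing skyllh.core.dataset.Dataset instance.
# Require an additional (hypothetical) data field, but only during the data
# preparation stage of this particular dataset.
ds.datafields = {
    'my_dataset_specific_field': DFS.DATAPREPARATION_EXP,
}
```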
111 | ] 112 | } 113 | ], 114 | "metadata": { 115 | "kernelspec": { 116 | "display_name": "Python 3", 117 | "language": "python", 118 | "name": "python3" 119 | }, 120 | "language_info": { 121 | "codemirror_mode": { 122 | "name": "ipython", 123 | "version": 3 124 | }, 125 | "file_extension": ".py", 126 | "mimetype": "text/x-python", 127 | "name": "python", 128 | "nbconvert_exporter": "python", 129 | "pygments_lexer": "ipython3", 130 | "version": "3.10.6" 131 | }, 132 | "orig_nbformat": 4 133 | }, 134 | "nbformat": 4, 135 | "nbformat_minor": 2 136 | } 137 | -------------------------------------------------------------------------------- /doc/sphinx/concepts/detsigyield.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": {}, 5 | "cell_type": "markdown", 6 | "id": "12151349", 7 | "metadata": {}, 8 | "source": [ 9 | "# Detector Signal Yield" 10 | ] 11 | }, 12 | { 13 | "attachments": {}, 14 | "cell_type": "markdown", 15 | "id": "69d138fc", 16 | "metadata": { 17 | "raw_mimetype": "text/restructuredtext" 18 | }, 19 | "source": [ 20 | "Several parts of an analysis will require the calculation of the detector signal yield, which is the mean number of expected signal events in the detector for a given source hypothesis, i.e. source flux function $\\Phi(\\alpha,\\delta,E,t)$." 21 | ] 22 | }, 23 | { 24 | "cell_type": "raw", 25 | "id": "a1560531", 26 | "metadata": { 27 | "raw_mimetype": "text/restructuredtext" 28 | }, 29 | "source": [ 30 | "SkyLLH provides two abstract base classes for creating a detector signal yield \n", 31 | "instance, :py:class:`~skyllh.core.detsigyield.DetSigYieldBuilder` and \n", 32 | ":py:class:`~skyllh.core.detsigyield.DetSigYield`. The first is the builder \n", 33 | "class, which will build a :py:class:`~skyllh.core.detsigyield.DetSigYield` class\n", 34 | "instance." 35 | ] 36 | }, 37 | { 38 | "cell_type": "raw", 39 | "id": "3b4bd127", 40 | "metadata": { 41 | "raw_mimetype": "text/restructuredtext" 42 | }, 43 | "source": [ 44 | "The builder class has the abstract method \n", 45 | ":py:meth:`~skyllh.core.detsigyield.DetSigYieldBuilder.construct_detsigyield`, \n", 46 | "which will take a :py:class:`~skyllh.core.dataset.Dataset`, \n", 47 | ":py:class:`~skyllh.core.dataset.DatasetData`, \n", 48 | ":py:class:`~skyllh.physics.flux_model.FluxModel` instance, and live-time to \n", 49 | "construct a :py:class:`~skyllh.core.detsigyield.DetSigYield` class instance, \n", 50 | "which will provide an evaluation method to calculate the detector signal yield \n", 51 | "for a given source, for the given dataset. Hence, the detector signal yield is\n", 52 | "dataset and source model dependent." 53 | ] 54 | }, 55 | { 56 | "cell_type": "raw", 57 | "id": "653d0c06", 58 | "metadata": { 59 | "raw_mimetype": "text/restructuredtext" 60 | }, 61 | "source": [ 62 | "The :py:class:`~skyllh.core.detsigyield.DetSigYield` class has two abstract \n", 63 | "methods, :py:meth:`~skyllh.core.detsigyield.DetSigYield.source_to_array` and \n", 64 | ":py:meth:`~skyllh.core.detsigyield.DetSigYield.__call__`." 
65 | ] 66 | }, 67 | { 68 | "cell_type": "raw", 69 | "id": "60c16a6d", 70 | "metadata": { 71 | "raw_mimetype": "text/restructuredtext" 72 | }, 73 | "source": [ 74 | "The :py:meth:`~skyllh.core.detsigyield.DetSigYield.source_to_array` method takes\n", 75 | "a sequence of source models and converts it into a numpy record array suitable\n", 76 | "for the :py:meth:`~skyllh.core.detsigyield.DetSigYield.__call__` method to \n", 77 | "evaluate the detector signal yield efficiently for a list of sources. \n", 78 | "The :py:meth:`~skyllh.core.detsigyield.DetSigYield.__call__` method evaluates\n", 79 | "the :py:class:`~skyllh.core.detsigyield.DetSigYield` instance. As arguments it\n", 80 | "takes the source record array created by the \n", 81 | ":py:meth:`~skyllh.core.detsigyield.DetSigYield.source_to_array` method, and the \n", 82 | "numpy record array holding the (local) source parameter values." 83 | ] 84 | }, 85 | { 86 | "cell_type": "raw", 87 | "id": "d75df35f", 88 | "metadata": { 89 | "raw_mimetype": "text/restructuredtext" 90 | }, 91 | "source": [ 92 | "The record array holding the local source parameter values can be generated \n", 93 | "through the \n", 94 | ":py:class:`~skyllh.core.parameters.ParameterModelMapper.create_src_params_recarray` \n", 95 | "of the :py:class:`~skyllh.core.parameters.ParameterModelMapper` instance of the\n", 96 | "analysis. See also the :ref:`Parameter to Model mapping \n", 97 | "` section." 98 | ] 99 | } 100 | ], 101 | "metadata": { 102 | "celltoolbar": "Raw Cell Format", 103 | "kernelspec": { 104 | "display_name": "Python 3", 105 | "language": "python", 106 | "name": "python3" 107 | }, 108 | "language_info": { 109 | "codemirror_mode": { 110 | "name": "ipython", 111 | "version": 3 112 | }, 113 | "file_extension": ".py", 114 | "mimetype": "text/x-python", 115 | "name": "python", 116 | "nbconvert_exporter": "python", 117 | "pygments_lexer": "ipython3", 118 | "version": "3.10.6" 119 | } 120 | }, 121 | "nbformat": 4, 122 | "nbformat_minor": 5 123 | } 124 | -------------------------------------------------------------------------------- /doc/sphinx/concepts/index.rst: -------------------------------------------------------------------------------- 1 | .. concepts 2 | 3 | Concepts 4 | ======== 5 | 6 | This section covers a few concepts SkyLLH is persuing. 7 | 8 | .. toctree:: 9 | :maxdepth: 1 10 | 11 | config 12 | dataset 13 | datafields 14 | source_definition 15 | flux_definition 16 | parameters 17 | pdf 18 | detsigyield 19 | -------------------------------------------------------------------------------- /doc/sphinx/dev_docs/logging.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Logging\n", 8 | "\n", 9 | ".. contents:: :local:" 10 | ] 11 | }, 12 | { 13 | "cell_type": "markdown", 14 | "metadata": {}, 15 | "source": [ 16 | "## Introduction\n", 17 | "\n", 18 | "Logging is implemented using standard Python logging module. skyllh package adds support to multiprocessing logging." 19 | ] 20 | }, 21 | { 22 | "cell_type": "markdown", 23 | "metadata": {}, 24 | "source": [ 25 | "## Setup\n", 26 | "\n", 27 | "Analyzer has to set up the main logger and at least one handler for it. 
Necessary imports:" 28 | ] 29 | }, 30 | { 31 | "cell_type": "code", 32 | "execution_count": null, 33 | "metadata": {}, 34 | "outputs": [], 35 | "source": [ 36 | "import logging\n", 37 | "\n", 38 | "# Logging setup utilities.\n", 39 | "from skyllh.core.debugging import (\n", 40 | " setup_logger,\n", 41 | " setup_console_handler,\n", 42 | " setup_file_handler\n", 43 | ")" 44 | ] 45 | }, 46 | { 47 | "cell_type": "markdown", 48 | "metadata": {}, 49 | "source": [ 50 | "The logger is set up with the following function call. The analyzer can define a desired logging level, which optimally is the infimum of the handler levels." 51 | ] 52 | }, 53 | { 54 | "cell_type": "code", 55 | "execution_count": null, 56 | "metadata": {}, 57 | "outputs": [], 58 | "source": [ 59 | "setup_logger('skyllh', logging.DEBUG)" 60 | ] 61 | }, 62 | { 63 | "cell_type": "markdown", 64 | "metadata": {}, 65 | "source": [ 66 | "The log format is defined by a string containing [LogRecord](https://docs.python.org/3/library/logging.html#logrecord-attributes) attributes." 67 | ] 68 | }, 69 | { 70 | "cell_type": "code", 71 | "execution_count": null, 72 | "metadata": {}, 73 | "outputs": [], 74 | "source": [ 75 | "log_format = '%(asctime)s %(processName)s %(name)s %(levelname)s: '\\\n", 76 | " '%(message)s'" 77 | ] 78 | }, 79 | { 80 | "cell_type": "markdown", 81 | "metadata": {}, 82 | "source": [ 83 | "The console handler is used to print logs to the console." 84 | ] 85 | }, 86 | { 87 | "cell_type": "code", 88 | "execution_count": null, 89 | "metadata": {}, 90 | "outputs": [], 91 | "source": [ 92 | "setup_console_handler('skyllh', logging.INFO, log_format)" 93 | ] 94 | }, 95 | { 96 | "cell_type": "markdown", 97 | "metadata": {}, 98 | "source": [ 99 | "The file handler is used to print logs to the specified file. By default it is created in the current working directory." 100 | ] 101 | }, 102 | { 103 | "cell_type": "code", 104 | "execution_count": null, 105 | "metadata": {}, 106 | "outputs": [], 107 | "source": [ 108 | "setup_file_handler('skyllh', logging.DEBUG, log_format, 'debug.log')" 109 | ] 110 | }, 111 | { 112 | "cell_type": "markdown", 113 | "metadata": {}, 114 | "source": [ 115 | "## Generating logs\n", 116 | "\n", 117 | "To generate logs inside the skyllh package one has to" 118 | ] 119 | }, 120 | { 121 | "cell_type": "code", 122 | "execution_count": null, 123 | "metadata": {}, 124 | "outputs": [], 125 | "source": [ 126 | "import logging" 127 | ] 128 | }, 129 | { 130 | "cell_type": "markdown", 131 | "metadata": {}, 132 | "source": [ 133 | "and get the logger for each class/function using:" 134 | ] 135 | }, 136 | { 137 | "cell_type": "code", 138 | "execution_count": null, 139 | "metadata": {}, 140 | "outputs": [], 141 | "source": [ 142 | "logger = logging.getLogger(__name__)" 143 | ] 144 | }, 145 | { 146 | "cell_type": "markdown", 147 | "metadata": {}, 148 | "source": [ 149 | "To log an event we use the logging function of a desired level, e.g.:" 150 | ] 151 | }, 152 | { 153 | "cell_type": "code", 154 | "execution_count": null, 155 | "metadata": {}, 156 | "outputs": [], 157 | "source": [ 158 | "logger.warning(\"This is a warning.\")" 159 | ] 160 | }, 161 | { 162 | "cell_type": "markdown", 163 | "metadata": {}, 164 | "source": [ 165 | "The list of logging levels can be found [here](https://docs.python.org/3/library/logging.html#levels)."
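Putting the pieces together, a minimal end-to-end setup using only the utilities shown above could look like this (the child logger name `skyllh.example` is hypothetical):

```python
import logging

from skyllh.core.debugging import (
    setup_console_handler,
    setup_logger,
)

# Set up the main 'skyllh' logger and print INFO (and above) messages to the
# console.
log_format = '%(asctime)s %(processName)s %(name)s %(levelname)s: '\
    '%(message)s'
setup_logger('skyllh', logging.DEBUG)
setup_console_handler('skyllh', logging.INFO, log_format)

# Module code then logs through a child logger of 'skyllh'.
logger = logging.getLogger('skyllh.example')
logger.info('Logging has been configured.')
```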
166 | ] 167 | } 168 | ], 169 | "metadata": { 170 | "kernelspec": { 171 | "display_name": "i3", 172 | "language": "python", 173 | "name": "i3" 174 | }, 175 | "language_info": { 176 | "codemirror_mode": { 177 | "name": "ipython", 178 | "version": 2 179 | }, 180 | "file_extension": ".py", 181 | "mimetype": "text/x-python", 182 | "name": "python", 183 | "nbconvert_exporter": "python", 184 | "pygments_lexer": "ipython2", 185 | "version": "2.7.15rc1" 186 | } 187 | }, 188 | "nbformat": 4, 189 | "nbformat_minor": 2 190 | } 191 | -------------------------------------------------------------------------------- /doc/sphinx/dev_docs/unit_tests.rst: -------------------------------------------------------------------------------- 1 | .. _unit_tests: 2 | 3 | ********** 4 | Unit tests 5 | ********** 6 | 7 | When extending SkyLLH with new code and features, the extension needs to be 8 | covered by unit tests. SkyLLH uses the ``unittest`` package of Python. 9 | 10 | .. code-block:: python 11 | 12 | import unittest 13 | 14 | class SelfDrivingCarTest(TestCase): 15 | def setUp(self): 16 | self.car = SelfDrivingCar() 17 | 18 | To run all test we can use following command:: 19 | 20 | python -m unittest discover -------------------------------------------------------------------------------- /doc/sphinx/examples/index.rst: -------------------------------------------------------------------------------- 1 | .. _examples_index: 2 | 3 | ******** 4 | Examples 5 | ******** 6 | 7 | .. toctree:: 8 | :maxdepth: 3 9 | 10 | timepdf 11 | -------------------------------------------------------------------------------- /doc/sphinx/faq/index.rst: -------------------------------------------------------------------------------- 1 | .. _faq_index: 2 | 3 | ****************************** 4 | Frequently Ask Questions (FAQ) 5 | ****************************** 6 | 7 | .. toctree:: 8 | :maxdepth: 3 9 | 10 | signal_generator 11 | -------------------------------------------------------------------------------- /doc/sphinx/faq/signal_generator.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "cell_type": "markdown", 5 | "metadata": {}, 6 | "source": [ 7 | "# Signal Generator" 8 | ] 9 | }, 10 | { 11 | "cell_type": "markdown", 12 | "metadata": {}, 13 | "source": [ 14 | "## How can I change the flux model of my sources (for my signal events)?" 15 | ] 16 | }, 17 | { 18 | "cell_type": "raw", 19 | "metadata": { 20 | "raw_mimetype": "text/restructuredtext" 21 | }, 22 | "source": [ 23 | "When SkyLLH generates pseudo data for an analysis, it will generate background\n", 24 | "and signal events. It might be desired to change the flux model of the sources \n", 25 | "to generate signal events following a different flux model as originally choosen\n", 26 | "when the analysis was created.\n", 27 | "\n", 28 | "The most general procedure to change the flux model(s) of the source(s) of \n", 29 | "(only) the signal generator is to create a new signal generator with a \n", 30 | ":py:class:`~skyllh.core.source_hypo_grouping.SourceHypoGroupManager` instance \n", 31 | "that includes the sources with the changed flux model.\n", 32 | "\n", 33 | "The new signal generator instance should then be set to the\n", 34 | ":py:attr:`~skyllh.core.analysis.Analysis._sig_generator` attribute of the\n", 35 | ":py:class:`~skyllh.core.analysis.Analysis` class. 
However, if the analysis \n", 36 | "signal generator relies on signal generators for each individual dataset, such\n", 37 | "dataset signal generators need to be re-created as well and set to the\n", 38 | ":py:attr:`~skyllh.core.analysis.Analysis._sig_generator_list` attribute of the\n", 39 | ":py:class:`~skyllh.core.analysis.Analysis` class.\n", 40 | "\n", 41 | ".. note::\n", 42 | "\n", 43 | " If the flux model(s) of the source(s) should be set for the analysis itself,\n", 44 | " i.e. also for the detector signal yield calculation, which is used to weight\n", 45 | " the datasets of a multi-dataset analysis, the \n", 46 | " :py:meth:`~skyllh.core.analysis.Analysis.change_shg_mgr` method should be\n", 47 | " called with the new \n", 48 | " :py:class:`~skyllh.core.source_hypo_grouping.SourceHypoGroupManager` \n", 49 | " instance. This method will call the ``change_shg_mgr`` method of all \n", 50 | " analysis components.\n", 51 | " \n", 52 | " A re-creation of the signal generator instances is then not required.\n" 53 | ] 54 | } 55 | ], 56 | "metadata": { 57 | "kernelspec": { 58 | "display_name": "Python 3", 59 | "language": "python", 60 | "name": "python3" 61 | }, 62 | "language_info": { 63 | "codemirror_mode": { 64 | "name": "ipython", 65 | "version": 3 66 | }, 67 | "file_extension": ".py", 68 | "mimetype": "text/x-python", 69 | "name": "python", 70 | "nbconvert_exporter": "python", 71 | "pygments_lexer": "ipython3", 72 | "version": "3.10.12" 73 | }, 74 | "orig_nbformat": 4 75 | }, 76 | "nbformat": 4, 77 | "nbformat_minor": 2 78 | } 79 |
-------------------------------------------------------------------------------- /doc/sphinx/index.rst: -------------------------------------------------------------------------------- 1 | .. SkyLLH 2 | 3 | SkyLLH documentation 4 | ==================== 5 | 6 | SkyLLH is a Python-based framework to develop and to perform general maximum 7 | likelihood ratio hypothesis testing. The idea of SkyLLH is to provide a 8 | framework with a class structure that is tied to the mathematical objects of 9 | the likelihood functions, rather than to entire abstract likelihood models. 10 | Hence, with SkyLLH it is supposed to be easy to perform an entire maximum 11 | likelihood ratio test once the user (likelihood developer) has defined the 12 | mathematical likelihood function. 13 | 14 | .. _user-docs: 15 | 16 | .. toctree:: 17 | :maxdepth: 3 18 | :caption: User Documentation 19 | 20 | installation 21 | concepts/index 22 | tutorials/index 23 | faq/index 24 | examples/index 25 | reference/skyllh 26 | notes 27 | 28 | .. _dev-docs: 29 | 30 | .. toctree:: 31 | :maxdepth: 1 32 | :caption: Developer Documentation 33 | 34 | dev_docs/logging 35 | dev_docs/unit_tests 36 | 37 | 38 | Indices and tables 39 | ================== 40 | 41 | * :ref:`genindex` 42 | * :ref:`modindex` 43 | * :ref:`search` 44 |
-------------------------------------------------------------------------------- /doc/sphinx/installation.rst: -------------------------------------------------------------------------------- 1 | .. _installation: 2 | 3 | ************ 4 | Installation 5 | ************ 6 | 7 | 8 | Prerequisites 9 | ============= 10 | 11 | The SkyLLH framework has several dependencies. They are listed in the `requirements.txt <https://github.com/icecube/skyllh/blob/master/requirements.txt>`_ file: 12 | 13 | :: 14 | 15 | astropy 16 | numpy 17 | scipy 18 | iminuit 19 | matplotlib 20 | 21 | They can be installed from the `skyllh` directory with: 22 | 23 | .. 
code:: bash 24 | 25 | pip install -r requirements.txt 26 | 27 | On cobalt and NPX servers we can use a CVMFS Python 3 virtual environment with all necessary packages already installed. In order to activate it run: 28 | 29 | .. code:: bash 30 | 31 | eval `/cvmfs/icecube.opensciencegrid.org/py3-v4.1.1/setup.sh` 32 | 33 | 34 | Setup 35 | ===== 36 | 37 | Using pip 38 | --------- 39 | 40 | The latest `skyllh` release can be installed from the 41 | `PyPI <https://pypi.org/project/skyllh/>`_ repository: 42 | 43 | .. code:: bash 44 | 45 | pip install skyllh 46 | 47 | The current development version can be installed using pip: 48 | 49 | .. code:: bash 50 | 51 | pip install git+https://github.com/icecube/skyllh.git#egg=skyllh 52 | 53 | Optionally, the editable package version with a specified reference can be 54 | installed by: 55 | 56 | .. code:: bash 57 | 58 | pip install -e git+https://github.com/icecube/skyllh.git@[ref]#egg=skyllh 59 | 60 | where 61 | 62 | * `-e` is the editable flag 63 | * `[ref]` is an optional argument containing a specific commit hash, branch name 64 | or tag 65 | 66 | Cloning from GitHub 67 | ------------------- 68 | 69 | The framework is split into two packages: 70 | 71 | 1. `github.com/icecube/skyllh <https://github.com/icecube/skyllh>`_ 72 | 73 | * Contains open source code with classes defining the detector independent 74 | likelihood framework. 75 | 76 | 2. `github.com/icecube/i3skyllh <https://github.com/icecube/i3skyllh>`_ 77 | 78 | * Contains collections of pre-defined SkyLLH IceCube analyses and pre-defined 79 | IceCube datasets. 80 | 81 | In order to set it up, we have to clone the git repositories and add them to the 82 | `PYTHONPATH`: 83 | 84 | .. code:: bash 85 | 86 | git clone git@github.com:icecube/skyllh.git /path/to/skyllh 87 | git clone git@github.com:icecube/i3skyllh.git /path/to/i3skyllh 88 | export PYTHONPATH=$PYTHONPATH:/path/to/skyllh 89 | export PYTHONPATH=$PYTHONPATH:/path/to/i3skyllh 90 | 91 | Alternatively, we can add them inside the Python script: 92 | 93 | .. code:: python 94 | 95 | import sys 96 | 97 | # Add the skyllh and i3skyllh packages to the PYTHONPATH. 98 | sys.path.insert(0, '/path/to/skyllh') 99 | sys.path.insert(0, '/path/to/i3skyllh') 100 |
-------------------------------------------------------------------------------- /doc/sphinx/notes.rst: -------------------------------------------------------------------------------- 1 | .. _notes: 2 | 3 | ***** 4 | Notes 5 | ***** 6 | 7 | Docstrings 8 | ========== 9 | 10 | To generate consistent reference documentation the NumPy 11 | `docstring convention <https://numpydoc.readthedocs.io/en/latest/format.html>`_ 12 | should be followed. 13 | 14 | Some important notes: 15 | 16 | * Use double back-ticks for code samples, e.g. (\``print("a text")\``), hence 17 | variable, module, function, and class names should be written between double 18 | back-ticks (\``numpy\``). 19 | 20 | * See the list of available 21 | `sections <https://numpydoc.readthedocs.io/en/latest/format.html#sections>`_ 22 | in docstrings that appear in a common order, as illustrated below. 
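For instance, a hypothetical function following this convention could be documented like this:

```python
import numpy as np


def gauss(x, mu=0., sigma=1.):
    """Evaluates the normal distribution function at the given points.

    Parameters
    ----------
    x : numpy.ndarray
        The (n,)-shaped numpy ndarray holding the points at which the
        function should be evaluated.
    mu : float
        The mean of the distribution.
    sigma : float
        The standard deviation of the distribution.

    Returns
    -------
    f : numpy.ndarray
        The (n,)-shaped numpy ndarray holding the function values.
    """
    norm = 1. / (sigma * np.sqrt(2*np.pi))
    return norm * np.exp(-0.5*((x - mu)/sigma)**2)
```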
23 | -------------------------------------------------------------------------------- /doc/sphinx/requirements.txt: -------------------------------------------------------------------------------- 1 | -r ../../requirements.txt 2 | sphinx 3 | sphinx_rtd_theme 4 | nbsphinx 5 | ipython 6 | sphinxcontrib-apidoc 7 | sphinx-multiversion
-------------------------------------------------------------------------------- /doc/sphinx/tutorials/getting_started.ipynb: -------------------------------------------------------------------------------- 1 | { 2 | "cells": [ 3 | { 4 | "attachments": {}, 5 | "cell_type": "markdown", 6 | "id": "0b906063-5b59-4428-96fd-289fc465ebec", 7 | "metadata": {}, 8 | "source": [ 9 | "# Getting started\n", 10 | "\n", 11 | "SkyLLH is a Python-based framework to develop and to perform maximum likelihood ratio hypothesis testing.\n", 12 | "\n", 13 | "The idea of SkyLLH is to provide a framework with a class structure that is tied to the mathematical objects of the likelihood functions.\n", 14 | "\n", 15 | "Slack channel: [#skyllh](https://icecube-spno.slack.com/channels/skyllh)\n", 16 | "\n", 17 | "An IceCube member can find pre-defined IceCube log-likelihood analyses in the (private) [i3skyllh](https://github.com/icecube/i3skyllh) project.\n", 18 | "\n", 19 | "\n", 20 | "## SkyLLH's analysis workflow\n", 21 | "\n", 22 | "To set up and run an analysis, the following procedure applies:" 23 | ] 24 | }, 25 | { 26 | "cell_type": "raw", 27 | "id": "c090c1ac", 28 | "metadata": {}, 29 | "source": [ 30 | "1. Create a (local) configuration for the analysis." 31 | ] 32 | }, 33 | { 34 | "cell_type": "code", 35 | "execution_count": 2, 36 | "id": "98eb2f8c", 37 | "metadata": {}, 38 | "outputs": [], 39 | "source": [ 40 | "from skyllh.core.config import Config\n", 41 | "cfg = Config()" 42 | ] 43 | }, 44 | { 45 | "attachments": {}, 46 | "cell_type": "raw", 47 | "id": "d1929cdb", 48 | "metadata": {}, 49 | "source": [ 50 | "An updated configuration from the default configuration can also be loaded from\n", 51 | "a *yaml* file or from a Python dictionary." 52 | ] 53 | }, 54 | { 55 | "cell_type": "code", 56 | "execution_count": 5, 57 | "id": "104f74c3", 58 | "metadata": {}, 59 | "outputs": [], 60 | "source": [ 61 | "cfg = Config.from_dict({\n", 62 | " 'project': {\n", 63 | " 'working_directory': '/home/mwolf/projects/publicdata_ps',\n", 64 | " }})" 65 | ] 66 | }, 67 | { 68 | "cell_type": "raw", 69 | "id": "ce2502d0", 70 | "metadata": { 71 | "raw_mimetype": "text/restructuredtext" 72 | }, 73 | "source": [ 74 | "2. Create an analysis instance (preferably based on pre-defined \n", 75 | "`create_analysis` functions). It takes care of the following parts:\n", 76 | "\n", 77 | " 1. Add the datasets and their PDF ratio instances via the :py:meth:`skyllh.core.analysis.Analysis.add_dataset` method.\n", 78 | " 2. Construct the log-likelihood ratio function via the :py:meth:`skyllh.core.analysis.Analysis.construct_llhratio` method." 79 | ] 80 | }, 81 | { 82 | "cell_type": "raw", 83 | "id": "cb08d693", 84 | "metadata": { 85 | "raw_mimetype": "text/restructuredtext" 86 | }, 87 | "source": [ 88 | "3. Call the :py:meth:`skyllh.core.analysis.Analysis.do_trial`, or \n", 89 | ":py:meth:`skyllh.core.analysis.Analysis.unblind` method to perform a random \n", 90 | "trial, or to unblind the data, respectively. Both methods will fit the global\n", 91 | "fit parameters using the set up data. 
Finally, the test statistic is calculated\n", 92 | "internally via the \n", 93 | ":py:meth:`skyllh.core.analysis.Analysis.calculate_test_statistic` method." 94 | ] 95 | }, 96 | { 97 | "cell_type": "raw", 98 | "id": "95a714df", 99 | "metadata": {}, 100 | "source": [] 101 | } 102 | ], 103 | "metadata": { 104 | "celltoolbar": "Raw Cell Format", 105 | "kernelspec": { 106 | "display_name": "Python 3", 107 | "language": "python", 108 | "name": "python3" 109 | }, 110 | "language_info": { 111 | "codemirror_mode": { 112 | "name": "ipython", 113 | "version": 3 114 | }, 115 | "file_extension": ".py", 116 | "mimetype": "text/x-python", 117 | "name": "python", 118 | "nbconvert_exporter": "python", 119 | "pygments_lexer": "ipython3", 120 | "version": "3.10.6" 121 | } 122 | }, 123 | "nbformat": 4, 124 | "nbformat_minor": 5 125 | } 126 |
-------------------------------------------------------------------------------- /doc/sphinx/tutorials/index.rst: -------------------------------------------------------------------------------- 1 | .. _tutorials_index: 2 | 3 | ********* 4 | Tutorials 5 | ********* 6 | 7 | .. toctree:: 8 | :maxdepth: 3 9 | 10 | getting_started 11 | publicdata_ps 12 | publicdata_ps_timedep 13 |
-------------------------------------------------------------------------------- /doc/user_manual.pdf: https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/doc/user_manual.pdf
-------------------------------------------------------------------------------- /examples/multiproc.py: -------------------------------------------------------------------------------- 1 | """Example of how to use the multiproc module to parallelize the computation of 2 | the function f(x) = x^2 + c 3 | """ 4 | 5 | import numpy as np 6 | 7 | from skyllh.core.multiproc import parallelize 8 | 9 | 10 | def f(x, c=0.): 11 | return x**2 + c 12 | 13 | 14 | if __name__ == '__main__': 15 | res = parallelize( 16 | func=f, 17 | args_list=[ 18 | ((x,), {'c': x}) 19 | for x in np.arange(1, 10, 1) 20 | ], 21 | ncpu=3) 22 | print(res) 23 |
-------------------------------------------------------------------------------- /examples/scrambling.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """ 4 | Example of how to use the data scrambling mechanism of SkyLLH. 5 | """ 6 | 7 | import numpy as np 8 | 9 | from skyllh.core.livetime import ( 10 | Livetime, 11 | ) 12 | from skyllh.core.random import ( 13 | RandomStateService, 14 | ) 15 | from skyllh.core.scrambling import ( 16 | DataScrambler, 17 | UniformRAScramblingMethod, 18 | ) 19 | from skyllh.core.times import ( 20 | LivetimeTimeGenerationMethod, 21 | TimeGenerator, 22 | ) 23 | 24 | from skyllh.i3.scrambling import ( 25 | I3TimeScramblingMethod, 26 | ) 27 | 28 | 29 | def gen_data(rss, N): 30 | """Create pseudo data with uniformly distributed right-ascension and 31 | declination values. Only the "ra" and "dec" fields get filled. 32 | """ 33 | arr = np.empty( 34 | (N,), 35 | dtype=[ 36 | ("azi", np.float64), 37 | ("zen", np.float64), 38 | ("ra", np.float64), 39 | ("dec", np.float64), 40 | ("time", np.float64), 41 | ]) 42 | 43 | arr["ra"] = rss.random.uniform(0., 2.*np.pi, N) 44 | # Declination values are limited to the range [-pi/2, pi/2]. 45 | arr["dec"] = rss.random.uniform(-np.pi/2., np.pi/2., N) 46 | 47 | return arr 48 | 49 | 50 | def ex1(): 51 | """Data scrambling via right-ascension scrambling. 52 | """ 53 | print("Example 1") 54 | print("=========") 55 | 56 | rss = RandomStateService(seed=1) 57 | 58 | # Generate some pseudo data. 
59 | data = gen_data(rss=rss, N=10) 60 | print(f'before scrambling: data["ra"]={data["ra"]}') 61 | 62 | # Create a DataScrambler instance with uniform RA scrambling. 63 | scrambler = DataScrambler( 64 | method=UniformRAScramblingMethod()) 65 | 66 | # Scramble the data. 67 | scrambler.scramble_data( 68 | rss=rss, 69 | dataset=None, 70 | data=data) 71 | 72 | print(f'after scrambling: data["ra"]={data["ra"]}') 73 | 74 | 75 | def ex2(): 76 | """Data scrambling via detector on-time scrambling. 77 | """ 78 | print("Example 2") 79 | print("=========") 80 | 81 | rss = RandomStateService(seed=1) 82 | 83 | # Generate some pseudo data. 84 | data = gen_data(rss=rss, N=10) 85 | print(f'before scrambling: data["ra"]={data["ra"]}') 86 | 87 | # Create a Livetime object, which defines the detector live-time. 88 | lt = Livetime(uptime_mjd_intervals_arr=np.array( 89 | [ 90 | [55000, 56000], 91 | [60000, 69000] 92 | ], 93 | dtype=np.float64)) 94 | 95 | # Create a TimeGenerator with an on-time time generation method. 96 | timegen = TimeGenerator(method=LivetimeTimeGenerationMethod(livetime=lt)) 97 | 98 | # Create a DataScrambler with the IceCube time scrambling method. 99 | scrambler = DataScrambler( 100 | method=I3TimeScramblingMethod(timegen)) 101 | 102 | # Scramble the data. 103 | scrambler.scramble_data( 104 | rss=rss, 105 | dataset=None, 106 | data=data) 107 | 108 | print(f'after scrambling: data["ra"]={data["ra"]}') 109 | 110 | 111 | if __name__ == '__main__': 112 | ex1() 113 | ex2() 114 |
-------------------------------------------------------------------------------- /pyproject.toml: -------------------------------------------------------------------------------- 1 | [build-system] 2 | requires = ["setuptools", "wheel", "versioneer"] 3 | build-backend = "setuptools.build_meta" 4 |
-------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | astropy 2 | numpy 3 | pyarrow 4 | scipy 5 | iminuit 6 | matplotlib 7 | tqdm 8 |
-------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [metadata] 2 | name = skyllh 3 | description = The SkyLLH framework is an open-source Python3-based package licensed under the GPLv3 license. It provides a modular framework for implementing custom likelihood functions and executing log-likelihood ratio hypothesis tests. The idea is to provide a class structure tied to the mathematical objects of the likelihood functions. 
4 | long_description = file:README.md 5 | long_description_content_type = text/markdown 6 | url = https://github.com/icecube/skyllh 7 | author_email = martin.wolf@icecube.wisc.edu 8 | author = Martin Wolf 9 | requires_python = >=3.8.0 10 | license = GPL-3+ 11 | 12 | classifiers = 13 | Development Status :: 5 - Production/Stable 14 | Environment :: Console 15 | Intended Audience :: Science/Research 16 | License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+) 17 | Operating System :: POSIX 18 | Programming Language :: Python :: 3.10 19 | Topic :: Scientific/Engineering :: Physics 20 | 21 | project_urls = 22 | Bug Tracker = https://github.com/icecube/skyllh/issues 23 | Documentation = https://icecube.github.io/skyllh 24 | Source Code = https://github.com/icecube/skyllh 25 | 26 | 27 | [options] 28 | packages = find: 29 | install_requires = 30 | astropy 31 | numpy 32 | scipy 33 | tqdm 34 | 35 | 36 | [versioneer] 37 | VCS = git 38 | style = pep440 39 | versionfile_source = skyllh/_version.py 40 | versionfile_build = skyllh/_version.py 41 | tag_prefix = v 42 | parentdir_prefix = skyllh- 43 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | import versioneer 3 | 4 | setup( 5 | version=versioneer.get_version(), 6 | cmdclass=versioneer.get_cmdclass(), 7 | ) 8 | -------------------------------------------------------------------------------- /skyllh/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import logging 4 | import multiprocessing as mp 5 | 6 | from skyllh import _version 7 | 8 | # Initialize top-level logger with a do-nothing NullHandler. It is required to 9 | # be able to log messages when user has not set up any handler for the logger. 10 | logging.getLogger(__name__).addHandler(logging.NullHandler()) 11 | 12 | # Change macOS default multiprocessing start method 'spawn' to 'fork'. 13 | 14 | try: 15 | mp.set_start_method("fork") 16 | except Exception: 17 | # It could be already set by another package. 18 | if mp.get_start_method() != "fork": 19 | logging.warning( 20 | "Couldn't set the multiprocessing start method to 'fork'. " 21 | "Parallel calculations using 'ncpu' argument != 1 may break." 
22 | ) 23 | 24 | __version__ = _version.get_versions()['version'] 25 | -------------------------------------------------------------------------------- /skyllh/analyses/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/analyses/__init__.py -------------------------------------------------------------------------------- /skyllh/analyses/i3/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/analyses/i3/__init__.py -------------------------------------------------------------------------------- /skyllh/analyses/i3/publicdata_ps/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/analyses/i3/publicdata_ps/__init__.py -------------------------------------------------------------------------------- /skyllh/analyses/i3/publicdata_ps/scripts/mceq_atm_bkg.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import argparse 4 | import os.path 5 | import pickle 6 | 7 | import crflux.models as pm 8 | import mceq_config as config 9 | import numpy as np 10 | 11 | from MCEq.core import ( 12 | MCEqRun, 13 | ) 14 | 15 | from skyllh.analyses.i3.publicdata_ps.aeff import ( 16 | PDAeff, 17 | ) 18 | from skyllh.datasets.i3 import ( 19 | PublicData_10y_ps, 20 | ) 21 | 22 | 23 | def create_flux_file(save_path, ds): 24 | """Creates a pickle file containing the flux for the given dataset. 25 | """ 26 | output_filename = ds.get_aux_data_definition('mceq_flux_datafile')[0] 27 | output_pathfilename = '' 28 | if save_path is None: 29 | output_pathfilename = ds.get_abs_pathfilename_list([output_filename])[0] 30 | else: 31 | output_pathfilename = os.path.join( 32 | save_path, output_filename) 33 | 34 | print(f'Output path filename: {output_pathfilename}') 35 | 36 | # Load the effective area instance to get the binning information. 37 | aeff = PDAeff( 38 | os.path.join( 39 | ds.root_dir, 40 | ds.get_aux_data_definition('eff_area_datafile')[0] 41 | ) 42 | ) 43 | 44 | # Setup MCeq. 45 | config.e_min = float( 46 | 10**(np.max([aeff._log10_enu_binedges_lower[0], 2]))) 47 | config.e_max = float( 48 | 10**(np.min([aeff._log10_enu_binedges_upper[-1], 9])+0.05)) 49 | 50 | print(f'E_min = {config.e_min}') 51 | print(f'E_max = {config.e_max}') 52 | 53 | mceq = MCEqRun( 54 | interaction_model="SIBYLL2.3c", 55 | primary_model=(pm.HillasGaisser2012, "H3a"), 56 | theta_deg=0.0, 57 | density_model=("MSIS00_IC", ("SouthPole", "January")), 58 | ) 59 | 60 | print(f'MCEq log10(e_grid) = {np.log10(mceq.e_grid)}') 61 | 62 | mag = 0 63 | # Use the same binning as for the effective area. 
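    # For a detector at the South Pole, a neutrino arriving from declination
    # delta crosses the detector at zenith angle theta = delta + pi/2, so the
    # sin(dec) bin edges of the effective area map directly onto the MCEq
    # zenith angles computed below.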
64 | # theta = delta + pi/2 65 | print(f'sin_true_dec_binedges: {aeff.sin_decnu_binedges}') 66 | theta_angles_binedges = np.rad2deg( 67 | np.arcsin(aeff.sin_decnu_binedges) + np.pi/2 68 | ) 69 | theta_angles = 0.5*(theta_angles_binedges[:-1] + theta_angles_binedges[1:]) 70 | print(f'Theta angles = {theta_angles}') 71 | 72 | flux_def = dict() 73 | 74 | all_component_names = [ 75 | "numu_conv", 76 | "numu_pr", 77 | "numu_total", 78 | "mu_conv", 79 | "mu_pr", 80 | "mu_total", 81 | "nue_conv", 82 | "nue_pr", 83 | "nue_total", 84 | "nutau_pr", 85 | ] 86 | 87 | # Initialize empty grid 88 | for frac in all_component_names: 89 | flux_def[frac] = np.zeros( 90 | (len(mceq.e_grid), len(theta_angles))) 91 | 92 | # fluxes calculated for different theta_angles 93 | for ti, theta in enumerate(theta_angles): 94 | mceq.set_theta_deg(theta) 95 | mceq.solve() 96 | 97 | # same meaning of prefixes for muon neutrinos as for muons 98 | flux_def["mu_conv"][:, ti] = ( 99 | mceq.get_solution("conv_mu+", mag) + 100 | mceq.get_solution("conv_mu-", mag) 101 | ) 102 | 103 | flux_def["mu_pr"][:, ti] = ( 104 | mceq.get_solution("pr_mu+", mag) + 105 | mceq.get_solution("pr_mu-", mag) 106 | ) 107 | 108 | flux_def["mu_total"][:, ti] = ( 109 | mceq.get_solution("total_mu+", mag) + 110 | mceq.get_solution("total_mu-", mag) 111 | ) 112 | 113 | # same meaning of prefixes for muon neutrinos as for muons 114 | flux_def["numu_conv"][:, ti] = ( 115 | mceq.get_solution("conv_numu", mag) + 116 | mceq.get_solution("conv_antinumu", mag) 117 | ) 118 | 119 | flux_def["numu_pr"][:, ti] = ( 120 | mceq.get_solution("pr_numu", mag) + 121 | mceq.get_solution("pr_antinumu", mag) 122 | ) 123 | 124 | flux_def["numu_total"][:, ti] = ( 125 | mceq.get_solution("total_numu", mag) + 126 | mceq.get_solution("total_antinumu", mag) 127 | ) 128 | 129 | # same meaning of prefixes for electron neutrinos as for muons 130 | flux_def["nue_conv"][:, ti] = ( 131 | mceq.get_solution("conv_nue", mag) + 132 | mceq.get_solution("conv_antinue", mag) 133 | ) 134 | 135 | flux_def["nue_pr"][:, ti] = ( 136 | mceq.get_solution("pr_nue", mag) + 137 | mceq.get_solution("pr_antinue", mag) 138 | ) 139 | 140 | flux_def["nue_total"][:, ti] = ( 141 | mceq.get_solution("total_nue", mag) + 142 | mceq.get_solution("total_antinue", mag) 143 | ) 144 | 145 | # since there are no conventional tau neutrinos, prompt=total 146 | flux_def["nutau_pr"][:, ti] = ( 147 | mceq.get_solution("total_nutau", mag) + 148 | mceq.get_solution("total_antinutau", mag) 149 | ) 150 | print("\U0001F973") 151 | 152 | # Save the result to the output file. 153 | with open(output_pathfilename, 'wb') as f: 154 | pickle.dump(((mceq.e_grid, theta_angles_binedges), flux_def), f) 155 | print(f'Saved fluxes for dataset {ds.name} to: {output_pathfilename}') 156 | 157 | 158 | if __name__ == '__main__': 159 | 160 | parser = argparse.ArgumentParser( 161 | description='Generate atmospheric background fluxes with MCEq.' 162 | ) 163 | parser.add_argument( 164 | '-b', 165 | '--data-base-path', 166 | type=str, 167 | default='/data/ana/analyses', 168 | help='The base path of the data repository.' 
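        # Note: this default points at the IceCube-internal data warehouse;
        # when working with a local copy of the public data release, pass its
        # base directory via the -b option instead.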
169 | ) 170 | parser.add_argument( 171 | '-s', 172 | '--save-path', 173 | type=str, 174 | default=None 175 | ) 176 | 177 | args = parser.parse_args() 178 | 179 | dsc = PublicData_10y_ps.create_dataset_collection(args.data_base_path) 180 | 181 | dataset_names = ['IC40', 'IC59', 'IC79', 'IC86_I', 'IC86_II'] 182 | for ds_name in dataset_names: 183 | ds = dsc.get_dataset(ds_name) 184 | create_flux_file( 185 | save_path=args.save_path, 186 | ds=ds 187 | ) 188 | -------------------------------------------------------------------------------- /skyllh/cluster/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/cluster/__init__.py -------------------------------------------------------------------------------- /skyllh/cluster/commands.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import pickle 4 | 5 | from skyllh.core.py import ( 6 | int_cast, 7 | ) 8 | from skyllh.cluster.srvclt import ( 9 | Message, 10 | receive_object_from_socket, 11 | ) 12 | 13 | 14 | class Command(object): 15 | """Base class for a command. A command has a command string plus optional 16 | additional data. 17 | """ 18 | _cmd = '' 19 | 20 | def __init__(self): 21 | super(Command, self).__init__() 22 | 23 | def as_message(self): 24 | msg = pickle.dumps(self) 25 | return Message(msg) 26 | 27 | def send(self, sock): 28 | self.as_message().send(sock) 29 | 30 | def is_same_as(self, cmd): 31 | return (cmd._cmd == self._cmd) 32 | 33 | 34 | class ACK(Command): 35 | _cmd = 'ACK' 36 | 37 | def __init__(self): 38 | super(ACK, self).__init__() 39 | 40 | 41 | class MSG(Command): 42 | _cmd = 'MSG' 43 | 44 | def __init__(self, msg): 45 | super(MSG, self).__init__() 46 | 47 | self.msg = msg 48 | 49 | 50 | class ShutdownCN(Command): 51 | _cmd = 'SHUTDOWNCN' 52 | 53 | def __init__(self): 54 | super(ShutdownCN, self).__init__() 55 | 56 | 57 | class RegisterCN(Command): 58 | _cmd = 'REGCN' 59 | 60 | def __init__(self, cn_start_time, cn_live_time): 61 | """Creates a register compute node command. 62 | 63 | Parameters 64 | ---------- 65 | cn_start_time : int 66 | The compute node's start time as unix time stamp. 67 | cn_live_time : int 68 | The compute node's live time. After that time the CN should be 69 | considered dead. 70 | """ 71 | super(RegisterCN, self).__init__() 72 | 73 | self.cn_start_time = cn_start_time 74 | self.cn_live_time = cn_live_time 75 | 76 | @property 77 | def cn_start_time(self): 78 | """The CN's start time as unix time stamp. 79 | """ 80 | return self._cn_start_time 81 | 82 | @cn_start_time.setter 83 | def cn_start_time(self, t): 84 | t = int_cast( 85 | t, 86 | 'The cn_start_time property must be castable to type int!') 87 | self._cn_start_time = t 88 | 89 | @property 90 | def cn_live_time(self): 91 | """The CN's live time in seconds. 92 | """ 93 | return self._cn_live_time 94 | 95 | @cn_live_time.setter 96 | def cn_live_time(self, t): 97 | t = int_cast( 98 | t, 99 | 'The cn_live_time property must be castable to type int!') 100 | self._cn_live_time = t 101 | 102 | 103 | def receive_command_from_socket(sock, blocksize=2048): 104 | """Receives a command from the given socket. 
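
    The payload is expected to be a pickled Command instance, i.e. the
    counterpart of what Command.send() writes to the socket.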
105 | """ 106 | return receive_object_from_socket(sock, blocksize=blocksize) 107 | -------------------------------------------------------------------------------- /skyllh/cluster/compute_node.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import argparse 4 | import socket 5 | import time 6 | 7 | from skyllh.cluster.commands import ( 8 | ACK, 9 | MSG, 10 | RegisterCN, 11 | ShutdownCN, 12 | receive_command_from_socket, 13 | ) 14 | from skyllh.core.py import ( 15 | int_cast, 16 | ) 17 | 18 | 19 | class ComputeNode(object): 20 | """The ComputeNode class provides an entity for a stand-alone program running 21 | on a dedicated compute node host. 22 | """ 23 | def __init__(self, live_time, master_addr, master_port): 24 | super(ComputeNode, self).__init__() 25 | 26 | self.live_time = live_time 27 | self.master_addr = master_addr 28 | self.master_port = master_port 29 | 30 | self._start_time = time.time() 31 | 32 | # Register the compute node to the master node. 33 | self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 34 | self.sock.connect((self.master_addr, self.master_port)) 35 | 36 | # Send the register command to the master node and wait for its ACK. 37 | RegisterCN(self._start_time, self._live_time).send(self.sock) 38 | reply = receive_command_from_socket(self.sock) 39 | if not reply.is_same_as(ACK): 40 | raise RuntimeError( 41 | 'The master node did not reply with an ACK command!') 42 | 43 | print(f'Registered to master {self._master_addr}:{self._master_port}') 44 | print(f'Live-time set to {self._live_time} seconds') 45 | 46 | def __del__(self): 47 | self.sock.close() 48 | 49 | @property 50 | def live_time(self): 51 | """The time in seconds this ComputeNode instance should be listening for 52 | requests. 53 | """ 54 | return self._live_time 55 | 56 | @live_time.setter 57 | def live_time(self, t): 58 | t = int_cast( 59 | t, 60 | 'The live_time property must be castable to type int!') 61 | self._live_time = t 62 | 63 | @property 64 | def master_addr(self): 65 | """The address of the SkyLLH master program. 66 | """ 67 | return self._master_addr 68 | 69 | @master_addr.setter 70 | def master_addr(self, addr): 71 | if not isinstance(addr, str): 72 | raise TypeError( 73 | 'The master_addr property must be of type str!') 74 | self._master_addr = addr 75 | 76 | @property 77 | def master_port(self): 78 | """The port number of the SkyLLH master program. 79 | """ 80 | return self._master_port 81 | 82 | @master_port.setter 83 | def master_port(self, p): 84 | p = int_cast( 85 | p, 86 | 'The master_port property must be castable to type int!') 87 | self._master_port = p 88 | 89 | def handle_requests(self): 90 | if time.time() > self._start_time + self._live_time: 91 | raise RuntimeError('Live-time already exceeded!') 92 | 93 | while True: 94 | # Receive a command. 95 | cmd = receive_command_from_socket(self.sock) 96 | if cmd.is_same_as(MSG): 97 | print(f'Received general message: {cmd.msg}') 98 | elif cmd.is_same_as(ShutdownCN): 99 | print('Received shutdown command. Shutting down.') 100 | self.sock.close() 101 | return 102 | else: 103 | print('Received unknown command! Ignoring.') 104 | 105 | if time.time() > self._start_time + self._live_time: 106 | print('Live-time exceeded. 
Shutting down.') 107 | return 108 | 109 | 110 | if __name__ == '__main__': 111 | 112 | parser = argparse.ArgumentParser(description='SkyLLH Compute Node') 113 | parser.add_argument( 114 | 'master_addr', type=str, 115 | help='The address (IP / hostname) of the SkyLLH master program.') 116 | parser.add_argument( 117 | 'master_port', type=int, default=9999, 118 | help='The port number of the SkyLLH master program.') 119 | parser.add_argument( 120 | '--live-time', type=int, default=2*60*60, 121 | help='The time in seconds to run this compute node instance.') 122 | 123 | args = parser.parse_args() 124 | 125 | cn = ComputeNode( 126 | live_time=args.live_time, 127 | master_addr=args.master_addr, 128 | master_port=args.master_port) 129 | 130 | cn.handle_requests() 131 | -------------------------------------------------------------------------------- /skyllh/cluster/master_node.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import logging 4 | import socket 5 | 6 | from skyllh.cluster.commands import ( 7 | ACK, 8 | Command, 9 | MSG, 10 | ShutdownCN, 11 | RegisterCN, 12 | receive_command_from_socket, 13 | ) 14 | 15 | 16 | class CNRegistryEntry(object): 17 | """This class provides an registry entry for a compute node. It holds the 18 | socket to the compute node. 19 | """ 20 | def __init__(self, sock, addr, port, cn_start_time, cn_live_time): 21 | super(CNRegistryEntry, self).__init__() 22 | 23 | self.sock = sock 24 | self.addr = addr 25 | self.port = port 26 | self.cn_start_time = cn_start_time 27 | self.cn_live_time = cn_live_time 28 | 29 | def __del__(self): 30 | self.sock.close() 31 | 32 | @property 33 | def key(self): 34 | """(read-only) The CN's identification key. 35 | """ 36 | return f'{self.addr:s}:{self.port:d}' 37 | 38 | def send_command(self, cmd): 39 | if not isinstance(cmd, Command): 40 | raise TypeError( 41 | 'The cmd argument must be an instance of Command!') 42 | cmd.send(self.sock) 43 | 44 | 45 | class MasterNode(object): 46 | """The MasterNode class provides an entity to run the SkyLLH program as a 47 | master node, where compute nodes can register to, so the master node can 48 | distribute work to the compute nodes. The work distribution is handled 49 | through the MasterNode instance. 50 | """ 51 | def __init__(self): 52 | super(MasterNode, self).__init__() 53 | 54 | self.cn_registry = dict() 55 | 56 | @property 57 | def cn_registry(self): 58 | """The dictionary with the registered compute nodes. 59 | """ 60 | return self._cn_registry 61 | 62 | @cn_registry.setter 63 | def cn_registry(self, d): 64 | if not isinstance(d, dict): 65 | raise TypeError( 66 | 'The cn_registry property must be of type dict!') 67 | self._cn_registry = d 68 | 69 | def clear_cn_registry(self): 70 | # Close the sockets to all the CNs. 
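        # Closing each socket explicitly releases the connections right away,
        # instead of relying on CNRegistryEntry.__del__ to run once the old
        # registry dict is dropped below.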
71 | for (cn_key, cn) in self.cn_registry.items(): 72 | cn.sock.close() 73 | 74 | self.cn_registry = dict() 75 | 76 | def register_compute_nodes(self, n_cn=10, master_port=9999, blocksize=2048): 77 | logger = logging.getLogger(__name__) 78 | 79 | logger.debug( 80 | 'Clearing the CN registry') 81 | self.clear_cn_registry() 82 | 83 | logger.debug( 84 | 'Creating server TCP/IP socket') 85 | serversock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) 86 | 87 | # bind the socket to a public host, and a well-known port 88 | master_hostname = socket.getfqdn(socket.gethostname()) 89 | logger.debug( 90 | 'Listening on %s:%d with %d simulanious allowed connections', 91 | master_hostname, master_port, n_cn) 92 | serversock.bind((master_hostname, master_port)) 93 | serversock.listen(n_cn) 94 | 95 | try: 96 | while len(self._cn_registry) < n_cn: 97 | # Accept connections from the compute nodes in order to register 98 | # them. 99 | (clientsock, (addr, port)) = serversock.accept() 100 | logger.debug( 101 | 'Got inbound connection from %s:%d', addr, port) 102 | 103 | cmd = receive_command_from_socket( 104 | clientsock, blocksize=blocksize) 105 | if not cmd.is_same_as(RegisterCN): 106 | raise RuntimeError( 107 | 'The compute node provided an unknown command ' 108 | f'"{cmd.as_message().msg}"!') 109 | ACK().send(clientsock) 110 | 111 | cn = CNRegistryEntry( 112 | clientsock, addr, port, cmd.cn_start_time, cmd.cn_live_time) 113 | self._cn_registry[cn.key] = cn 114 | finally: 115 | serversock.close() 116 | 117 | def send_request(self, msg): 118 | for (cn_key, cn) in self.cn_registry.items(): 119 | cn.send_command(MSG(msg)) 120 | 121 | def shutdown_compute_nodes(self): 122 | """Sends a stop command to all compute nodes. 123 | """ 124 | for (cn_key, cn) in self._cn_registry.items(): 125 | cn.send_command(ShutdownCN()) 126 | -------------------------------------------------------------------------------- /skyllh/cluster/srvclt.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import pickle 4 | 5 | from skyllh.core.py import ( 6 | str_cast, 7 | ) 8 | 9 | 10 | class Message(object): 11 | @staticmethod 12 | def receive(sock, blocksize=2048, as_bytes=False): 13 | """Receives a message from the given socket. 14 | 15 | Parameters 16 | ---------- 17 | blocksize : int 18 | The size in bytes of the block that should be read from the socket 19 | at once. 20 | as_bytes : bool 21 | If set to ``True`` the Message instance will contain a bytes 22 | message, otherwise it is converted to a str. 23 | 24 | Returns 25 | ------- 26 | m : Message 27 | The Message instance created with the message read from the socket. 28 | """ 29 | # Get the first 2 bytes to determine the length of the message. 30 | msglen = int.from_bytes( 31 | read_from_socket(sock, 2, blocksize=blocksize), 'little') 32 | 33 | # Read the message of length msglen bytes from the socket. Here, msg is 34 | # a bytes object. 35 | msg = read_from_socket(sock, msglen, blocksize=blocksize) 36 | 37 | if as_bytes: 38 | return Message(msg) 39 | 40 | return Message(str(msg, 'utf-8')) 41 | 42 | def __init__(self, msg): 43 | """Creates a new Message instance. 44 | 45 | Parameters 46 | ---------- 47 | msg : str | bytes 48 | The message string of this Message instance. 49 | """ 50 | self.msg = msg 51 | 52 | @property 53 | def msg(self): 54 | """The message string. This is either a bytes instance or a str 55 | instance. 
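
        Assigned values are kept as bytes if they are of type bytes,
        otherwise they are cast to type str.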
56 | """ 57 | return self._msg 58 | 59 | @msg.setter 60 | def msg(self, m): 61 | if not isinstance(m, bytes): 62 | m = str_cast( 63 | m, 64 | 'The msg property must be of type bytes or castable to type ' 65 | 'str!') 66 | self._msg = m 67 | 68 | @property 69 | def length(self): 70 | """The length of the message in bytes. 71 | """ 72 | return len(self.msg) 73 | 74 | def as_socket_msg(self): 75 | """Converts this message to a bytes instance that can be send through a 76 | socket. The first two bytes hold the length of the message. 77 | """ 78 | smsg = len(self.msg).to_bytes(2, 'little') 79 | if isinstance(self.msg, bytes): 80 | smsg += self.msg 81 | else: 82 | smsg += bytes(self.msg, 'utf-8') 83 | 84 | return smsg 85 | 86 | def send(self, sock): 87 | send_to_socket(sock, self.as_socket_msg()) 88 | 89 | 90 | def send_to_socket(sock, msg): 91 | msglen = len(msg) 92 | n_bytes_sent = 0 93 | while n_bytes_sent < msglen: 94 | sent = sock.send(msg[n_bytes_sent:]) 95 | if sent == 0: 96 | raise RuntimeError('Socket connection broken!') 97 | n_bytes_sent += sent 98 | 99 | 100 | def read_from_socket(sock, size, blocksize=2048): 101 | """Reads ``size`` bytes from the socket ``sock``. 102 | """ 103 | chunks = [] 104 | n_bytes_recd = 0 105 | while (n_bytes_recd < size): 106 | chunk = sock.recv(min(size - n_bytes_recd, blocksize)) 107 | if chunk == b'': 108 | raise RuntimeError('Socket connection broken!') 109 | chunks.append(chunk) 110 | n_bytes_recd += len(chunk) 111 | return b''.join(chunks) 112 | 113 | 114 | def receive_object_from_socket(sock, blocksize=2048): 115 | """Receives a pickled Python object from the given socket. 116 | 117 | Parameters 118 | ---------- 119 | sock : socket 120 | """ 121 | m = Message.receive(sock, blocksize, as_bytes=True) 122 | obj = pickle.loads(m.msg) 123 | return obj 124 | -------------------------------------------------------------------------------- /skyllh/core/README.txt: -------------------------------------------------------------------------------- 1 | The ``core`` module holds all the code that defines the framework of SkyLLH. 2 | It should not be neccessary for users to change code of this module, unless 3 | there is a need to change the overall framework design. -------------------------------------------------------------------------------- /skyllh/core/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from skyllh.core import ( 4 | session, 5 | ) 6 | 7 | # Automatically enable interactive mode, if the Python interpreter is in 8 | # interactive mode. 9 | if session.is_python_interpreter_in_interactive_mode(): 10 | session.enable_interactive_session() 11 | else: 12 | session.disable_interactive_session() 13 | -------------------------------------------------------------------------------- /skyllh/core/backgroundpdf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """The ``backgroundpdf`` module contains background PDF classes for the 4 | likelihood function. 5 | """ 6 | 7 | import numpy as np 8 | 9 | from skyllh.core.pdf import ( 10 | IsBackgroundPDF, 11 | MultiDimGridPDF, 12 | TimePDF, 13 | ) 14 | from skyllh.core.py import ( 15 | classname, 16 | ) 17 | 18 | 19 | class BackgroundMultiDimGridPDF( 20 | MultiDimGridPDF, 21 | IsBackgroundPDF): 22 | """This class provides a multi-dimensional background PDF defined on a grid. 23 | The PDF is created from pre-calculated PDF data on a grid. 
The grid data is 24 | interpolated using a :class:`scipy.interpolate.RegularGridInterpolator` 25 | instance. 26 | """ 27 | 28 | def __init__( 29 | self, 30 | *args, 31 | **kwargs): 32 | """Creates a new :class:`~skyllh.core.pdf.MultiDimGridPDF` instance that 33 | is also derived from :class:`~skyllh.core.pdf.IsBackgroundPDF`. 34 | 35 | For the documentation of arguments see the documentation of the 36 | :meth:`~skyllh.core.pdf.MultiDimGridPDF.__init__` method. 37 | """ 38 | super().__init__(*args, **kwargs) 39 | 40 | 41 | class BackgroundTimePDF( 42 | TimePDF, 43 | IsBackgroundPDF): 44 | """This class provides a background time PDF. 45 | """ 46 | 47 | def __init__( 48 | self, 49 | livetime, 50 | time_flux_profile, 51 | **kwargs): 52 | """Creates a new background time PDF instance for a given time flux profile 53 | and detector live time. 54 | 55 | Parameters 56 | ---------- 57 | livetime : instance of Livetime 58 | An instance of Livetime, which provides the detector live-time 59 | information. 60 | time_flux_profile : instance of TimeFluxProfile 61 | The background's time flux profile. 62 | """ 63 | super().__init__( 64 | pmm=None, 65 | livetime=livetime, 66 | time_flux_profile=time_flux_profile, 67 | **kwargs) 68 | 69 | self._pd = None 70 | 71 | def initialize_for_new_trial( 72 | self, 73 | tdm, 74 | tl=None, 75 | **kwargs): 76 | """Initializes the background time PDF with new trial data. Because this 77 | PDF does not depend on any parameters, the probability density values 78 | can be pre-computed here. 79 | 80 | Parameters 81 | ---------- 82 | tdm : instance of TrialDataManager 83 | The instance of TrialDataManager holding the trial event data for 84 | which to calculate the PDF value. The following data fields must 85 | exist: 86 | 87 | ``'time'`` : float 88 | The MJD time of the event. 89 | 90 | tl : instance of TimeLord | None 91 | The optional TimeLord instance that should be used to measure 92 | timing information. 93 | """ 94 | times = tdm.get_data('time') 95 | 96 | self._pd = np.zeros((len(times),), dtype=np.float64) 97 | 98 | # Get a mask of the event times which fall inside a detector on-time 99 | # interval. 100 | on = self._livetime.is_on(times) 101 | 102 | self._pd[on] = self._time_flux_profile(t=times[on]) / self._S 103 | 104 | def get_pd( 105 | self, 106 | tdm, 107 | params_recarray=None, 108 | tl=None): 109 | """ 110 | Parameters 111 | ---------- 112 | tdm : instance of TrialDataManager 113 | The instance of TrialDataManager holding the trial event data for 114 | which to calculate the PDF value. The following data fields must 115 | exist: 116 | 117 | ``'time'`` : float 118 | The MJD time of the event. 119 | 120 | params_recarray : None 121 | Unused interface argument. 122 | tl : instance of TimeLord | None 123 | The optional TimeLord instance that should be used to measure 124 | timing information. 125 | 126 | Returns 127 | ------- 128 | pd : instance of numpy ndarray 129 | The (N_events,)-shaped numpy ndarray holding the background 130 | probability density value for each event. 131 | grads : dict 132 | The dictionary holding the gradients of the probability density 133 | w.r.t. each global fit parameter. 134 | The background PDF does not depend on any global fit parameter, 135 | hence, this is an empty dictionary.
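
        Examples
        --------
        A minimal sketch, assuming ``livetime`` is a Livetime instance,
        ``profile`` is a TimeFluxProfile instance, ``tdm`` is a
        TrialDataManager instance providing the ``'time'`` data field, and
        any further keyword arguments required by the TimePDF base class are
        supplied as well::

            pdf = BackgroundTimePDF(
                livetime=livetime,
                time_flux_profile=profile)
            pdf.initialize_for_new_trial(tdm=tdm)
            (pd, grads) = pdf.get_pd(tdm=tdm)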
136 | """ 137 | if self._pd is None: 138 | raise RuntimeError( 139 | f'The {classname(self)} was not initialized with trial data!') 140 | 141 | grads = dict() 142 | 143 | return (self._pd, grads) 144 | -------------------------------------------------------------------------------- /skyllh/core/catalog.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Dr. Martin Wolf 3 | 4 | """This module provides classes for defining source catalogs. 5 | """ 6 | 7 | from skyllh.core.py import ( 8 | str_cast, 9 | ) 10 | from skyllh.core.source_model import ( 11 | SourceModelCollection, 12 | ) 13 | 14 | 15 | class SourceCatalog( 16 | SourceModelCollection): 17 | """This class describes a catalog of sources. It is derived from 18 | SourceModelCollection. A catalog has a name. 19 | """ 20 | def __init__( 21 | self, 22 | name, 23 | sources=None, 24 | source_type=None, 25 | **kwargs): 26 | """Creates a new source catalog. 27 | 28 | Parameters 29 | ---------- 30 | name : str 31 | The name of the catalog. 32 | sources : sequence of source_type | None 33 | The sequence of sources this catalog should be initialized with. 34 | source_type : type | None 35 | The type of the source class. If set to None (default), the 36 | default type defined by SourceCollection will be used. 37 | """ 38 | super().__init__( 39 | sources=sources, 40 | source_type=source_type, 41 | **kwargs) 42 | 43 | self.name = name 44 | 45 | @property 46 | def name(self): 47 | """The name of the catalog. 48 | """ 49 | return self._name 50 | 51 | @name.setter 52 | def name(self, name): 53 | name = str_cast( 54 | name, 55 | 'The name property must be cast-able to type str!') 56 | self._name = name 57 | 58 | def __str__(self): 59 | s = f'"{self.name}" {super().__str__()}' 60 | return s 61 | 62 | def as_SourceModelCollection(self): 63 | """Creates a SourceModelCollection object for this catalog and 64 | returns it. 65 | 66 | Returns 67 | ------- 68 | source_model_collection : instance of SourceModelCollection 69 | The created instance of SourceModelCollection. 70 | """ 71 | return SourceModelCollection( 72 | sources=self.sources, 73 | source_type=self.source_type) 74 | -------------------------------------------------------------------------------- /skyllh/core/datafields.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """This module provides functionality for defining which data fields of a data 4 | file is required at what stage. 5 | """ 6 | 7 | 8 | class DataFieldStages( 9 | object, 10 | ): 11 | """This class provides the data field stage values, which are individual 12 | bits of an integer value to allow for multiple stages of a data field. 13 | """ 14 | 15 | DATAPREPARATION_EXP = 1 16 | DATAPREPARATION_MC = 2 17 | ANALYSIS_EXP = 4 18 | ANALYSIS_MC = 8 19 | 20 | @staticmethod 21 | def and_check( 22 | stage, 23 | stages, 24 | ): 25 | """Checks if the given stage matches all of the given stages. 26 | 27 | Parameters 28 | ---------- 29 | stage : int 30 | The stage value, which should get checked. 31 | stages : int | sequence of int 32 | The stage(s) to check for. 33 | 34 | Returns 35 | ------- 36 | check : bool 37 | ``True`` if the given stage contains all of the given stages, 38 | ``False`` otherwise. 
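
        Examples
        --------
        Stage values are single bits and combine via bitwise OR::

            >>> stage = (DataFieldStages.ANALYSIS_EXP |
            ...          DataFieldStages.ANALYSIS_MC)
            >>> DataFieldStages.and_check(
            ...     stage, [DataFieldStages.ANALYSIS_EXP,
            ...             DataFieldStages.ANALYSIS_MC])
            True
            >>> DataFieldStages.and_check(
            ...     stage, DataFieldStages.DATAPREPARATION_EXP)
            False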
39 | """ 40 | if isinstance(stages, int): 41 | return (stage & stages == stages) 42 | 43 | for stage_ in stages: 44 | if stage & stage_ != stage_: 45 | return False 46 | 47 | return True 48 | 49 | @staticmethod 50 | def or_check( 51 | stage, 52 | stages, 53 | ): 54 | """Checks if the given stage matches any of the given stages. 55 | 56 | Parameters 57 | ---------- 58 | stage : int 59 | The stage value, which should get checked. 60 | stages : int | sequence of int 61 | The stage(s) to check for. 62 | 63 | Returns 64 | ------- 65 | check : bool 66 | ``True`` if the given stage contains any of the given stages, 67 | ``False`` otherwise. 68 | """ 69 | if isinstance(stages, int): 70 | return (stage & stages != 0) 71 | 72 | for stage_ in stages: 73 | if stage & stage_ != 0: 74 | return True 75 | 76 | return False 77 | 78 | 79 | class DataFields( 80 | object, 81 | ): 82 | @staticmethod 83 | def get_joint_names( 84 | datafields, 85 | stages, 86 | ): 87 | """Returns the list of data field names that match at least one of the 88 | given stages, i.e. the joint set of data fields given the stages. 89 | 90 | Parameters 91 | ---------- 92 | datafields : dict 93 | The dictionary of data field names as keys and stages as values. 94 | stages : int | sequence of int 95 | The stage(s) for which data field names should get returned. 96 | 97 | Returns 98 | ------- 99 | datafield_names : list of str 100 | The list of data field names. 101 | """ 102 | datafield_names = [ 103 | field 104 | for (field, stage) in datafields.items() 105 | if DataFieldStages.or_check(stage, stages) 106 | ] 107 | 108 | return datafield_names 109 | -------------------------------------------------------------------------------- /skyllh/core/debugging.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import logging 4 | import os.path 5 | import sys 6 | 7 | 8 | def get_logger( 9 | name): 10 | """Retrieves the logger with the given name from the Python logging system. 11 | 12 | Parameters 13 | ---------- 14 | name : str 15 | The name of the logger. 16 | Logger hierarchy is defined using dots as separators. 17 | 18 | Returns 19 | ------- 20 | logger : logging.Logger 21 | The Logger instance. 22 | """ 23 | logger = logging.getLogger(name) 24 | return logger 25 | 26 | 27 | def setup_logger( 28 | name, 29 | log_level): 30 | """Initializes logger with a given name and a log level. 31 | 32 | Parameters 33 | ---------- 34 | name : str 35 | Logger name. Loggers hierarchy is defined using dots as separators. 36 | log_level : int 37 | The log level. The ``logging`` module predefines levels, e.g. 38 | ``logging.DEBUG``. 39 | """ 40 | logger = logging.getLogger(name) 41 | logger.setLevel(log_level) 42 | 43 | 44 | def setup_console_handler( 45 | cfg, 46 | name, 47 | log_level=None, 48 | log_format=None, 49 | stream=None): 50 | """Initializes `StreamHandler` for a logger with a given name and sets its 51 | handling level. 52 | 53 | Parameters 54 | ---------- 55 | cfg : instance of Config 56 | The instance of Config holding the local configuration. 57 | name : str 58 | Logger name. Loggers hierarchy is defined using dots as separators. 59 | log_level : int | None 60 | The log level. The ``logging`` module predefines levels, e.g. 61 | ``logging.DEBUG``. 62 | If set to None, the log level of the logger will be used. 63 | log_format : str | None 64 | The format of log records in the final output. 65 | If set to `None`, the log format is taken from the configuration. 
66 | stream : data stream | None 67 | The stream that the handler should use. Default stream is `sys.stderr`. 68 | """ 69 | logger = logging.getLogger(name) 70 | 71 | if log_level is None: 72 | log_level = logger.level 73 | 74 | if log_format is None: 75 | log_format = cfg['debugging']['log_format'] 76 | 77 | if stream is None: 78 | stream = sys.stderr 79 | 80 | # Create and add `StreamHandler` to the logger. 81 | sh = logging.StreamHandler(stream=stream) 82 | sh.setLevel(log_level) 83 | sh.setFormatter(logging.Formatter(log_format)) 84 | logger.addHandler(sh) 85 | 86 | 87 | def setup_file_handler( 88 | cfg, 89 | name, 90 | filename, 91 | log_level=None, 92 | path=None, 93 | log_format=None, 94 | mode='a'): 95 | """Initializes `FileHandler` for a logger with a given name and sets its 96 | handling level. 97 | 98 | Parameters 99 | ---------- 100 | cfg : instance of Config 101 | The instance of Config holding the local configuration. 102 | name : str 103 | Logger name. Loggers hierarchy is defined using dots as separators. 104 | log_level : int | None 105 | The log level. There are predefined levels, e.g. ``logging.DEBUG``. 106 | If set to None, the log level of the logger will be used. 107 | filename : str 108 | The filename of the specified file which is opened and used as the 109 | stream for logging. 110 | path : str | None 111 | The path under which the log file should be stored. 112 | If set to `None`, the project's working directory will be used. 113 | log_format : str | None 114 | The format of log records in the final output. 115 | If set to `None`, the log format is taken from the configuration. 116 | mode : str 117 | File opening mode. Default is 'a' for appending. 118 | """ 119 | logger = logging.getLogger(name) 120 | 121 | if log_level is None: 122 | log_level = logger.level 123 | 124 | if path is None: 125 | path = cfg['project']['working_directory'] 126 | 127 | if log_format is None: 128 | log_format = cfg['debugging']['log_format'] 129 | 130 | pathfilename = os.path.join(path, filename) 131 | 132 | # Create and add `FileHandler` to the logger. 133 | fh = logging.FileHandler(pathfilename, mode=mode) 134 | fh.setLevel(log_level) 135 | fh.setFormatter(logging.Formatter(log_format)) 136 | logger.addHandler(fh) 137 | -------------------------------------------------------------------------------- /skyllh/core/display.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """The display module provides global settings for pretty command line 4 | displaying. 5 | """ 6 | 7 | # Define the width (number of characters) of the display. 8 | PAGE_WIDTH = 80 9 | 10 | # Define the width (number of characters) for each text block indentation. 11 | INDENTATION_WIDTH = 4 12 | 13 | 14 | class ANSIColors: 15 | """This class defines the ANSI color codes, which can be used to change 16 | the text color in a terminal. 17 | """ 18 | HEADER = '\033[95m' 19 | OKBLUE = '\033[94m' 20 | OKGREEN = '\033[92m' 21 | WARNING = '\033[93m' 22 | FAIL = '\033[91m' 23 | ENDC = '\033[0m' 24 | BOLD = '\033[1m' 25 | UNDERLINE = '\033[4m' 26 | 27 | 28 | def add_leading_text_line_padding(padwidth, text): 29 | """Adds leading white spaces to all the lines of the given text. 30 | 31 | Parameters 32 | ---------- 33 | padwidth : int 34 | The width of the padding. 35 | text : str 36 | The text with new line characters for each line. 
37 | 38 | Returns 39 | ------- 40 | padded_text : str 41 | The text where each line is padded with the given number of whitespaces. 42 | """ 43 | return '\n'.join([' '*padwidth + line for line in text.split('\n')]) 44 | -------------------------------------------------------------------------------- /skyllh/core/expectation_maximization.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | from scipy.stats import norm 3 | 4 | 5 | def em_expectation_step( 6 | ns, 7 | mu, 8 | sigma, 9 | t, 10 | sob, 11 | ): 12 | """Expectation step of expectation maximization algorithm. 13 | 14 | Parameters 15 | ---------- 16 | ns : instance of ndarray 17 | The (n_flares,)-shaped numpy ndarray holding the number of signal 18 | neutrinos, as weight for each gaussian flare. 19 | mu : instance of ndarray 20 | The (n_flares,)-shaped numpy ndarray holding the mean for each gaussian 21 | flare. 22 | sigma: instance of ndarray 23 | The (n_flares,)-shaped numpy ndarray holding the sigma for each gaussian 24 | flare. 25 | t : instance of ndarray 26 | The (n_events,)-shaped numpy ndarray holding the time of each event. 27 | sob : instance of ndarray 28 | The (n_events,)-shaped numpy ndarray holding the signal-over-background 29 | values of each event. 30 | 31 | Returns 32 | ------- 33 | expectations : instane of ndarray 34 | The (n_flares, n_events)-shaped numpy ndarray holding the expectation 35 | of each flare and event. 36 | llh : float 37 | The log-likelihood value, which is the sum of log of the signal and 38 | background expectations. 39 | """ 40 | n_flares = len(ns) 41 | 42 | b_term = (1 - np.cos(10 / 180 * np.pi)) / 2 43 | N = len(t) 44 | e_sig = np.empty((n_flares, N), dtype=np.float64) 45 | for i in range(n_flares): 46 | e_sig[i] = norm(loc=mu[i], scale=sigma[i]).pdf(t) 47 | e_sig[i] *= sob 48 | e_sig[i] *= ns[i] 49 | e_bkg = (N - np.sum(ns)) / (np.max(t) - np.min(t)) / b_term 50 | denom = np.sum(e_sig, axis=0) + e_bkg 51 | 52 | expectations = e_sig / denom 53 | llh = np.sum(np.log(denom)) 54 | 55 | return (expectations, llh) 56 | 57 | 58 | def em_maximization_step( 59 | e, 60 | t, 61 | ): 62 | """The maximization step of the expectation maximization algorithm. 63 | 64 | Parameters 65 | ---------- 66 | e : instance of ndarray 67 | The (n_flares, n_events)-shaped numpy ndarray holding the expectation 68 | for each event and flare. 69 | t : 1d ndarray of float 70 | The times of each event. 71 | 72 | Returns 73 | ------- 74 | mu : list of float 75 | Best fit mean time of the gaussian flare. 76 | sigma : list of float 77 | Best fit sigma of the gaussian flare. 78 | ns : list of float 79 | Best fit number of signal neutrinos, as weight for the gaussian flare. 80 | """ 81 | mu = [] 82 | sigma = [] 83 | ns = [] 84 | for i in range(e.shape[0]): 85 | mu.append(np.average(t, weights=e[i])) 86 | sigma.append(np.sqrt(np.average(np.square(t - mu[i]), weights=e[i]))) 87 | ns.append(np.sum(e[i])) 88 | sigma = [max(1, s) for s in sigma] 89 | 90 | return (mu, sigma, ns) 91 | 92 | 93 | def em_fit( 94 | x, 95 | weights, 96 | n=1, 97 | tol=1.e-200, 98 | iter_max=500, 99 | weight_thresh=0, 100 | initial_width=5000, 101 | remove_x=None, 102 | ): 103 | """Perform the expectation maximization fit. 104 | 105 | Parameters 106 | ---------- 107 | x : array of float 108 | The quantity to run EM on (e.g. the time if EM should find time flares). 109 | weights : array of float 110 | The weights for each x value (e.g. the signal over background ratio). 
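        Must be a one-dimensional array of the same length as ``x``.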
111 | n : int 112 | How many Gaussian flares we are looking for. 113 | tol : float 114 | The stopping criteria for the expectation maximization. This is the 115 | difference in the normalized likelihood over the last 20 iterations. 116 | iter_max : int 117 | The maximum number of iterations, even if stopping criteria tolerance 118 | (``tol``) is not yet reached. 119 | weight_thresh : float 120 | Set a minimum threshold for event weights. Events with smaller weights 121 | will be removed. 122 | initial_width : float 123 | The starting width for the gaussian flare in days. 124 | remove_x : float | None 125 | The specific x value of an event that should be removed. 126 | 127 | Returns 128 | ------- 129 | mu : list of float 130 | The list of size ``n`` with the determined mean values. 131 | sigma : list of float 132 | The list of size ``n`` with the standard deviation values. 133 | ns : list of float 134 | The list of size ``n`` with the normalization factor values. 135 | """ 136 | if weight_thresh > 0: 137 | # Remove events with weights below the threshold, keeping x and 138 | # weights aligned. 139 | mask = weights > weight_thresh 140 | weights = weights[mask] 141 | x = x[mask] 142 | 143 | if remove_x is not None: 144 | # Remove data point. 145 | mask = x == remove_x 146 | weights = weights[~mask] 147 | x = x[~mask] 148 | 149 | # Do the expectation maximization. 150 | mu = np.linspace(x[0], x[-1], n+2)[1:-1] 151 | sigma = np.full((n,), initial_width) 152 | ns = np.full((n,), 10) 153 | 154 | llh_diff = 100 155 | llh_old = 0 156 | llh_diff_list = [100] * 20 157 | 158 | # Run until convergence or maximum number of iterations is reached. 159 | iteration = 0 160 | while (iteration < iter_max) and (llh_diff > tol): 161 | iteration += 1 162 | 163 | (e, llh_new) = em_expectation_step( 164 | ns=ns, 165 | mu=mu, 166 | sigma=sigma, 167 | t=x, 168 | sob=weights) 169 | 170 | tmp_diff = np.abs(llh_old - llh_new) / llh_new 171 | llh_diff_list = llh_diff_list[:-1] 172 | llh_diff_list.insert(0, tmp_diff) 173 | llh_diff = np.max(llh_diff_list) 174 | 175 | llh_old = llh_new 176 | 177 | (mu, sigma, ns) = em_maximization_step( 178 | e=e, 179 | t=x) 180 | 181 | return (mu, sigma, ns) 182 | -------------------------------------------------------------------------------- /skyllh/core/math.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """The ``math`` module contains classes for pure mathematical objects. 4 | """ 5 | 6 | import abc 7 | from copy import ( 8 | deepcopy, 9 | ) 10 | import numpy as np 11 | 12 | from skyllh.core.py import ( 13 | classname, 14 | isproperty, 15 | issequence, 16 | issequenceof, 17 | ) 18 | 19 | 20 | class MathFunction( 21 | object, 22 | metaclass=abc.ABCMeta): 23 | """This abstract base class provides an implementation for a mathematical 24 | function. Such a function has defined parameters, which are implemented as 25 | class properties. The tuple of parameter names is defined through the 26 | `param_names` property. 27 | """ 28 | 29 | def __init__(self, **kwargs): 30 | super(MathFunction, self).__init__(**kwargs) 31 | 32 | self.param_names = () 33 | 34 | @property 35 | def param_names(self): 36 | """The tuple holding the names of the math function's parameters.
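
        Examples
        --------
        A minimal sketch of a concrete MathFunction subclass with a single
        parameter; every name listed in ``param_names`` must be implemented
        as a property of the class::

            class Line(MathFunction):
                def __init__(self, slope, **kwargs):
                    super().__init__(**kwargs)
                    self._slope = slope
                    self.param_names = ('slope',)

                @property
                def slope(self):
                    return self._slope

                @slope.setter
                def slope(self, v):
                    self._slope = v

                @property
                def math_function_str(self):
                    return f'f(x) = {self._slope} * x'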
37 | """ 38 | return self._param_names 39 | 40 | @param_names.setter 41 | def param_names(self, names): 42 | if not issequence(names): 43 | names = (names,) 44 | if not issequenceof(names, str): 45 | raise TypeError( 46 | 'The param_names property must be a sequence of str!') 47 | names = tuple(names) 48 | # Check if all the given names are actual properties of this 49 | # MathFunction class. 50 | for name in names: 51 | if not hasattr(self, name): 52 | raise KeyError( 53 | f'The "{classname(self)}" class does not have an attribute ' 54 | f'named "{name}"!') 55 | if not isproperty(self, name): 56 | raise TypeError( 57 | f'The attribute "{classname(self)}" of "{name}" is not a ' 58 | 'property!') 59 | self._param_names = names 60 | 61 | @property 62 | @abc.abstractmethod 63 | def math_function_str(self): 64 | """The string showing the mathematical function of this MathFunction. 65 | """ 66 | pass 67 | 68 | def __str__(self): 69 | """Pretty string representation of this MathFunction instance. 70 | """ 71 | return self.math_function_str 72 | 73 | def copy( 74 | self, 75 | newparams=None): 76 | """Copies this MathFunction object by calling the copy.deepcopy 77 | function, and sets new parameters if requested. 78 | 79 | Parameters 80 | ---------- 81 | newparams : dict | None 82 | The dictionary with the new parameter values to set, where the 83 | dictionary key is the parameter name and the dictionary value is the 84 | new value of the parameter. 85 | """ 86 | f = deepcopy(self) 87 | 88 | # Set the new parameter values. 89 | if newparams is not None: 90 | f.set_params(newparams) 91 | 92 | return f 93 | 94 | def get_param( 95 | self, 96 | name): 97 | """Retrieves the value of the given parameter. It returns ``np.nan`` if 98 | the parameter does not exist. 99 | 100 | Parameters 101 | ---------- 102 | name : str 103 | The name of the parameter. 104 | 105 | Returns 106 | ------- 107 | value : float | np.nan 108 | The value of the parameter. 109 | """ 110 | if name not in self._param_names: 111 | return np.nan 112 | 113 | value = getattr(self, name) 114 | 115 | return value 116 | 117 | def set_params( 118 | self, 119 | pdict): 120 | """Sets the parameters of the math function to the given parameter 121 | values. 122 | 123 | Parameters 124 | ---------- 125 | pdict : dict (name: value) 126 | The dictionary holding the names of the parameters and their new 127 | values. 128 | 129 | Returns 130 | ------- 131 | updated : bool 132 | Flag if parameter values were actually updated. 133 | """ 134 | if not isinstance(pdict, dict): 135 | raise TypeError( 136 | 'The pdict argument must be of type dict!') 137 | 138 | updated = False 139 | 140 | for pname in self._param_names: 141 | current_value = getattr(self, pname) 142 | pvalue = pdict.get(pname, current_value) 143 | if pvalue != current_value: 144 | setattr(self, pname, pvalue) 145 | updated = True 146 | 147 | return updated 148 | -------------------------------------------------------------------------------- /skyllh/core/minimizers/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/core/minimizers/__init__.py -------------------------------------------------------------------------------- /skyllh/core/model.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Dr. 
Martin Wolf 3 | 4 | """This module defines the base class for any model class used in SkyLLH. 5 | """ 6 | 7 | from skyllh.core.py import ( 8 | NamedObjectCollection, 9 | issequenceof, 10 | str_cast, 11 | typename, 12 | ) 13 | 14 | 15 | class Model( 16 | object): 17 | """This class provides a base class for all model classes used in SkyLLH. 18 | Models could be for instance source models or background models. 19 | """ 20 | def __init__( 21 | self, 22 | name=None, 23 | **kwargs): 24 | """Creates a new Model instance. 25 | 26 | Parameters 27 | ---------- 28 | name : str | None 29 | The name of the model. If set to `None`, the id of the object is 30 | taken as name. 31 | """ 32 | super().__init__( 33 | **kwargs) 34 | 35 | if name is None: 36 | name = self.id 37 | 38 | self.name = name 39 | 40 | @property 41 | def name(self): 42 | """The name of the model. 43 | """ 44 | return self._name 45 | 46 | @name.setter 47 | def name(self, name): 48 | name = str_cast( 49 | name, 50 | 'The name property must be castable to type str!') 51 | self._name = name 52 | 53 | @property 54 | def id(self): 55 | """(read-only) The ID of the model. It's an integer generated with 56 | Python's `id` function. Hence, it's related to the memory address 57 | of the object. 58 | """ 59 | return id(self) 60 | 61 | 62 | class ModelCollection( 63 | NamedObjectCollection): 64 | """This class describes a collection of Model instances. It can be 65 | used to group several models into a single object. 66 | """ 67 | @staticmethod 68 | def cast( 69 | obj, 70 | errmsg=None, 71 | **kwargs): 72 | """Casts the given object to a ModelCollection object. 73 | If the cast fails, a TypeError with the given error message is raised. 74 | 75 | Parameters 76 | ---------- 77 | obj : Model instance | sequence of Model instances | 78 | ModelCollection | None 79 | The object that should be casted to ModelCollection. 80 | If set to None, an empty ModelCollection is created. 81 | errmsg : str | None 82 | The error message if the cast fails. 83 | If set to None, a generic error message will be used. 84 | 85 | Additional keyword arguments 86 | ---------------------------- 87 | Additional keyword arguments are passed to the constructor of the 88 | ModelCollection class. 89 | 90 | Raises 91 | ------ 92 | TypeError 93 | If the cast failed. 94 | 95 | Returns 96 | ------- 97 | model_collection : instance of ModelCollection 98 | The created ModelCollection instance. If `obj` is already a 99 | ModelCollection instance, it will be returned. 100 | """ 101 | if obj is None: 102 | return ModelCollection( 103 | models=None, model_type=Model, **kwargs) 104 | 105 | if isinstance(obj, Model): 106 | return ModelCollection( 107 | models=[obj], model_type=Model, **kwargs) 108 | 109 | if isinstance(obj, ModelCollection): 110 | return obj 111 | 112 | if issequenceof(obj, Model): 113 | return ModelCollection( 114 | models=obj, model_type=Model, **kwargs) 115 | 116 | if errmsg is None: 117 | errmsg = (f'Cast of object "{str(obj)}" of type ' 118 | f'"{typename(type(obj))}" to ModelCollection failed!') 119 | raise TypeError(errmsg) 120 | 121 | def __init__( 122 | self, 123 | models=None, 124 | model_type=None, 125 | **kwargs): 126 | """Creates a new Model collection. The type of the model instances this 127 | collection holds can be restricted, by setting the model_type argument. 128 | 129 | Parameters 130 | ---------- 131 | models : sequence of model_type instances | None 132 | The sequence of models this collection should be initalized with. 
133 | model_type : type | None 134 | The type of the model. It must be a subclass of class ``Model``. 135 | If set to None (default), Model will be used. 136 | """ 137 | if model_type is None: 138 | model_type = Model 139 | if not issubclass(model_type, Model): 140 | raise TypeError( 141 | 'The model_type argument must be a subclass of Model!') 142 | 143 | super().__init__( 144 | objs=models, 145 | obj_type=model_type, 146 | **kwargs) 147 | 148 | @property 149 | def model_type(self): 150 | """(read-only) The type of the model. 151 | """ 152 | return self.obj_type 153 | 154 | @property 155 | def models(self): 156 | """(read-only) The list of models of type `model_type`. 157 | """ 158 | return self.objects 159 | 160 | 161 | class DetectorModel(Model): 162 | """This class provides a base class for a detector model. It can be used 163 | in combination with the ParameterModelMapper class. 164 | """ 165 | def __init__(self, name, **kwargs): 166 | """Creates a new DetectorModel instance. 167 | 168 | Parameters 169 | ---------- 170 | name : str 171 | The name of the detector model. 172 | """ 173 | super().__init__( 174 | name=name, 175 | **kwargs) 176 | -------------------------------------------------------------------------------- /skyllh/core/session.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import sys 4 | 5 | """The session module provides global settings for session handling. 6 | """ 7 | 8 | # By default SkyLLH will not be in interactive session, i.e. will be in batch 9 | # mode. Hence, progress bars will not be displayed to not screw up the output. 10 | IS_INTERACTIVE_SESSION = False 11 | 12 | 13 | def enable_interactive_session(): 14 | """Enables interactive session mode. 15 | """ 16 | global IS_INTERACTIVE_SESSION 17 | 18 | IS_INTERACTIVE_SESSION = True 19 | 20 | 21 | def disable_interactive_session(): 22 | """Disables interactive session mode. 23 | """ 24 | global IS_INTERACTIVE_SESSION 25 | 26 | IS_INTERACTIVE_SESSION = False 27 | 28 | 29 | def is_interactive_session(): 30 | """Checks whether the current session is interactive (True) or not (False). 31 | 32 | Returns 33 | ------- 34 | check : bool 35 | True if the current SkyLLH session is interactive, False otherwise. 36 | """ 37 | return IS_INTERACTIVE_SESSION 38 | 39 | 40 | def is_python_interpreter_in_interactive_mode(): 41 | """Checks if the Python interpreter is in interactive mode. 42 | 43 | Returns 44 | ------- 45 | check : bool 46 | True if the Python interpreter is in interactive mode, False otherwise. 47 | """ 48 | return bool(getattr(sys, 'ps1', sys.flags.interactive)) 49 | -------------------------------------------------------------------------------- /skyllh/core/signal_generation.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import abc 4 | 5 | from skyllh.core.py import ( 6 | issequence, 7 | float_cast, 8 | ) 9 | 10 | 11 | class SignalGenerationMethod( 12 | object, 13 | metaclass=abc.ABCMeta 14 | ): 15 | """This is a base class for a source and detector specific signal generation 16 | method, that calculates the source flux for a given monte-carlo event, which 17 | is needed to calculate the MC event weights for the signal generator. 18 | """ 19 | 20 | def __init__( 21 | self, 22 | energy_range, 23 | **kwargs, 24 | ): 25 | """Constructs a new signal generation method instance. 
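
        Derived classes implement the abstract
        ``calc_source_signal_mc_event_flux`` method, which provides the
        per-event signal flux used to weight the MC events.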
26 | 27 | Parameters 28 | ---------- 29 | energy_range : 2-element tuple of float | None 30 | The energy range from which to take MC events into account for 31 | signal event generation. 32 | If set to None, the entire energy range [0, +inf] is used. 33 | """ 34 | super().__init__(**kwargs) 35 | 36 | self.energy_range = energy_range 37 | 38 | @property 39 | def energy_range(self): 40 | """The 2-element tuple of floats holding the energy range from which to 41 | take MC events into account for signal event generation. 42 | """ 43 | return self._energy_range 44 | 45 | @energy_range.setter 46 | def energy_range(self, r): 47 | if r is not None: 48 | if not issequence(r): 49 | raise TypeError( 50 | 'The energy_range property must be a sequence!') 51 | if len(r) != 2: 52 | raise ValueError( 53 | 'The energy_range property must be a sequence of 2 ' 54 | 'elements!') 55 | r = ( 56 | float_cast( 57 | r[0], 58 | 'The first element of the energy_range ' 59 | 'sequence must be castable to type float!'), 60 | float_cast( 61 | r[1], 62 | 'The second element of the energy_range ' 63 | 'sequence must be castable to type float!') 64 | ) 65 | self._energy_range = r 66 | 67 | @abc.abstractmethod 68 | def calc_source_signal_mc_event_flux( 69 | self, 70 | data_mc, 71 | shg, 72 | ): 73 | """This method is supposed to calculate the signal flux of each given 74 | MC event for each source hypothesis of the given source hypothesis 75 | group. 76 | 77 | Parameters 78 | ---------- 79 | data_mc : numpy record ndarray 80 | The numpy record array holding all the MC events. 81 | shg : instance of SourceHypoGroup 82 | The source hypothesis group instance, which defines the list of 83 | sources, and their flux model. 84 | 85 | Returns 86 | ------- 87 | ev_idx_arr : ndarray 88 | The (N_selected_signal_events,)-shaped 1D ndarray holding the index 89 | of the MC event. 90 | shg_src_idx_arr : ndarray 91 | The (N_selected_signal_events,)-shaped 1D ndarray holding the index 92 | of the source within the given source hypothesis group for each 93 | signal candidate event. 94 | flux_arr : ndarray 95 | The (N_selected_signal_events,)-shaped 1D ndarray holding the flux 96 | value of each signal candidate event. 97 | """ 98 | pass 99 | 100 | def signal_event_post_sampling_processing( 101 | self, 102 | shg, 103 | shg_sig_events_meta, 104 | shg_sig_events, 105 | ): 106 | """This method should be reimplemented by the derived class if there 107 | is some processing needed after the MC signal events have been sampled 108 | from the global MC data. 109 | 110 | Parameters 111 | ---------- 112 | shg : SourceHypoGroup instance 113 | The source hypothesis group instance holding the sources and their 114 | locations. 115 | shg_sig_events_meta : numpy record ndarray 116 | The numpy record ndarray holding meta information about the 117 | generated signal events for the given source hypothesis group. 118 | The length of this array must be the same as shg_sig_events. 119 | It needs to contain the following data fields: 120 | 121 | shg_src_idx : int 122 | The source index within the source hypothesis group. 123 | 124 | shg_sig_events : numpy record ndarray 125 | The numpy record ndarray holding the generated signal events for 126 | the given source hypothesis group and in the format of the original 127 | MC events. 128 | 129 | Returns 130 | ------- 131 | shg_sig_events : numpy record array 132 | The processed signal events. In the default implementation of this 133 | method this is just the shg_sig_events input array. 
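
        Examples
        --------
        A sketch of a derived class that post-processes the sampled events,
        e.g. by rotating them onto the position of their assigned source;
        ``rotate_events_to_source`` is a hypothetical helper function and the
        abstract ``calc_source_signal_mc_event_flux`` method is omitted::

            class MySigGenMethod(SignalGenerationMethod):
                def signal_event_post_sampling_processing(
                        self, shg, shg_sig_events_meta, shg_sig_events):
                    src_idx = shg_sig_events_meta['shg_src_idx']
                    shg_sig_events = rotate_events_to_source(
                        shg_sig_events, shg, src_idx)
                    return shg_sig_events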
134 | """ 135 | return shg_sig_events 136 | -------------------------------------------------------------------------------- /skyllh/core/times.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import abc 4 | 5 | from skyllh.core.livetime import Livetime 6 | 7 | 8 | class TimeGenerationMethod( 9 | object, 10 | metaclass=abc.ABCMeta, 11 | ): 12 | """Base class (type) for implementing a method to generate times. 13 | """ 14 | 15 | def __init__(self, **kwargs): 16 | super().__init__(**kwargs) 17 | 18 | @abc.abstractmethod 19 | def generate_times( 20 | self, 21 | rss, 22 | size, 23 | ): 24 | """The ``generate_times`` method implements the actual generation of 25 | times, which is method dependent. 26 | 27 | Parameters 28 | ---------- 29 | rss : instance of RandomStateService 30 | The random state service providing the random number 31 | generator (RNG). 32 | size : int 33 | The number of times that should get generated. 34 | 35 | Returns 36 | ------- 37 | times : ndarray 38 | The 1d numpy ndarray holding the generated times. 39 | """ 40 | pass 41 | 42 | 43 | class LivetimeTimeGenerationMethod( 44 | TimeGenerationMethod, 45 | ): 46 | """The LivetimeTimeGenerationMethod provides the method to generate times 47 | from a Livetime object. It will uniformly generate times that coincide 48 | with the on-time intervals of the detector, by calling the `draw_ontimes` 49 | method of the Livetime class. 50 | """ 51 | def __init__(self, livetime, **kwargs): 52 | """Creates a new LivetimeTimeGenerationMethod instance. 53 | 54 | Parameters 55 | ---------- 56 | livetime : Livetime 57 | The Livetime instance that should be used to generate times from. 58 | """ 59 | super().__init__(**kwargs) 60 | 61 | self.livetime = livetime 62 | 63 | @property 64 | def livetime(self): 65 | """The Livetime instance used to draw times from. 66 | """ 67 | return self._livetime 68 | 69 | @livetime.setter 70 | def livetime(self, livetime): 71 | if not isinstance(livetime, Livetime): 72 | raise TypeError( 73 | 'The livetime property must be an instance of Livetime!') 74 | self._livetime = livetime 75 | 76 | def generate_times( 77 | self, 78 | rss, 79 | size, 80 | **kwargs, 81 | ): 82 | """Generates `size` MJD times according to the detector on-times 83 | provided by the Livetime instance. 84 | 85 | Parameters 86 | ---------- 87 | rss : instance of RandomStateService 88 | The random state service providing the random number 89 | generator (RNG). 90 | size : int 91 | The number of times that should get generated. 92 | 93 | Returns 94 | ------- 95 | times : ndarray 96 | The 1d (`size`,)-shaped numpy ndarray holding the generated times. 97 | """ 98 | times = self._livetime.draw_ontimes( 99 | rss=rss, 100 | size=size, 101 | **kwargs) 102 | 103 | return times 104 | 105 | 106 | class TimeGenerator( 107 | object): 108 | def __init__(self, method): 109 | """Creates a time generator instance with a given time 110 | generation method. 111 | 112 | Parameters 113 | ---------- 114 | method : instance of TimeGenerationMethod 115 | The instance of TimeGenerationMethod that defines the method of 116 | generating times. 117 | """ 118 | self.method = method 119 | 120 | @property 121 | def method(self): 122 | """The TimeGenerationMethod object that should be used to generate 123 | the times.
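# --- Editor's note: a minimal usage sketch for the time generation classes
# above. The Livetime and RandomStateService constructors are the ones shown
# elsewhere in this repository (skyllh.core.livetime, skyllh.core.random); the
# on-time intervals are purely illustrative.
import numpy as np
from skyllh.core.livetime import Livetime
from skyllh.core.random import RandomStateService
from skyllh.core.times import LivetimeTimeGenerationMethod, TimeGenerator

livetime = Livetime(
    uptime_mjd_intervals_arr=np.array([[58000.0, 58000.5],
                                       [58001.0, 58001.8]]))
timegen = TimeGenerator(method=LivetimeTimeGenerationMethod(livetime=livetime))
# Draw 100 MJD times that fall inside the detector on-time intervals.
times = timegen.generate_times(rss=RandomStateService(seed=0), size=100)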
124 | """ 125 | return self._method 126 | 127 | @method.setter 128 | def method(self, method): 129 | if not isinstance(method, TimeGenerationMethod): 130 | raise TypeError( 131 | 'The time generation method must be an instance of ' 132 | 'TimeGenerationMethod!') 133 | self._method = method 134 | 135 | def generate_times( 136 | self, 137 | rss, 138 | size, 139 | **kwargs, 140 | ): 141 | """Generates ``size`` times by calling the ``generate_times`` 142 | method of the TimeGenerationMethod class. 143 | 144 | Parameters 145 | ---------- 146 | rss : instance of RandomStateService 147 | The random state service providing the random number generator 148 | (RNG). 149 | size : int 150 | The number of times that should get generated. 151 | **kwargs 152 | Additional keyword arguments are passed to the ``generate_times`` 153 | method of the TimeGenerationMethod class. 154 | 155 | Returns 156 | ------- 157 | times : ndarray 158 | The 1d (``size``,)-shaped ndarray holding the generated times. 159 | """ 160 | times = self._method.generate_times( 161 | rss=rss, 162 | size=size, 163 | **kwargs) 164 | 165 | return times 166 | -------------------------------------------------------------------------------- /skyllh/core/types.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """This module defines base types for some of the SkyLLH classes to avoid 4 | circular imports when actively checking for types. 5 | """ 6 | 7 | 8 | class SourceHypoGroup_t( 9 | object, 10 | ): 11 | def __init__(self, *args, **kwargs) -> None: 12 | super().__init__(*args, **kwargs) 13 | -------------------------------------------------------------------------------- /skyllh/core/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/core/utils/__init__.py -------------------------------------------------------------------------------- /skyllh/core/utils/flux_model.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from scipy.stats import ( 4 | rv_continuous, 5 | ) 6 | 7 | from skyllh.core.flux_model import ( 8 | TimeFluxProfile, 9 | ) 10 | from skyllh.core.py import ( 11 | classname, 12 | ) 13 | 14 | 15 | def create_scipy_stats_rv_continuous_from_TimeFluxProfile( 16 | profile, 17 | ): 18 | """This function builds a scipy.stats.rv_continuous instance for a given 19 | :class:`~skyllh.core.flux_model.TimeFluxProfile` instance. 20 | 21 | It can be used to generate random numbers according to the given time flux 22 | profile function. 23 | 24 | Parameters 25 | ---------- 26 | profile : instance of TimeFluxProfile 27 | The instance of TimeFluxProfile providing the function of the time flux 28 | profile. 29 | 30 | Returns 31 | ------- 32 | rv : instance of rv_continuous_frozen 33 | The instance of rv_continuous_frozen representing the time flux profile 34 | as a continuous random variate instance. 35 | """ 36 | if not isinstance(profile, TimeFluxProfile): 37 | raise TypeError( 38 | 'The profile argument must be an instance of TimeFluxProfile!
' 39 | f'Its current type is {classname(profile)}!') 40 | 41 | norm = 0 42 | tot_integral = profile.get_total_integral() 43 | if tot_integral != 0: 44 | norm = 1 / tot_integral 45 | 46 | class rv_continuous_from_TimeFluxProfile( 47 | rv_continuous): 48 | 49 | def __init__(self, *args, **kwargs): 50 | """Creates a new instance of the subclass of rv_continuous using 51 | the time flux profile. 52 | """ 53 | self._profile = profile 54 | self._norm = norm 55 | 56 | super().__init__(*args, **kwargs) 57 | 58 | def _pdf(self, t): 59 | """Calculates the probability density of the time flux profile 60 | function for the given time values. 61 | """ 62 | pd = self._profile(t=t) * self._norm 63 | 64 | return pd 65 | 66 | def _cdf(self, t): 67 | """Calculates the cumulative distribution function values for the 68 | given time values. If the time flux profile instance provides a 69 | ``cdf`` method, it will be used. Otherwise the generic ``_cdf`` 70 | method of the ``rv_continuous`` class will be used. 71 | """ 72 | if hasattr(self._profile, 'cdf') and callable(self._profile.cdf): 73 | return self._profile.cdf(t=t) 74 | 75 | return super()._cdf(t) 76 | 77 | rv = rv_continuous_from_TimeFluxProfile( 78 | a=profile.t_start, 79 | b=profile.t_stop, 80 | ).freeze(loc=0, scale=1) 81 | 82 | return rv 83 | -------------------------------------------------------------------------------- /skyllh/core/utils/tdm.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import numpy as np 4 | 5 | from skyllh.core.utils.coords import ( 6 | angular_separation, 7 | ) 8 | 9 | 10 | def get_tdm_field_func_psi(psi_floor=None): 11 | """Returns the TrialDataManager (TDM) field function for psi with an 12 | optional psi value floor. 13 | 14 | Parameters 15 | ---------- 16 | psi_floor : float | None 17 | The optional floor value for psi. This should be ``None`` for a standard 18 | point-source analysis that uses an analytic function for the detector's 19 | point-spread-function (PSF). 20 | 21 | Returns 22 | ------- 23 | tdm_field_func_psi : function 24 | TrialDataManager (TDM) field function for psi. 25 | """ 26 | def tdm_field_func_psi( 27 | tdm, 28 | shg_mgr, 29 | pmm): 30 | """TDM data field function to calculate the opening angle between the 31 | source positions and the event's reconstructed position. 32 | """ 33 | (src_idxs, evt_idxs) = tdm.src_evt_idxs 34 | 35 | ra = np.take(tdm.get_data('ra'), evt_idxs) 36 | dec = np.take(tdm.get_data('dec'), evt_idxs) 37 | 38 | src_array = tdm.get_data('src_array') 39 | src_ra = np.take(src_array['ra'], src_idxs) 40 | src_dec = np.take(src_array['dec'], src_idxs) 41 | 42 | psi = angular_separation( 43 | ra1=ra, 44 | dec1=dec, 45 | ra2=src_ra, 46 | dec2=src_dec, 47 | psi_floor=psi_floor) 48 | 49 | return psi 50 | 51 | return tdm_field_func_psi 52 | -------------------------------------------------------------------------------- /skyllh/core/utils/trials.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """This module contains utility functions related to analysis trials.
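# --- Editor's note: hedged usage sketch for
# create_scipy_stats_rv_continuous_from_TimeFluxProfile(). It assumes that a
# TimeFluxProfile instance `profile` has already been constructed (e.g. one of
# the time profile classes of skyllh.core.flux_model); only the call pattern
# is taken from the function defined above.
rv = create_scipy_stats_rv_continuous_from_TimeFluxProfile(profile=profile)
pd = rv.pdf(profile.t_start)                   # density at the profile start
samples = rv.rvs(size=1000, random_state=42)   # times distributed like the profile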
4 | """ 5 | 6 | import pickle 7 | 8 | from skyllh.core.timing import ( 9 | TaskTimer, 10 | ) 11 | 12 | 13 | def create_pseudo_data_file( 14 | ana, 15 | rss, 16 | filename, 17 | mean_n_bkg_list=None, 18 | mean_n_sig=0, 19 | bkg_kwargs=None, 20 | sig_kwargs=None, 21 | tl=None 22 | ): 23 | """Creates a pickle file that contains the pseudo data for a single trial 24 | by generating background and signal events. 25 | 26 | Parameters 27 | ---------- 28 | ana : Analysis 29 | The Analysis instance that should be used to generate the pseudo data. 30 | rss : RandomStateService 31 | The RandomStateService instance to use for generating random numbers. 32 | filename : str 33 | The data file name into which the generated pseudo data should get 34 | written. 35 | mean_n_bkg_list : list of float | None 36 | The mean number of background events that should be generated for 37 | each dataset. If set to None (the default), the background 38 | generation method needs to obtain this number itself. 39 | mean_n_sig : float 40 | The mean number of signal events that should be generated for the 41 | trial. The actual number of generated events will be drawn from a 42 | Poisson distribution with this given signal mean. 43 | bkg_kwargs : dict | None 44 | Additional keyword arguments for the `generate_events` method of the 45 | background generation method class. A common keyword argument is 46 | `poisson`. 47 | sig_kwargs : dict | None 48 | Additional keyword arguments for the `generate_signal_events` method 49 | of the `SignalGenerator` class. A common keyword argument is 50 | `poisson`. 51 | tl : TimeLord | None 52 | The instance of TimeLord that should be used to time individual tasks. 53 | 54 | """ 55 | (n_bkg_events_list, bkg_events_list) = ana.generate_background_events( 56 | rss=rss, 57 | mean_n_bkg_list=mean_n_bkg_list, 58 | bkg_kwargs=bkg_kwargs, 59 | tl=tl 60 | ) 61 | 62 | (n_sig, n_sig_events_list, sig_events_list) = ana.generate_signal_events( 63 | rss=rss, 64 | mean_n_sig=mean_n_sig, 65 | sig_kwargs=sig_kwargs, 66 | tl=tl 67 | ) 68 | 69 | trial_data = dict( 70 | mean_n_bkg_list=mean_n_bkg_list, 71 | mean_n_sig=mean_n_sig, 72 | bkg_kwargs=bkg_kwargs, 73 | sig_kwargs=sig_kwargs, 74 | n_sig=n_sig, 75 | n_bkg_events_list=n_bkg_events_list, 76 | n_sig_events_list=n_sig_events_list, 77 | bkg_events_list=bkg_events_list, 78 | sig_events_list=sig_events_list 79 | ) 80 | 81 | with TaskTimer(tl, 'Writing pseudo data to file.'): 82 | with open(filename, 'wb') as fp: 83 | pickle.dump(trial_data, fp) 84 | 85 | 86 | def load_pseudo_data(filename, tl=None): 87 | """Loads the pseudo data for a single trial from the given file name. 88 | 89 | Parameters 90 | ---------- 91 | filename : str 92 | The name of the file that contains the pseudo data. 93 | tl : TimeLord | None 94 | The instance of TimeLord that should be used to time individual tasks. 95 | 96 | Returns 97 | ------- 98 | mean_n_sig : float 99 | The mean number of signal events that was used to generate the pseudo 100 | data. 101 | n_sig : int 102 | The actual total number of signal events in the pseudo data. 103 | n_bkg_events_list : list of int 104 | The total number of background events for each data set of the 105 | pseudo data. 106 | n_sig_events_list : list of int 107 | The total number of signal events for each data set of the pseudo data. 108 | bkg_events_list : list of DataFieldRecordArray instances 109 | The list of DataFieldRecordArray instances containing the background 110 | pseudo data events for each data set.
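# --- Editor's note: sketch of the intended round trip between the two helper
# functions of this module. It assumes a fully constructed Analysis instance
# `ana` (see skyllh.core.analysis); the file name is illustrative.
from skyllh.core.random import RandomStateService

create_pseudo_data_file(
    ana=ana,
    rss=RandomStateService(seed=1),
    filename='trial_0001.pkl',
    mean_n_sig=10)

(mean_n_sig, n_sig, n_bkg_events_list, n_sig_events_list,
 bkg_events_list, sig_events_list) = load_pseudo_data(filename='trial_0001.pkl')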
111 | sig_events_list : list of DataFieldRecordArray instances | None 112 | The list of DataFieldRecordArray instances containing the signal 113 | pseudo data events for each data set. If a particular dataset has 114 | no signal events, the entry for that dataset can be None. 115 | """ 116 | with TaskTimer(tl, 'Loading pseudo data from file.'): 117 | with open(filename, 'rb') as fp: 118 | trial_data = pickle.load(fp) 119 | 120 | return ( 121 | trial_data['mean_n_sig'], 122 | trial_data['n_sig'], 123 | trial_data['n_bkg_events_list'], 124 | trial_data['n_sig_events_list'], 125 | trial_data['bkg_events_list'], 126 | trial_data['sig_events_list'] 127 | ) 128 | -------------------------------------------------------------------------------- /skyllh/datasets/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/datasets/__init__.py -------------------------------------------------------------------------------- /skyllh/datasets/i3/PublicData_10y_ps_wMC.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Dr. Martin Wolf 3 | 4 | import numpy as np 5 | 6 | from skyllh.datasets.i3 import ( 7 | PublicData_10y_ps, 8 | ) 9 | 10 | 11 | def create_dataset_collection( 12 | cfg, 13 | base_path=None, 14 | sub_path_fmt=None, 15 | ): 16 | """Defines the dataset collection for IceCube's 10-year 17 | point-source public data, which is available at 18 | http://icecube.wisc.edu/data-releases/20210126_PS-IC40-IC86_VII.zip, and 19 | adds monte-carlo files. 20 | 21 | Parameters 22 | ---------- 23 | cfg : instance of Config 24 | The instance of Config holding the local configuration. 25 | base_path : str | None 26 | The base path of the data files. The actual path of a data file is 27 | assumed to be of the structure ``<base_path>/<sub_path>/<file_name>``. 28 | If None, use the default path ``cfg['repository']['base_path']``. 29 | sub_path_fmt : str | None 30 | The sub path format of the data files of the public data sample. 31 | If None, use the default sub path format 32 | 'icecube_10year_ps'. 33 | 34 | Returns 35 | ------- 36 | dsc : DatasetCollection 37 | The dataset collection containing all the seasons as individual 38 | I3Dataset objects. 39 | """ 40 | dsc = PublicData_10y_ps.create_dataset_collection( 41 | cfg=cfg, 42 | base_path=base_path, 43 | sub_path_fmt=sub_path_fmt) 44 | 45 | dsc.description += """ 46 | This dataset collection has monte-carlo (MC) files defined for each dataset. 47 | These MC files are not part of the original data release and need to be 48 | generated by the user.
49 | """ 50 | 51 | ( 52 | IC40, 53 | IC59, 54 | IC79, 55 | IC86_I, 56 | IC86_II, 57 | IC86_III, 58 | IC86_IV, 59 | IC86_V, 60 | IC86_VI, 61 | IC86_VII, 62 | IC86_II_VII, 63 | ) = dsc[ 64 | 'IC40', 65 | 'IC59', 66 | 'IC79', 67 | 'IC86_I', 68 | 'IC86_II', 69 | 'IC86_III', 70 | 'IC86_IV', 71 | 'IC86_V', 72 | 'IC86_VI', 73 | 'IC86_VII', 74 | 'IC86_II-VII', 75 | ] 76 | IC40.mc_pathfilename_list = 'sim/IC40_MC.npy' 77 | IC59.mc_pathfilename_list = 'sim/IC59_MC.npy' 78 | IC79.mc_pathfilename_list = 'sim/IC79_MC.npy' 79 | IC86_I.mc_pathfilename_list = 'sim/IC86_I_MC.npy' 80 | IC86_II.mc_pathfilename_list = 'sim/IC86_II-VII_MC.npy' 81 | IC86_III.mc_pathfilename_list = IC86_II.mc_pathfilename_list 82 | IC86_IV.mc_pathfilename_list = IC86_II.mc_pathfilename_list 83 | IC86_V.mc_pathfilename_list = IC86_II.mc_pathfilename_list 84 | IC86_VI.mc_pathfilename_list = IC86_II.mc_pathfilename_list 85 | IC86_VII.mc_pathfilename_list = IC86_II.mc_pathfilename_list 86 | IC86_II_VII.mc_pathfilename_list = IC86_II.mc_pathfilename_list 87 | 88 | def add_time(data): 89 | mc = data.mc 90 | mc.append_field('time', np.repeat(0, len(mc))) 91 | 92 | def add_azimuth_and_zenith(data): 93 | mc = data.mc 94 | mc.append_field('azi', np.repeat(0, len(mc))) 95 | mc.append_field('zen', np.repeat(0, len(mc))) 96 | 97 | dsc.add_data_preparation(add_time) 98 | dsc.add_data_preparation(add_azimuth_and_zenith) 99 | 100 | return dsc 101 | -------------------------------------------------------------------------------- /skyllh/datasets/i3/TestData.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Dr. Martin Wolf 3 | 4 | from skyllh.core.dataset import ( 5 | DatasetCollection, 6 | ) 7 | from skyllh.i3.dataset import ( 8 | I3Dataset, 9 | ) 10 | 11 | 12 | def create_dataset_collection( 13 | cfg, 14 | base_path=None, 15 | sub_path_fmt=None, 16 | ): 17 | """Defines a dataset collection with a test dataset. 18 | 19 | Parameters 20 | ---------- 21 | cfg : instance of Config 22 | The instance of Config holding the local configuration. 23 | base_path : str | None 24 | The base path of the data files. The actual path of a data file is 25 | assumed to be of the structure ``<base_path>/<sub_path>/<file_name>``. 26 | If None, use the default path ``cfg['repository']['base_path']``. 27 | sub_path_fmt : str | None 28 | The sub path format of the data files of the test data sample. 29 | If None, use the default sub path format 30 | 'testdata'. 31 | 32 | Returns 33 | ------- 34 | dsc : DatasetCollection 35 | The dataset collection containing the test dataset as an individual 36 | I3Dataset object. 37 | """ 38 | (version, verqualifiers) = (1, dict(p=0)) 39 | 40 | default_sub_path_fmt = 'testdata' 41 | 42 | dsc = DatasetCollection('Public Data 10-year point-source') 43 | 44 | dsc.description = r""" 45 | This dataset collection contains a test dataset which can be used for unit 46 | tests. 47 | """ 48 | 49 | # Define the common keyword arguments for all data sets.
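# --- Editor's note: hedged example of how a collection like this is
# typically created and a single season accessed (Config is defined in
# skyllh.core.config; the base path is illustrative):
#
#     from skyllh.core.config import Config
#     dsc = create_dataset_collection(cfg=Config(), base_path='/data/ana')
#     ds = dsc['TestData']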
50 | ds_kwargs = dict( 51 | cfg=cfg, 52 | livetime=None, 53 | version=version, 54 | verqualifiers=verqualifiers, 55 | base_path=base_path, 56 | default_sub_path_fmt=default_sub_path_fmt, 57 | sub_path_fmt=sub_path_fmt, 58 | ) 59 | 60 | TestData = I3Dataset( 61 | name='TestData', 62 | exp_pathfilenames='exp.npy', 63 | mc_pathfilenames='mc.npy', 64 | grl_pathfilenames='grl.npy', 65 | **ds_kwargs, 66 | ) 67 | 68 | dsc.add_datasets(( 69 | TestData, 70 | )) 71 | 72 | return dsc 73 | -------------------------------------------------------------------------------- /skyllh/datasets/i3/__init__.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from skyllh.datasets.i3 import ( 4 | PublicData_10y_ps, 5 | PublicData_10y_ps_wMC, 6 | ) 7 | 8 | data_samples = { 9 | 'PublicData_10y_ps': 10 | PublicData_10y_ps, 11 | 'PublicData_10y_ps_wMC': 12 | PublicData_10y_ps_wMC, 13 | } 14 | -------------------------------------------------------------------------------- /skyllh/i3/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/i3/__init__.py -------------------------------------------------------------------------------- /skyllh/i3/background_generation.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | from skyllh.core.background_generation import ( 4 | BackgroundGenerationMethod, 5 | ) 6 | from skyllh.core.py import ( 7 | classname, 8 | ) 9 | from skyllh.core.scrambling import ( 10 | DataScrambler, 11 | ) 12 | 13 | 14 | class FixedScrambledExpDataI3BkgGenMethod( 15 | BackgroundGenerationMethod, 16 | ): 17 | """This class implements the background event generation method for the 18 | IceCube detector using scrambled experimental data as background hypothesis 19 | with a fixed number of background events equal to the number of events in 20 | the dataset. This background generation method is the one used in SkyLab. 21 | """ 22 | def __init__( 23 | self, 24 | data_scrambler, 25 | **kwargs, 26 | ): 27 | """Creates a new background generation method instance to generate 28 | background events from scrambled experimental data with a fixed number 29 | of events equal to the number of events in the dataset. 30 | 31 | Parameters 32 | ---------- 33 | data_scrambler : instance of DataScrambler 34 | The DataScrambler instance to use to generate scrambled experimental 35 | data. 36 | """ 37 | super().__init__(**kwargs) 38 | 39 | self.data_scrambler = data_scrambler 40 | 41 | @property 42 | def data_scrambler(self): 43 | """The DataScrambler instance that implements the data scrambling. 44 | """ 45 | return self._data_scrambler 46 | 47 | @data_scrambler.setter 48 | def data_scrambler(self, scrambler): 49 | if not isinstance(scrambler, DataScrambler): 50 | raise TypeError( 51 | 'The data_scrambler property must be an instance of ' 52 | 'DataScrambler! ' 53 | f'Its current type is {classname(scrambler)}!') 54 | self._data_scrambler = scrambler 55 | 56 | def generate_events( 57 | self, 58 | rss, 59 | dataset, 60 | data, 61 | **kwargs, 62 | ): 63 | """Generates background events from the given data, by scrambling the 64 | experimental data. The number of events is equal to the size of the 65 | given dataset. 
66 | 67 | Parameters 68 | ---------- 69 | rss : instance of RandomStateService 70 | The instance of RandomStateService that should be used to generate 71 | random numbers from. It is used to scramble the experimental data. 72 | dataset : instance of Dataset 73 | The Dataset instance describing the dataset for which background 74 | events should get generated. 75 | data : instance of DatasetData 76 | The DatasetData instance holding the data of the dataset for which 77 | background events should get generated. 78 | 79 | Returns 80 | ------- 81 | n_bkg : int 82 | The number of generated background events. 83 | bkg_events : instance of DataFieldRecordArray 84 | The instance of DataFieldRecordArray holding the generated 85 | background events. 86 | """ 87 | bkg_events = self._data_scrambler.scramble_data( 88 | rss=rss, 89 | dataset=dataset, 90 | data=data.exp, 91 | copy=True) 92 | 93 | return (len(bkg_events), bkg_events) 94 | -------------------------------------------------------------------------------- /skyllh/i3/config.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """This file defines IceCube specific global configuration. 4 | """ 5 | 6 | from skyllh.core.datafields import ( 7 | DataFieldStages as DFS, 8 | ) 9 | 10 | 11 | def add_icecube_specific_analysis_required_data_fields(cfg): 12 | """Adds IceCube specific data fields required by an IceCube analysis to 13 | the given local configuration. 14 | 15 | Parameters 16 | ---------- 17 | cfg : instance of Config 18 | The instance of Config holding the local configuration. 19 | """ 20 | cfg['datafields']['azi'] = DFS.ANALYSIS_EXP 21 | cfg['datafields']['zen'] = DFS.ANALYSIS_EXP 22 | cfg['datafields']['sin_dec'] = DFS.ANALYSIS_EXP 23 | cfg['datafields']['sin_true_dec'] = DFS.ANALYSIS_MC 24 | -------------------------------------------------------------------------------- /skyllh/i3/livetime.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import numpy as np 4 | 5 | from skyllh.core.livetime import ( 6 | Livetime, 7 | ) 8 | from skyllh.core.storage import ( 9 | create_FileLoader, 10 | ) 11 | from skyllh.i3.dataset import ( 12 | I3Dataset, 13 | ) 14 | 15 | 16 | class I3Livetime( 17 | Livetime): 18 | """The I3Livetime class provides the functionality to load a Livetime object 19 | from a good-run-list data file. 20 | """ 21 | 22 | @classmethod 23 | def from_grl_data(cls, grl_data): 24 | """Creates an I3Livetime instance from the given good-run-list (GRL) 25 | data. 26 | 27 | Parameters 28 | ---------- 29 | grl_data : instance of numpy structured ndarray 30 | The numpy structured ndarray of length N_runs holding the start and 31 | stop times of the good runs. The following fields need to exist: 32 | 33 | start : float 34 | The MJD of the run start. 35 | stop : float 36 | The MJD of the run stop. 37 | 38 | Returns 39 | ------- 40 | livetime : instance of I3Livetime 41 | The created instance of I3Livetime for the provided GRL data. 42 | """ 43 | uptime_mjd_intervals_arr = np.hstack(( 44 | grl_data['start'].reshape((len(grl_data), 1)), 45 | grl_data['stop'].reshape((len(grl_data), 1)) 46 | )) 47 | 48 | livetime = cls( 49 | uptime_mjd_intervals_arr=uptime_mjd_intervals_arr) 50 | 51 | return livetime 52 | 53 | @staticmethod 54 | def from_grl_files( 55 | pathfilenames): 56 | """Loads an I3Livetime instance from the given good-run-list (GRL) data 57 | file. The data file needs to contain the following data fields: 58 | 59 | start : float 60 | The MJD of the run start. 61 | stop : float 62 | The MJD of the run stop. 63 | 64 | Parameters 65 | ---------- 66 | pathfilenames : str | list of str 67 | The list of fully qualified file names of the GRL data files. 68 | 69 | Returns 70 | ------- 71 | livetime : instance of I3Livetime 72 | The created instance of I3Livetime for the provided GRL data. 73 | """ 74 | grl_data = create_FileLoader(pathfilenames).load_data() 75 | 76 | uptime_mjd_intervals_arr = np.hstack(( 77 | grl_data['start'].reshape((len(grl_data), 1)), 78 | grl_data['stop'].reshape((len(grl_data), 1)) 79 | )) 80 | 81 | livetime = I3Livetime( 82 | uptime_mjd_intervals_arr=uptime_mjd_intervals_arr) 83 | 84 | return livetime 85 | 86 | @staticmethod 87 | def from_I3Dataset(ds): 88 | """Loads an I3Livetime instance from a given I3Dataset instance, which 89 | must have good-run-list (GRL) files defined. 90 | 91 | Parameters 92 | ---------- 93 | ds : I3Dataset instance 94 | The instance of I3Dataset which defines the good-run-list (GRL) 95 | files for the dataset. 96 | 97 | Returns 98 | ------- 99 | livetime : instance of I3Livetime 100 | The created instance of I3Livetime for the GRL data from the 101 | provided dataset. 102 | """ 103 | if not isinstance(ds, I3Dataset): 104 | raise TypeError( 105 | 'The ds argument must be an instance of I3Dataset!') 106 | if len(ds.grl_pathfilename_list) == 0: 107 | raise ValueError( 108 | 'No GRL files have been defined for the given dataset!') 109 | 110 | livetime = I3Livetime.from_grl_files( 111 | pathfilenames=ds.grl_pathfilename_list) 112 | 113 | return livetime 114 | 115 | def __init__( 116 | self, 117 | *args, 118 | **kwargs): 119 | """Creates a new instance of I3Livetime. 120 | """ 121 | super().__init__( 122 | *args, 123 | **kwargs) 124 | -------------------------------------------------------------------------------- /skyllh/i3/scrambling.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import numpy as np 4 | 5 | from skyllh.core.scrambling import ( 6 | DataScramblingMethod, 7 | TimeScramblingMethod, 8 | ) 9 | 10 | from skyllh.i3.utils.coords import ( 11 | azi_to_ra_transform, 12 | hor_to_equ_transform, 13 | ) 14 | 15 | 16 | class I3TimeScramblingMethod( 17 | TimeScramblingMethod, 18 | ): 19 | """The I3TimeScramblingMethod class provides a data scrambling method to 20 | perform time scrambling of the data, 21 | by drawing an MJD time from a given time generator. 22 | """ 23 | def __init__( 24 | self, 25 | timegen, 26 | **kwargs, 27 | ): 28 | """Initializes a new I3 time scrambling instance. 29 | 30 | Parameters 31 | ---------- 32 | timegen : TimeGenerator 33 | The time generator that should be used to generate random MJD times. 34 | """ 35 | super().__init__( 36 | timegen=timegen, 37 | hor_to_equ_transform=hor_to_equ_transform, 38 | **kwargs) 39 | 40 | # We override the scramble method because for IceCube we only need to change 41 | # the ``ra`` field. 42 | def scramble( 43 | self, 44 | rss, 45 | dataset, 46 | data, 47 | ): 48 | """Draws a time from the time generator and calculates the right 49 | ascension coordinate from the azimuth angle according to the time. 50 | Sets the values of the ``time`` and ``ra`` keys of data. 51 | 52 | Parameters 53 | ---------- 54 | rss : RandomStateService 55 | The random state service providing the random number 56 | generator (RNG).
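# --- Editor's note: minimal sketch of building a livetime from GRL-like data
# with I3Livetime.from_grl_data() as defined above; the field names follow
# that method's docstring and the run times are illustrative.
import numpy as np
from skyllh.i3.livetime import I3Livetime

grl = np.array(
    [(58000.0, 58000.5), (58001.0, 58001.8)],
    dtype=[('start', np.float64), ('stop', np.float64)])
lt = I3Livetime.from_grl_data(grl_data=grl)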
57 | dataset : instance of Dataset 58 | The instance of Dataset for which the data should get scrambled. 59 | data : DataFieldRecordArray instance 60 | The DataFieldRecordArray instance containing the data to be 61 | scrambled. 62 | 63 | Returns 64 | ------- 65 | data : instance of DataFieldRecordArray 66 | The given DataFieldRecordArray instance holding the scrambled data. 67 | """ 68 | mjds = self._timegen.generate_times(rss, len(data)) 69 | 70 | data['time'] = mjds 71 | data['ra'] = azi_to_ra_transform(data['azi'], mjds) 72 | 73 | return data 74 | 75 | 76 | class I3SeasonalVariationTimeScramblingMethod( 77 | DataScramblingMethod, 78 | ): 79 | """The I3SeasonalVariationTimeScramblingMethod class provides a data 80 | scrambling method to perform data coordinate scrambling based on a generated 81 | time, which follows seasonal variations within the experimental data. 82 | """ 83 | def __init__( 84 | self, 85 | data, 86 | **kwargs, 87 | ): 88 | """Initializes a new seasonal time scrambling instance. 89 | 90 | Parameters 91 | ---------- 92 | data : instance of I3DatasetData 93 | The instance of I3DatasetData holding the experimental data and 94 | good-run-list information. 95 | """ 96 | super().__init__(**kwargs) 97 | 98 | # The run weights are the number of events in each run relative to all 99 | # the events to account for possible seasonal variations. 100 | self.run_weights = np.zeros((len(data.grl),), dtype=np.float64) 101 | n_events = len(data.exp['time']) 102 | for (i, (start, stop)) in enumerate( 103 | zip(data.grl['start'], data.grl['stop'])): 104 | mask = (data.exp['time'] >= start) & (data.exp['time'] < stop) 105 | self.run_weights[i] = len(data.exp[mask]) / n_events 106 | self.run_weights /= np.sum(self.run_weights) 107 | 108 | self.grl = data.grl 109 | 110 | def scramble( 111 | self, 112 | rss, 113 | dataset, 114 | data, 115 | ): 116 | """Scrambles the given data based on random MJD times, which are 117 | generated uniformly within the data runs, where the data runs are 118 | weighted by their number of events relative to the total number of events. 119 | 120 | Parameters 121 | ---------- 122 | rss : instance of RandomStateService 123 | The random state service providing the random number 124 | generator (RNG). 125 | dataset : instance of Dataset 126 | The instance of Dataset for which the data should get scrambled. 127 | data : instance of DataFieldRecordArray 128 | The DataFieldRecordArray instance containing the data to be 129 | scrambled. 130 | 131 | Returns 132 | ------- 133 | data : instance of DataFieldRecordArray 134 | The given DataFieldRecordArray holding the scrambled data. 135 | """ 136 | # Get run indices based on their seasonal weights. 137 | run_idxs = rss.random.choice( 138 | self.grl['start'].size, 139 | size=len(data['time']), 140 | p=self.run_weights) 141 | 142 | # Draw random times uniformly within the runs. 143 | times = rss.random.uniform( 144 | self.grl['start'][run_idxs], 145 | self.grl['stop'][run_idxs]) 146 | 147 | # Get the correct right ascension.
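# --- Editor's note (added comment): the azimuth of each event is fixed in
# detector coordinates, so after assigning the new times the right-ascension
# is re-derived from the azimuth via azi_to_ra_transform(); only the
# equatorial coordinates change, the local detector coordinates stay intact.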
148 | data['time'] = times 149 | data['ra'] = azi_to_ra_transform( 150 | azi=data['azi'], 151 | mjd=times) 152 | 153 | return data 154 | -------------------------------------------------------------------------------- /skyllh/i3/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/i3/utils/__init__.py -------------------------------------------------------------------------------- /skyllh/i3/utils/coords.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """IceCube specific coordinate utility functions. 4 | """ 5 | 6 | import numpy as np 7 | 8 | 9 | def azi_to_ra_transform(azi, mjd): 10 | """Rotates the given IceCube azimuth angles into right-ascension angles for 11 | the given MJD times. This function is IceCube specific and assumes that the 12 | detector is located exactly at the South Pole and neglects all astronomical 13 | effects like Earth's precession. 14 | 15 | Parameters 16 | ---------- 17 | azi : instance of numpy.ndarray 18 | The array with the azimuth angles. 19 | mjd : instance of numpy.ndarray 20 | The array with the MJD times for each azimuth angle. 21 | 22 | Returns 23 | ------- 24 | ra : instance of numpy.ndarray 25 | The right-ascension values. 26 | """ 27 | # A sidereal day is _sidereal_length times the length of a solar day. 28 | _sidereal_length = 0.997269566 29 | _sidereal_offset = 2.54199002505 30 | sidereal_day_residuals = (mjd / _sidereal_length) % 1 31 | ra = _sidereal_offset + 2 * np.pi * sidereal_day_residuals - azi 32 | ra = np.mod(ra, 2*np.pi) 33 | 34 | return ra 35 | 36 | 37 | def ra_to_azi_transform(ra, mjd): 38 | """Rotates the given right-ascension angles to local IceCube azimuth angles. 39 | 40 | Parameters 41 | ---------- 42 | ra : instance of numpy.ndarray 43 | The array with the right-ascension angles. 44 | mjd : instance of numpy.ndarray 45 | The array with the MJD times for each right-ascension angle. 46 | 47 | Returns 48 | ------- 49 | azi : instance of numpy.ndarray 50 | The azimuth angle for each right-ascension angle. 51 | """ 52 | # Use the azi_to_ra_transform function because it is symmetric. 53 | azi = azi_to_ra_transform(ra, mjd) 54 | 55 | return azi 56 | 57 | 58 | def hor_to_equ_transform(azi, zen, mjd): 59 | """Transforms the coordinate from the horizontal system (azimuth, zenith) 60 | into the equatorial system (right-ascension, declination) for a detector at 61 | the South Pole, neglecting all astronomical effects like Earth's 62 | precession. 63 | 64 | Parameters 65 | ---------- 66 | azi : instance of numpy.ndarray 67 | The azimuth angle. 68 | zen : instance of numpy.ndarray 69 | The zenith angle. 70 | mjd : instance of numpy.ndarray 71 | The time in MJD. 72 | 73 | Returns 74 | ------- 75 | ra : instance of numpy.ndarray 76 | The right-ascension angle. 77 | dec : instance of numpy.ndarray 78 | The declination angle.
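# --- Editor's note: quick numeric sketch of the azimuth-to-right-ascension
# rotation implemented above; the input values are illustrative.
import numpy as np

azi = np.array([0.0, 0.5 * np.pi])
mjd = np.array([57000.0, 57000.0])
ra = azi_to_ra_transform(azi=azi, mjd=mjd)  # angles wrapped into [0, 2*pi)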
79 | """ 80 | ra = azi_to_ra_transform(azi, mjd) 81 | dec = np.pi - zen 82 | return (ra, dec) 83 | -------------------------------------------------------------------------------- /skyllh/plotting/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/plotting/__init__.py -------------------------------------------------------------------------------- /skyllh/plotting/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/plotting/core/__init__.py -------------------------------------------------------------------------------- /skyllh/plotting/i3/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/plotting/i3/__init__.py -------------------------------------------------------------------------------- /skyllh/plotting/i3/backgroundpdf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Plotting module to plot IceCube specific background PDF objects. 4 | """ 5 | 6 | import numpy as np 7 | 8 | from matplotlib.axes import ( 9 | Axes, 10 | ) 11 | from matplotlib.colors import ( 12 | LogNorm, 13 | ) 14 | 15 | from skyllh.core.py import ( 16 | classname, 17 | ) 18 | from skyllh.core.source_hypo_grouping import ( 19 | SourceHypoGroupManager, 20 | ) 21 | from skyllh.core.storage import ( 22 | DataFieldRecordArray, 23 | ) 24 | from skyllh.core.trialdata import ( 25 | TrialDataManager, 26 | ) 27 | from skyllh.i3.backgroundpdf import ( 28 | BackgroundI3SpatialPDF, 29 | ) 30 | 31 | 32 | class BackgroundI3SpatialPDFPlotter(object): 33 | """Plotter class to plot a BackgroundI3SpatialPDF object. 34 | """ 35 | def __init__(self, tdm, pdf): 36 | """Creates a new plotter object for plotting a BackgroundI3SpatialPDF 37 | object. 38 | 39 | Parameters 40 | ---------- 41 | tdm : instance of TrialDataManager 42 | The instance of TrialDataManager that provides the data for the PDF 43 | evaluation. 44 | pdf : instance of BackgroundI3SpatialPDF 45 | The PDF object to plot. 46 | """ 47 | self.tdm = tdm 48 | self.pdf = pdf 49 | 50 | @property 51 | def pdf(self): 52 | """The PDF object to plot. 53 | """ 54 | return self._pdf 55 | 56 | @pdf.setter 57 | def pdf(self, pdf): 58 | if not isinstance(pdf, BackgroundI3SpatialPDF): 59 | raise TypeError( 60 | 'The pdf property must be an instance of ' 61 | 'BackgroundI3SpatialPDF!') 62 | self._pdf = pdf 63 | 64 | @property 65 | def tdm(self): 66 | """The TrialDataManager that provides the data for the PDF evaluation. 67 | """ 68 | return self._tdm 69 | 70 | @tdm.setter 71 | def tdm(self, obj): 72 | if not isinstance(obj, TrialDataManager): 73 | raise TypeError( 74 | 'The tdm property must be an instance of TrialDataManager!') 75 | self._tdm = obj 76 | 77 | def plot(self, src_hypo_group_manager, axes): 78 | """Plots the spatial PDF. It uses the sin(dec) binning of the PDF to 79 | properly represent the resolution of the PDF in the drawing. 80 | 81 | Parameters 82 | ---------- 83 | src_hypo_group_manager : instance of SourceHypoGroupManager 84 | The instance of SourceHypoGroupManager that defines the source 85 | hypotheses.
86 | axes : mpl.axes.Axes 87 | The matplotlib Axes object on which the PDF should get drawn. 88 | 89 | Returns 90 | ------- 91 | img : instance of mpl.AxesImage 92 | The AxesImage instance showing the PDF image. 93 | """ 94 | if not isinstance(src_hypo_group_manager, SourceHypoGroupManager): 95 | raise TypeError( 96 | 'The src_hypo_group_manager argument must be an ' 97 | 'instance of SourceHypoGroupManager!') 98 | if not isinstance(axes, Axes): 99 | raise TypeError( 100 | 'The axes argument must be an instance of ' 101 | 'matplotlib.axes.Axes!') 102 | 103 | # By construction the BackgroundI3SpatialPDF does not depend on 104 | # right-ascension. Hence, we only need a single bin for the 105 | # right-ascension. 106 | sin_dec_binning = self.pdf.get_binning('sin_dec') 107 | pdfprobs = np.zeros((1, sin_dec_binning.nbins)) 108 | 109 | sin_dec_points = sin_dec_binning.bincenters 110 | events = DataFieldRecordArray(np.zeros( 111 | (pdfprobs.size,), 112 | dtype=[('sin_dec', np.float64)])) 113 | for (i, sin_dec) in enumerate(sin_dec_points): 114 | events['sin_dec'][i] = sin_dec 115 | 116 | self._tdm.initialize_for_new_trial(src_hypo_group_manager, events) 117 | 118 | event_probs = self._pdf.get_prob(self._tdm) 119 | 120 | for i in range(len(events)): 121 | pdfprobs[0, i] = event_probs[i] 122 | 123 | ra_axis = self.pdf.axes['ra'] 124 | (left, right, bottom, top) = ( 125 | ra_axis.vmin, ra_axis.vmax, 126 | sin_dec_binning.lower_edge, sin_dec_binning.upper_edge) 127 | img = axes.imshow( 128 | pdfprobs.T, 129 | extent=(left, right, bottom, top), 130 | origin='lower', 131 | norm=LogNorm(), 132 | interpolation='none') 133 | axes.set_xlabel('ra') 134 | axes.set_ylabel('sin_dec') 135 | axes.set_title(classname(self.pdf)) 136 | 137 | return img 138 | -------------------------------------------------------------------------------- /skyllh/plotting/i3/pdf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Plotting module to plot IceCube specific PDF objects. 4 | """ 5 | 6 | import itertools 7 | 8 | import numpy as np 9 | 10 | from matplotlib.axes import ( 11 | Axes, 12 | ) 13 | from matplotlib.colors import ( 14 | LogNorm, 15 | ) 16 | 17 | from skyllh.core.py import ( 18 | classname, 19 | ) 20 | from skyllh.core.source_hypo_grouping import ( 21 | SourceHypoGroupManager, 22 | ) 23 | from skyllh.core.storage import ( 24 | DataFieldRecordArray, 25 | ) 26 | from skyllh.core.trialdata import ( 27 | TrialDataManager, 28 | ) 29 | from skyllh.i3.pdf import ( 30 | I3EnergyPDF, 31 | ) 32 | 33 | 34 | class I3EnergyPDFPlotter(object): 35 | """Plotter class to plot an I3EnergyPDF object. 36 | """ 37 | def __init__(self, tdm, pdf): 38 | """Creates a new plotter object for plotting an I3EnergyPDF object. 39 | 40 | Parameters 41 | ---------- 42 | tdm : instance of TrialDataManager 43 | The instance of TrialDataManager that provides the data for the 44 | PDF evaluation. 45 | pdf : I3EnergyPDF 46 | The PDF object to plot. 47 | """ 48 | self.tdm = tdm 49 | self.pdf = pdf 50 | 51 | @property 52 | def pdf(self): 53 | """The PDF object to plot. 54 | """ 55 | return self._pdf 56 | 57 | @pdf.setter 58 | def pdf(self, obj): 59 | if not isinstance(obj, I3EnergyPDF): 60 | raise TypeError( 61 | 'The pdf property must be an instance of I3EnergyPDF!') 62 | self._pdf = obj 63 | 64 | @property 65 | def tdm(self): 66 | """The TrialDataManager that provides the data for the PDF evaluation.
67 | """ 68 | return self._tdm 69 | 70 | @tdm.setter 71 | def tdm(self, obj): 72 | if not isinstance(obj, TrialDataManager): 73 | raise TypeError( 74 | 'The tdm property must be an instance of TrialDataManager!') 75 | self._tdm = obj 76 | 77 | def plot(self, src_hypo_group_manager, axes, **kwargs): 78 | """Plots the PDF object. 79 | 80 | Parameters 81 | ---------- 82 | src_hypo_group_manager : instance of SourceHypoGroupManager 83 | The instance of SourceHypoGroupManager that defines the source 84 | hypotheses. 85 | axes : mpl.axes.Axes 86 | The matplotlib Axes object on which the PDF should get 87 | drawn. 88 | 89 | Additional Keyword Arguments 90 | ---------------------------- 91 | Any additional keyword arguments will be passed to the `mpl.imshow` 92 | function. 93 | 94 | Returns 95 | ------- 96 | img : instance of mpl.AxesImage 97 | The AxesImage instance showing the PDF image. 98 | """ 99 | if not isinstance(src_hypo_group_manager, SourceHypoGroupManager): 100 | raise TypeError( 101 | 'The src_hypo_group_manager argument must be an instance of ' 102 | 'SourceHypoGroupManager!') 103 | if not isinstance(axes, Axes): 104 | raise TypeError( 105 | 'The axes argument must be an instance of ' 106 | 'matplotlib.axes.Axes!') 107 | 108 | # The I3EnergyPDF object has two axes, one for log10_energy and sin_dec. 109 | (xbinning, ybinning) = self._pdf.binnings 110 | 111 | pdf_values = np.zeros((xbinning.nbins, ybinning.nbins), dtype=np.float64) 112 | events = DataFieldRecordArray(np.zeros( 113 | (pdf_values.size,), 114 | dtype=[('ix', np.int64), (xbinning.name, np.float64), 115 | ('iy', np.int64), (ybinning.name, np.float64)])) 116 | for (i, ((ix, x), (iy, y))) in enumerate(itertools.product( 117 | enumerate(xbinning.bincenters), 118 | enumerate(ybinning.bincenters))): 119 | events['ix'][i] = ix 120 | events[xbinning.name][i] = x 121 | events['iy'][i] = iy 122 | events[ybinning.name][i] = y 123 | 124 | self._tdm.initialize_for_new_trial(src_hypo_group_manager, events) 125 | 126 | event_pdf_values = self._pdf.get_prob(self._tdm) 127 | pdf_values[events['ix'], events['iy']] = event_pdf_values 128 | 129 | (left, right, bottom, top) = (xbinning.lower_edge, xbinning.upper_edge, 130 | ybinning.lower_edge, ybinning.upper_edge) 131 | img = axes.imshow( 132 | pdf_values.T, 133 | extent=(left, right, bottom, top), 134 | origin='lower', 135 | norm=LogNorm(), 136 | interpolation='none', 137 | **kwargs) 138 | axes.set_xlabel(xbinning.name) 139 | axes.set_ylabel(ybinning.name) 140 | axes.set_title(classname(self._pdf)) 141 | 142 | return img 143 | -------------------------------------------------------------------------------- /skyllh/plotting/i3/pdfratio.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """Plotting module to plot IceCube specific PDF ratio objects.
4 | """ 5 | 6 | import numpy as np 7 | import itertools 8 | 9 | from matplotlib.axes import Axes 10 | from matplotlib.colors import LogNorm 11 | 12 | from skyllh.core.py import classname 13 | from skyllh.core.source_hypo_grouping import ( 14 | SourceHypoGroupManager, 15 | ) 16 | from skyllh.core.storage import DataFieldRecordArray 17 | from skyllh.core.trialdata import TrialDataManager 18 | from skyllh.i3.pdfratio import SplinedI3EnergySigSetOverBkgPDFRatio 19 | 20 | 21 | class SplinedI3EnergySigSetOverBkgPDFRatioPlotter(object): 22 | """Plotter class to plot a SplinedI3EnergySigSetOverBkgPDFRatio object. 23 | """ 24 | def __init__(self, tdm, pdfratio): 25 | """Creates a new plotter object for plotting a 26 | SplinedI3EnergySigSetOverBkgPDFRatio object. 27 | 28 | Parameters 29 | ---------- 30 | tdm : instance of TrialDataManager 31 | The instance of TrialDataManager that provides the data for the 32 | PDF ratio evaluation. 33 | pdfratio : SplinedI3EnergySigSetOverBkgPDFRatio 34 | The PDF ratio object to plot. 35 | """ 36 | self.tdm = tdm 37 | self.pdfratio = pdfratio 38 | 39 | @property 40 | def pdfratio(self): 41 | """The PDF ratio object to plot. 42 | """ 43 | return self._pdfratio 44 | 45 | @pdfratio.setter 46 | def pdfratio(self, pdfratio): 47 | if not isinstance(pdfratio, SplinedI3EnergySigSetOverBkgPDFRatio): 48 | raise TypeError( 49 | 'The pdfratio property must be an instance of ' 50 | 'SplinedI3EnergySigSetOverBkgPDFRatio!') 51 | self._pdfratio = pdfratio 52 | 53 | @property 54 | def tdm(self): 55 | """The TrialDataManager that provides the data for the PDF evaluation. 56 | """ 57 | return self._tdm 58 | 59 | @tdm.setter 60 | def tdm(self, obj): 61 | if not isinstance(obj, TrialDataManager): 62 | raise TypeError( 63 | 'The tdm property must be an instance of TrialDataManager!') 64 | self._tdm = obj 65 | 66 | def plot(self, src_hypo_group_manager, axes, fitparams, **kwargs): 67 | """Plots the PDF ratio for the given set of fit parameter values. 68 | 69 | Parameters 70 | ---------- 71 | src_hypo_group_manager : instance of SourceHypoGroupManager 72 | The instance of SourceHypoGroupManager that defines the source 73 | hypotheses. 74 | axes : mpl.axes.Axes 75 | The matplotlib Axes object on which the PDF ratio should get drawn 76 | to. 77 | fitparams : dict 78 | The dictionary with the set of fit parameter values. 79 | 80 | Additional Keyword Arguments 81 | ---------------------------- 82 | Any additional keyword arguments will be passed to the `mpl.imshow` 83 | function. 84 | 85 | Returns 86 | ------- 87 | img : instance of mpl.AxesImage 88 | The AxesImage instance showing the PDF ratio image. 89 | """ 90 | if not isinstance(src_hypo_group_manager, SourceHypoGroupManager): 91 | raise TypeError( 92 | 'The src_hypo_group_manager argument must be an ' 93 | 'instance of SourceHypoGroupManager!') 94 | if not isinstance(axes, Axes): 95 | raise TypeError( 96 | 'The axes argument must be an instance of ' 97 | 'matplotlib.axes.Axes!') 98 | if not isinstance(fitparams, dict): 99 | raise TypeError( 100 | 'The fitparams argument must be an instance of dict!') 101 | 102 | # Get the binning for the axes. We use the background PDF to get it 103 | # from. By construction, all PDFs use the same binning. We know that 104 | # the PDFs are 2-dimensional. 105 | (xbinning, ybinning) = self._pdfratio.backgroundpdf.binnings 106 | 107 | # Create a 2D array with the ratio values. We put one event into each 108 | # bin.
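# --- Editor's note (added comment): each (x, y) bin of the background PDF's
# binning is probed with one synthetic event placed at the bin centers, so the
# image produced below contains exactly one ratio value per bin.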
109 | ratios = np.zeros((xbinning.nbins, ybinning.nbins), dtype=np.float64) 110 | events = DataFieldRecordArray(np.zeros( 111 | (ratios.size,), 112 | dtype=[('ix', np.int64), (xbinning.name, np.float64), 113 | ('iy', np.int64), (ybinning.name, np.float64)])) 114 | for (i, ((ix, x), (iy, y))) in enumerate(itertools.product( 115 | enumerate(xbinning.bincenters), 116 | enumerate(ybinning.bincenters))): 117 | events['ix'][i] = ix 118 | events[xbinning.name][i] = x 119 | events['iy'][i] = iy 120 | events[ybinning.name][i] = y 121 | 122 | self._tdm.initialize_for_new_trial(src_hypo_group_manager, events) 123 | 124 | event_ratios = self.pdfratio.get_ratio(self._tdm, fitparams) 125 | for i in range(len(events)): 126 | ratios[events['ix'][i], events['iy'][i]] = event_ratios[i] 127 | 128 | (left, right, bottom, top) = (xbinning.lower_edge, xbinning.upper_edge, 129 | ybinning.lower_edge, ybinning.upper_edge) 130 | img = axes.imshow( 131 | ratios.T, 132 | extent=(left, right, bottom, top), 133 | origin='lower', 134 | norm=LogNorm(), 135 | interpolation='none', 136 | **kwargs) 137 | axes.set_xlabel(xbinning.name) 138 | axes.set_ylabel(ybinning.name) 139 | axes.set_title(classname(self._pdfratio)) 140 | 141 | return img 142 | -------------------------------------------------------------------------------- /skyllh/plotting/utils/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/plotting/utils/__init__.py -------------------------------------------------------------------------------- /skyllh/scripting/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/skyllh/scripting/__init__.py -------------------------------------------------------------------------------- /skyllh/scripting/argparser.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """This module contains utility functions for the argument parser of an analysis 4 | script. 5 | """ 6 | 7 | import argparse 8 | 9 | 10 | def create_argparser( 11 | description=None, 12 | options=True): 13 | """Creates an argparser with the given description and adds common options 14 | useful for analysis scripts. 15 | 16 | Parameters 17 | ---------- 18 | description : str | None 19 | The description for the argparser. 20 | options : bool | dict | None 21 | If set to None or False, no options will be added. 22 | If set to True, all common analysis script options will be added. 23 | If set to a dictionary, individual options can be turned on and off. 24 | See the :func:`add_argparser_options` for possible options. 25 | Default is ``True``. 26 | """ 27 | parser = argparse.ArgumentParser( 28 | description=description, 29 | formatter_class=argparse.RawTextHelpFormatter 30 | ) 31 | 32 | if options is True: 33 | options = dict() 34 | 35 | if isinstance(options, dict): 36 | add_argparser_options( 37 | parser=parser, 38 | **options) 39 | 40 | return parser 41 | 42 | 43 | def add_argparser_options( 44 | parser, 45 | config=True, 46 | data_basepath=True, 47 | debug_logfile=True, 48 | enable_tracing=True, 49 | n_cpu=True, 50 | seed=True): 51 | """Adds common argparser options to the given argparser instance, useful for 52 | analysis scripts. 
53 | 54 | Parameters 55 | ---------- 56 | parser : instance of ArgumentParser 57 | The instance of ArgumentParser to which options should get added. 58 | config : bool 59 | If set to ``True``, the ``--config`` option of type ``str`` 60 | will be added. 61 | It specifies the configuration file. 62 | The default value is ``None``. 63 | The option destination is ``config``. 64 | data_basepath : bool 65 | If set to ``True``, the ``--data-basepath`` option of type ``str`` 66 | will be added. 67 | It specifies the base path to the data samples. 68 | The default value is ``None``. 69 | The option destination is ``data_basepath``. 70 | debug_logfile : bool 71 | If set to ``True``, the ``--debug-logfile`` option of type ``str`` 72 | will be added. 73 | If not ``None``, it specifies the log file for debug information. 74 | The default value is ``None``. 75 | The option destination is ``debug_logfile``. 76 | enable_tracing : bool 77 | If set to ``True``, the ``--enable-tracing`` option of type ``bool`` 78 | will be added. 79 | If specified, enables the logging on the tracing level, i.e. a lot of 80 | DEBUG messages. 81 | The default value is ``False``. 82 | The option destination is ``enable_tracing``. 83 | n_cpu : bool 84 | If set to ``True``, the ``--n-cpu`` option of type ``int`` 85 | will be added. 86 | It specifies the number of CPUs to utilize where parallelization is 87 | possible. 88 | The default value is ``1``. 89 | The option destination is ``n_cpu``. 90 | seed : bool 91 | If set to ``True``, the ``--seed`` option of type ``int`` 92 | will be added. 93 | It specifies the seed for the random number generator. 94 | The default value is ``0``. 95 | The option destination is ``seed``. 96 | """ 97 | if config: 98 | parser.add_argument( 99 | '--config', 100 | dest='config', 101 | default=None, 102 | type=str, 103 | help='The configuration file. ' 104 | '(default=None)') 105 | 106 | if data_basepath: 107 | parser.add_argument( 108 | '--data-basepath', 109 | dest='data_basepath', 110 | default=None, 111 | type=str, 112 | help='The base path to the data samples. ' 113 | '(default=None)') 114 | 115 | if debug_logfile: 116 | parser.add_argument( 117 | '--debug-logfile', 118 | dest='debug_logfile', 119 | default=None, 120 | type=str, 121 | help='If not None, it specifies the log file for debug ' 122 | 'information. ' 123 | '(default=None)') 124 | 125 | if enable_tracing: 126 | parser.add_argument( 127 | '--enable-tracing', 128 | dest='enable_tracing', 129 | default=False, 130 | action='store_true', 131 | help='If specified, enables the logging on the tracing level, i.e. ' 132 | 'a lot of DEBUG messages. ' 133 | '(default=False)') 134 | 135 | if n_cpu: 136 | parser.add_argument( 137 | '--n-cpu', 138 | dest='n_cpu', 139 | default=1, 140 | type=int, 141 | help='The number of CPUs to utilize where parallelization is ' 142 | 'possible. ' 143 | '(default=1)') 144 | 145 | if seed: 146 | parser.add_argument( 147 | '--seed', 148 | dest='seed', 149 | default=0, 150 | type=int, 151 | help='The seed for the random number generator. ' 152 | '(default=0)') 153 | -------------------------------------------------------------------------------- /skyllh/scripting/logging.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | """This module contains utility functions for logging functionalities of an 4 | analysis script.
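# --- Editor's note: usage sketch for the argument-parser helpers defined
# above; the option names and defaults are the ones added by
# add_argparser_options().
parser = create_argparser(description='My analysis script.')
args = parser.parse_args(['--seed', '42', '--n-cpu', '4'])
assert args.seed == 42
assert args.n_cpu == 4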
5 | """ 6 | 7 | import logging 8 | 9 | from skyllh.core.debugging import ( 10 | get_logger, 11 | setup_logger, 12 | setup_console_handler, 13 | setup_file_handler, 14 | ) 15 | 16 | 17 | def setup_logging( 18 | cfg, 19 | script_logger_name, 20 | log_format=None, 21 | log_level=logging.INFO, 22 | debug_pathfilename=None): 23 | """Initializes loggers and installs console handlers for the ``skyllh`` and 24 | ``script_logger_name`` loggers. If a debug file is specified, file handlers 25 | for debug messages will be installed as well. 26 | 27 | Parameters 28 | ---------- 29 | cfg : instance of Config 30 | The instance of Config holding the local configuration. 31 | script_logger_name : str 32 | The name of the logger used by the script. 33 | log_format : str | None 34 | The format template of the log message. If set to ``None``, the format 35 | will be taken from ``cfg['debugging']['log_format']``. 36 | log_level : int 37 | The log level of the loggers. The default is ``logging.INFO``. 38 | debug_pathfilename : str | None 39 | If not ``None``, file handlers for DEBUG messages will be installed and 40 | those messages will be stored in the given file. 41 | 42 | Returns 43 | ------- 44 | script_logger : instance of logging.Logger 45 | The logger instance of the script, specified by ``script_logger_name``. 46 | """ 47 | if log_format is None: 48 | log_format = cfg['debugging']['log_format'] 49 | 50 | setup_logger('skyllh', log_level) 51 | setup_logger(script_logger_name, log_level) 52 | 53 | setup_console_handler( 54 | cfg=cfg, 55 | name='skyllh', 56 | log_level=log_level, 57 | log_format=log_format 58 | ) 59 | 60 | setup_console_handler( 61 | cfg=cfg, 62 | name=script_logger_name, 63 | log_level=log_level, 64 | log_format=log_format 65 | ) 66 | 67 | if debug_pathfilename is not None: 68 | setup_file_handler( 69 | cfg=cfg, 70 | name='skyllh', 71 | filename=debug_pathfilename, 72 | log_format=log_format, 73 | log_level=logging.DEBUG 74 | ) 75 | setup_file_handler( 76 | cfg=cfg, 77 | name=script_logger_name, 78 | filename=debug_pathfilename, 79 | log_format=log_format, 80 | log_level=logging.DEBUG 81 | ) 82 | 83 | script_logger = get_logger(script_logger_name) 84 | 85 | return script_logger 86 | -------------------------------------------------------------------------------- /tests/core/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/tests/core/__init__.py -------------------------------------------------------------------------------- /tests/core/test_datafields.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import unittest 4 | 5 | from skyllh.core.datafields import ( 6 | DataFieldStages as DFS, 7 | DataFields, 8 | ) 9 | 10 | 11 | class DataFieldStagesTestCase( 12 | unittest.TestCase 13 | ): 14 | def setUp(self) -> None: 15 | self.stage_and_check = ( 16 | DFS.DATAPREPARATION_EXP | 17 | DFS.DATAPREPARATION_MC | 18 | DFS.ANALYSIS_MC 19 | ) 20 | self.stage_or_check = ( 21 | DFS.DATAPREPARATION_MC | 22 | DFS.ANALYSIS_MC 23 | ) 24 | 25 | def test_and_check__bitwise_ored(self): 26 | check = DFS.and_check( 27 | stage=self.stage_and_check, 28 | stages=( 29 | DFS.DATAPREPARATION_EXP | 30 | DFS.DATAPREPARATION_MC | 31 | DFS.ANALYSIS_MC 32 | ) 33 | ) 34 | self.assertTrue(check) 35 | 36 | check = DFS.and_check( 37 | stage=self.stage_and_check, 38 | stages=( 39 | DFS.DATAPREPARATION_EXP | 40 | 

--------------------------------------------------------------------------------
/tests/core/__init__.py:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/icecube/skyllh/cf1858657bdf745e8ace012ab87301cf32a74362/tests/core/__init__.py
--------------------------------------------------------------------------------
/tests/core/test_datafields.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

import unittest

from skyllh.core.datafields import (
    DataFieldStages as DFS,
    DataFields,
)


class DataFieldStagesTestCase(
        unittest.TestCase
):
    def setUp(self) -> None:
        self.stage_and_check = (
            DFS.DATAPREPARATION_EXP |
            DFS.DATAPREPARATION_MC |
            DFS.ANALYSIS_MC
        )
        self.stage_or_check = (
            DFS.DATAPREPARATION_MC |
            DFS.ANALYSIS_MC
        )

    def test_and_check__bitwise_ored(self):
        check = DFS.and_check(
            stage=self.stage_and_check,
            stages=(
                DFS.DATAPREPARATION_EXP |
                DFS.DATAPREPARATION_MC |
                DFS.ANALYSIS_MC
            )
        )
        self.assertTrue(check)

        check = DFS.and_check(
            stage=self.stage_and_check,
            stages=(
                DFS.DATAPREPARATION_EXP |
                DFS.ANALYSIS_EXP |
                DFS.ANALYSIS_MC
            )
        )
        self.assertFalse(check)

    def test_and_check__sequence_int(self):
        check = DFS.and_check(
            stage=self.stage_and_check,
            stages=(
                DFS.DATAPREPARATION_EXP,
                DFS.DATAPREPARATION_MC,
                DFS.ANALYSIS_MC,
            )
        )
        self.assertTrue(check)

        check = DFS.and_check(
            stage=self.stage_and_check,
            stages=(
                DFS.DATAPREPARATION_EXP,
                DFS.ANALYSIS_EXP,
                DFS.ANALYSIS_MC,
            )
        )
        self.assertFalse(check)

    def test_and_check__mixture_bitwise_ored_sequence_int(self):
        check = DFS.and_check(
            stage=self.stage_and_check,
            stages=(
                DFS.DATAPREPARATION_EXP | DFS.DATAPREPARATION_MC,
                DFS.ANALYSIS_MC,
            )
        )
        self.assertTrue(check)

    def test_or_check__bitwise_ored(self):
        check = DFS.or_check(
            stage=self.stage_or_check,
            stages=(
                DFS.DATAPREPARATION_EXP |
                DFS.DATAPREPARATION_MC |
                DFS.ANALYSIS_MC
            )
        )
        self.assertTrue(check)

        check = DFS.or_check(
            stage=self.stage_or_check,
            stages=(
                DFS.ANALYSIS_EXP
            )
        )
        self.assertFalse(check)

    def test_or_check__sequence_int(self):
        check = DFS.or_check(
            stage=self.stage_or_check,
            stages=(
                DFS.DATAPREPARATION_EXP,
                DFS.DATAPREPARATION_MC,
                DFS.ANALYSIS_MC
            )
        )
        self.assertTrue(check)

        check = DFS.or_check(
            stage=self.stage_or_check,
            stages=(
                DFS.ANALYSIS_EXP,
            )
        )
        self.assertFalse(check)

    def test_or_check__mixture_bitwise_ored_sequence_int(self):
        check = DFS.or_check(
            stage=self.stage_or_check,
            stages=(
                DFS.DATAPREPARATION_EXP | DFS.ANALYSIS_EXP,
                DFS.ANALYSIS_MC,
            )
        )
        self.assertTrue(check)


class DataFieldsTestCase(
        unittest.TestCase
):
    def setUp(self) -> None:
        self.datafields = {
            'f0': DFS.DATAPREPARATION_EXP,
            'f1': DFS.DATAPREPARATION_EXP | DFS.DATAPREPARATION_MC,
        }

    def test_get_joint_names(self):
        fieldnames = DataFields.get_joint_names(
            datafields=self.datafields,
            stages=DFS.DATAPREPARATION_EXP)
        self.assertEqual(fieldnames, ['f0', 'f1'])

        fieldnames = DataFields.get_joint_names(
            datafields=self.datafields,
            stages=DFS.DATAPREPARATION_MC)
        self.assertEqual(fieldnames, ['f1'])

        fieldnames = DataFields.get_joint_names(
            datafields=self.datafields,
            stages=DFS.ANALYSIS_EXP)
        self.assertEqual(fieldnames, [])


if __name__ == '__main__':
    unittest.main()
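
The tests above pin down the stage-check semantics: ``and_check`` passes if
the stage contains every given stage mask, while ``or_check`` passes if it
overlaps at least one of them; both accept a single bitwise-ORed integer or a
sequence of integers. A minimal free-standing sketch of that logic over
integer bit flags (the flag values are illustrative, not SkyLLH's actual
implementation):

# Illustrative flag values; SkyLLH defines its own on DataFieldStages.
DATAPREPARATION_EXP = 1 << 0
DATAPREPARATION_MC = 1 << 1
ANALYSIS_EXP = 1 << 2
ANALYSIS_MC = 1 << 3


def _as_masks(stages):
    # A single bitwise-ORed int becomes a one-element sequence of masks.
    return stages if isinstance(stages, (tuple, list)) else (stages,)


def and_check(stage, stages):
    # Every mask must be fully contained in ``stage``.
    return all((stage & m) == m for m in _as_masks(stages))


def or_check(stage, stages):
    # At least one mask must share a bit with ``stage``.
    return any(stage & m for m in _as_masks(stages))


stage = DATAPREPARATION_EXP | DATAPREPARATION_MC | ANALYSIS_MC
assert and_check(stage, (DATAPREPARATION_EXP | DATAPREPARATION_MC, ANALYSIS_MC))
assert not and_check(stage, (DATAPREPARATION_EXP, ANALYSIS_EXP))
assert or_check(stage, ANALYSIS_EXP | ANALYSIS_MC)
assert not or_check(stage, ANALYSIS_EXP)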
6 | """ 7 | 8 | import unittest 9 | 10 | from skyllh.core.model import ( 11 | Model, 12 | ModelCollection 13 | ) 14 | 15 | 16 | class Model_TestCase(unittest.TestCase): 17 | def setUp(self): 18 | self.model = Model('Model1') 19 | 20 | def test_name(self): 21 | self.assertEqual(self.model.name, 'Model1') 22 | 23 | def test_id(self): 24 | self.assertTrue(isinstance(self.model.id, int)) 25 | 26 | 27 | class ModelCollection_TestCase(unittest.TestCase): 28 | def setUp(self): 29 | self.model1 = Model('Model1') 30 | self.model2 = Model('Model2') 31 | self.modelcoll = ModelCollection((self.model1, self.model2)) 32 | 33 | def test_cast(self): 34 | # Test cast function for None type. 35 | modelcoll = ModelCollection.cast(None) 36 | self.assertTrue(issubclass(modelcoll.model_type, Model)) 37 | self.assertEqual(len(modelcoll.models), 0) 38 | 39 | # Test cast function for single Model instance. 40 | modelcoll = ModelCollection.cast(self.model1) 41 | self.assertTrue(issubclass(modelcoll.model_type, Model)) 42 | self.assertEqual(len(modelcoll.models), 1) 43 | self.assertEqual(modelcoll.models[0], self.model1) 44 | 45 | # Test cast function for ModelCollection. 46 | modelcoll = ModelCollection.cast(self.modelcoll) 47 | self.assertEqual(modelcoll, self.modelcoll) 48 | 49 | # Test sequence of Model instances. 50 | modelcoll = ModelCollection.cast((self.model1, self.model2)) 51 | self.assertTrue(issubclass(modelcoll.model_type, Model)) 52 | self.assertEqual(len(modelcoll.models), 2) 53 | self.assertEqual(modelcoll.models[0], self.model1) 54 | self.assertEqual(modelcoll.models[1], self.model2) 55 | 56 | # Test that non-Model instances raises a TypeError. 57 | with self.assertRaises(TypeError): 58 | modelcoll = ModelCollection.cast('A str instance.') 59 | with self.assertRaises(TypeError): 60 | modelcoll = ModelCollection.cast(('str1', 'str2')) 61 | 62 | def test_model_type(self): 63 | self.assertTrue(issubclass(self.modelcoll.model_type, Model)) 64 | 65 | def test_models(self): 66 | self.assertEqual(len(self.modelcoll.models), 2) 67 | self.assertEqual(self.modelcoll.models[0], self.model1) 68 | self.assertEqual(self.modelcoll.models[1], self.model2) 69 | 70 | 71 | if __name__ == '__main__': 72 | unittest.main() 73 | -------------------------------------------------------------------------------- /tests/core/test_py.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # Author: Dr. 

--------------------------------------------------------------------------------
/tests/core/test_py.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
# Author: Dr. Martin Wolf

import unittest

from skyllh.core.py import (
    ConstPyQualifier,
    NamedObjectCollection,
    const,
    issequenceof
)


class A(object):
    def __init__(self, name=None):
        super(A, self).__init__()

        self._name = name

    @property
    def name(self):
        return self._name


class B(object):
    def __init__(self, name=None):
        super(B, self).__init__()

        self.name = name


class ConstPyQualifier_TestCase(unittest.TestCase):
    def test_call(self):
        a = const(A())
        self.assertTrue(hasattr(a, '__pyqualifiers__'))
        self.assertTrue(ConstPyQualifier in a.__pyqualifiers__)

    def test_check(self):
        a = const(A())
        self.assertTrue(const.check(a))


class issequenceof_TestCase(unittest.TestCase):
    def test_type(self):
        seq = [A('a1'), A('a2')]
        self.assertTrue(issequenceof(seq, A))

        seq = [A('a1'), B('b1')]
        self.assertFalse(issequenceof(seq, A))

    def test_pyqualifiers(self):
        """Tests if the issequenceof function works with PyQualifiers.
        """
        seq = [const(A('a1')), const(A('a2'))]
        self.assertTrue(issequenceof(seq, A, const))

        seq = [const(A('a1')), A('a2')]
        self.assertFalse(issequenceof(seq, A, const))


class NamedObjectCollection_TestCase(
        unittest.TestCase):
    def setUp(self):
        self.a1 = A('a1')
        self.a2 = A('a2')
        self.a3 = A('a3')
        self.noc = NamedObjectCollection([self.a1, self.a2, self.a3])

    def test_name_list(self):
        self.assertEqual(self.noc.name_list, ['a1', 'a2', 'a3'])

    def test__contains__(self):
        self.assertTrue('a1' in self.noc)
        self.assertTrue('a2' in self.noc)
        self.assertTrue('a3' in self.noc)
        self.assertFalse('a4' in self.noc)

    def test__getitem__(self):
        self.assertTrue(self.noc['a1'] is self.a1)
        self.assertTrue(self.noc['a2'] is self.a2)
        self.assertTrue(self.noc['a3'] is self.a3)

        self.assertTrue(self.noc[0] is self.a1)
        self.assertTrue(self.noc[1] is self.a2)
        self.assertTrue(self.noc[2] is self.a3)

    def test_get_index_by_name(self):
        self.assertEqual(self.noc.get_index_by_name('a1'), 0)
        self.assertEqual(self.noc.get_index_by_name('a2'), 1)
        self.assertEqual(self.noc.get_index_by_name('a3'), 2)

    def test_add(self):
        a4 = A('a4')
        self.noc.add(a4)

        self.assertEqual(self.noc.name_list, ['a1', 'a2', 'a3', 'a4'])
        self.assertEqual(self.noc.get_index_by_name('a4'), 3)
        self.assertTrue(self.noc['a4'] is a4)

    def test_pop(self):
        obj = self.noc.pop()
        self.assertTrue(obj is self.a3)
        self.assertEqual(self.noc.name_list, ['a1', 'a2'])
        self.assertEqual(self.noc.get_index_by_name('a1'), 0)
        self.assertEqual(self.noc.get_index_by_name('a2'), 1)

    def test_pop_with_int(self):
        obj = self.noc.pop(1)
        self.assertTrue(obj is self.a2)
        self.assertEqual(self.noc.name_list, ['a1', 'a3'])
        self.assertEqual(self.noc.get_index_by_name('a1'), 0)
        self.assertEqual(self.noc.get_index_by_name('a3'), 1)

    def test_pop_with_str(self):
        obj = self.noc.pop('a2')
        self.assertTrue(obj is self.a2)
        self.assertEqual(self.noc.name_list, ['a1', 'a3'])
        self.assertEqual(self.noc.get_index_by_name('a1'), 0)
        self.assertEqual(self.noc.get_index_by_name('a3'), 1)


if __name__ == '__main__':
    unittest.main()
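
The ``NamedObjectCollection`` tests pin down a container contract: lookup by
name or by index, and index bookkeeping that stays consistent after ``add``
and ``pop``. A compact sketch of that contract (an illustration, not the
``skyllh.core.py`` implementation, which presumably caches a name-to-index
mapping instead of recomputing it):

class MiniNamedCollection:
    """Minimal name-indexed collection mirroring the tested behaviour."""

    def __init__(self, objs):
        self._objs = list(objs)

    @property
    def name_list(self):
        return [obj.name for obj in self._objs]

    def get_index_by_name(self, name):
        return self.name_list.index(name)

    def __contains__(self, name):
        return name in self.name_list

    def __getitem__(self, key):
        # A str key is a name lookup; anything else is a positional index.
        if isinstance(key, str):
            return self._objs[self.get_index_by_name(key)]
        return self._objs[key]

    def add(self, obj):
        self._objs.append(obj)

    def pop(self, key=None):
        # pop() removes the last object; pop(int) and pop(str) remove by
        # index and by name, respectively.
        if key is None:
            return self._objs.pop()
        if isinstance(key, str):
            key = self.get_index_by_name(key)
        return self._objs.pop(key)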

--------------------------------------------------------------------------------
/tests/core/test_random.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

import unittest

import numpy as np

from skyllh.core.random import (
    RandomChoice,
    RandomStateService,
)


class RandomChoice_TestCase(
        unittest.TestCase,
):
    def setUp(self):
        self.size = 100
        self.items = np.arange(self.size)
        self.probs = np.random.uniform(low=0, high=1, size=self.size)
        self.probs /= np.sum(self.probs)

    def test_choice(self):
        rss = RandomStateService(seed=1)
        np_items = rss.random.choice(
            self.items,
            size=5,
            replace=True,
            p=self.probs)

        rss = RandomStateService(seed=1)
        random_choice = RandomChoice(
            items=self.items,
            probabilities=self.probs)
        rc_items = random_choice(
            rss=rss,
            size=5)

        np.testing.assert_equal(np_items, rc_items)


if __name__ == '__main__':
    unittest.main()
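
The test requires ``RandomChoice`` to reproduce ``rss.random.choice``
bit-for-bit for the same seed, which implies it mirrors numpy's weighted
sampling while caching what can be precomputed. Conceptually, weighted
sampling with replacement reduces to inverting the cumulative distribution;
a sketch of that idea (not guaranteed to match numpy's random stream, and
not SkyLLH's actual code):

import numpy as np


def weighted_choice(rng, items, probs, size):
    # Precompute the cumulative distribution once; caching this across
    # repeated trials with the same items/probabilities is presumably the
    # point of a dedicated RandomChoice object.
    cdf = np.cumsum(probs)
    cdf /= cdf[-1]  # guard against rounding drift
    u = rng.uniform(size=size)
    # Index of the first cdf entry exceeding each uniform draw.
    idxs = np.searchsorted(cdf, u, side='right')
    return items[idxs]


rng = np.random.default_rng(1)
items = np.arange(100)
probs = np.full(100, 1/100)
print(weighted_choice(rng, items, probs, size=5))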
41 | """ 42 | tdm = Mock(spec_set=[ 43 | '__class__', 44 | 'trial_data_state_id', 45 | 'get_n_values', 46 | 'src_evt_idxs', 47 | 'n_sources', 48 | 'n_selected_events', 49 | 'get_data']) 50 | 51 | def tdm_get_data(key): 52 | if n_selected_events == 3: 53 | return np.array([0, 5, 9.7]) 54 | raise ValueError( 55 | f'Value n_selected_events={n_selected_events} is not supported!') 56 | 57 | tdm.__class__ = TrialDataManager 58 | tdm.trial_data_state_id = 1 59 | tdm.get_n_values = lambda: n_sources*n_selected_events 60 | tdm.src_evt_idxs = ( 61 | np.repeat(np.arange(n_sources), n_selected_events), 62 | np.tile(np.arange(n_selected_events), n_sources) 63 | ) 64 | tdm.n_sources = n_sources 65 | tdm.n_selected_events = n_selected_events 66 | tdm.get_data = tdm_get_data 67 | 68 | return tdm 69 | 70 | 71 | class SignalTimePDFTestCase( 72 | unittest.TestCase, 73 | ): 74 | def setUp(self): 75 | self.cfg = Config() 76 | 77 | self.pmm = ParameterModelMapper( 78 | models=[ 79 | SourceModel(), 80 | SourceModel()]) 81 | 82 | self.livetime = Livetime(np.array([ 83 | [0, 1], 84 | [1.3, 4.6], 85 | [7.7, 10], 86 | ])) 87 | 88 | self.S = (1-0) + (4.6-1.3) + (10-7.7) 89 | 90 | self.time_flux_profile = BoxTimeFluxProfile( 91 | t0=5, 92 | tw=10, 93 | cfg=self.cfg) 94 | 95 | self.sig_time_pdf = SignalTimePDF( 96 | pmm=self.pmm, 97 | livetime=self.livetime, 98 | time_flux_profile=self.time_flux_profile, 99 | cfg=self.cfg) 100 | 101 | def test__str__(self): 102 | str(self.sig_time_pdf) 103 | 104 | def test__calculate_sum_of_ontime_time_flux_profile_integrals(self): 105 | S = self.sig_time_pdf._calculate_sum_of_ontime_time_flux_profile_integrals() 106 | self.assertEqual(S, self.S) 107 | 108 | def test_get_pd(self): 109 | tdm = create_tdm(n_sources=self.pmm.n_sources, n_selected_events=3) 110 | src_params_recarray = self.pmm.create_src_params_recarray(gflp_values=[]) 111 | 112 | (pd, grads) = self.sig_time_pdf.get_pd( 113 | tdm=tdm, 114 | params_recarray=src_params_recarray) 115 | 116 | np.testing.assert_almost_equal( 117 | pd, 118 | np.array([ 119 | 1/self.S, 120 | 0., 121 | 1/self.S, 122 | 1/self.S, 123 | 0., 124 | 1/self.S, 125 | ])) 126 | 127 | self.assertEqual(grads, {}) 128 | 129 | 130 | if __name__ == '__main__': 131 | unittest.main() 132 | -------------------------------------------------------------------------------- /tests/core/test_source_model.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | 3 | import unittest 4 | 5 | from skyllh.core.catalog import ( 6 | SourceCatalog, 7 | ) 8 | from skyllh.core.source_model import ( 9 | PointLikeSource, 10 | SourceModel, 11 | SourceModelCollection, 12 | ) 13 | 14 | 15 | class SourceModelTestCase( 16 | unittest.TestCase 17 | ): 18 | def setUp(self): 19 | self.name = 'MySource' 20 | self.classification = 'MyClassification' 21 | self.weight = 1.1 22 | 23 | self.source_model = SourceModel( 24 | name=self.name, 25 | classification=self.classification, 26 | weight=self.weight) 27 | 28 | def test_name(self): 29 | self.assertEqual(self.source_model.name, self.name) 30 | 31 | def test_classification(self): 32 | self.assertEqual(self.source_model.classification, self.classification) 33 | 34 | def test_weight(self): 35 | self.assertEqual(self.source_model.weight, self.weight) 36 | 37 | 38 | class SourceModelCollectionTestCase( 39 | unittest.TestCase, 40 | ): 41 | def setUp(self): 42 | self.ra = 0 43 | self.dec = 1 44 | 45 | def test_SourceModelCollection(self): 46 | source_model1 = SourceModel(self.ra, self.dec) 47 

--------------------------------------------------------------------------------
/tests/core/test_source_model.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-

import unittest

from skyllh.core.catalog import (
    SourceCatalog,
)
from skyllh.core.source_model import (
    PointLikeSource,
    SourceModel,
    SourceModelCollection,
)


class SourceModelTestCase(
        unittest.TestCase
):
    def setUp(self):
        self.name = 'MySource'
        self.classification = 'MyClassification'
        self.weight = 1.1

        self.source_model = SourceModel(
            name=self.name,
            classification=self.classification,
            weight=self.weight)

    def test_name(self):
        self.assertEqual(self.source_model.name, self.name)

    def test_classification(self):
        self.assertEqual(self.source_model.classification, self.classification)

    def test_weight(self):
        self.assertEqual(self.source_model.weight, self.weight)


class SourceModelCollectionTestCase(
        unittest.TestCase,
):
    def setUp(self):
        self.ra = 0
        self.dec = 1

    def test_SourceModelCollection(self):
        source_model1 = SourceModel(self.ra, self.dec)
        source_model2 = SourceModel(self.ra, self.dec)

        source_collection_casted = SourceModelCollection.cast(
            source_model1,
            "Could not cast SourceModel to SourceCollection")
        source_collection = SourceModelCollection(
            source_type=SourceModel,
            sources=[source_model1, source_model2])

        self.assertIsInstance(source_collection_casted, SourceModelCollection)
        self.assertEqual(source_collection.source_type, SourceModel)
        self.assertIsInstance(source_collection.sources[0], SourceModel)
        self.assertIsInstance(source_collection.sources[1], SourceModel)


class SourceCatalogTestCase(
        unittest.TestCase,
):
    def setUp(self):
        self.name = 'MySourceCatalog'
        self.ra = 0.1
        self.dec = 1.1
        self.source1 = SourceModel(self.ra, self.dec)
        self.source2 = SourceModel(self.ra, self.dec)

        self.catalog = SourceCatalog(
            name=self.name,
            sources=[self.source1, self.source2],
            source_type=SourceModel)

    def test_name(self):
        self.assertEqual(self.catalog.name, self.name)

    def test_as_SourceModelCollection(self):
        sc = self.catalog.as_SourceModelCollection()
        self.assertIsInstance(sc, SourceModelCollection)


class PointLikeSourceTestCase(
        unittest.TestCase,
):
    def setUp(self):
        self.name = 'MyPointLikeSource'
        self.ra = 0.1
        self.dec = 1.1
        self.source = PointLikeSource(
            name=self.name,
            ra=self.ra,
            dec=self.dec)

    def test_name(self):
        self.assertEqual(self.source.name, self.name)

    def test_ra(self):
        self.assertEqual(self.source.ra, self.ra)

    def test_dec(self):
        self.assertEqual(self.source.dec, self.dec)


if __name__ == '__main__':
    unittest.main()
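
Taken together, these tests document the typical construction path: wrap
sources in a ``SourceCatalog`` and convert it to a ``SourceModelCollection``
when a plain collection is needed. A short sketch using only the API
exercised above (names and coordinates are example values):

from skyllh.core.catalog import SourceCatalog
from skyllh.core.source_model import PointLikeSource, SourceModel

catalog = SourceCatalog(
    name='DemoCatalog',
    sources=[
        PointLikeSource(name='SrcA', ra=0.1, dec=1.1),
        PointLikeSource(name='SrcB', ra=0.2, dec=-0.3),
    ],
    source_type=SourceModel)

# Convert to a plain source-model collection, e.g. for an analysis setup.
smc = catalog.as_SourceModelCollection()
print([src.name for src in smc.sources])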
4 | """ 5 | 6 | import numpy as np 7 | import numpy.lib.recfunctions as np_rfn 8 | 9 | 10 | def generate_testdata(): 11 | exp_testdata_dtype = np.dtype([ 12 | ('time', '