├── .gitignore
├── .travis.yml
├── README.rst
├── docs
│   ├── Makefile
│   ├── conf.py
│   ├── fact.analysis.rst
│   ├── fact.auxservices.rst
│   ├── fact.coordinates.rst
│   ├── fact.credentials.rst
│   ├── fact.factdb.rst
│   ├── fact.instrument.rst
│   ├── fact.plotting.rst
│   ├── fact.rst
│   ├── figures
│   │   └── pincushin_distortion_slope.png
│   ├── index.rst
│   ├── make.bat
│   └── setup.rst
├── examples
│   ├── export_ctapipe_camera_geometry.py
│   ├── other_factcamera.py
│   ├── path_utils.ipynb
│   ├── plot_qla.py
│   ├── pydim_examples
│   │   ├── commands-client.py
│   │   ├── commands-server.py
│   │   ├── services-client.py
│   │   └── services-server.py
│   └── toy_events.py
├── fact
│   ├── VERSION
│   ├── __init__.py
│   ├── analysis
│   │   ├── __init__.py
│   │   ├── binning.py
│   │   ├── core.py
│   │   ├── scripts
│   │   │   ├── __init__.py
│   │   │   ├── radec.py
│   │   │   └── theta.py
│   │   ├── source.py
│   │   └── statistics.py
│   ├── auxservices
│   │   ├── __init__.py
│   │   ├── base.py
│   │   └── services.py
│   ├── coordinates
│   │   ├── __init__.py
│   │   ├── camera_frame.py
│   │   ├── representation.py
│   │   └── utils.py
│   ├── credentials
│   │   ├── __init__.py
│   │   └── credentials.encrypted
│   ├── encrypt_credentials.py
│   ├── factdb
│   │   ├── __init__.py
│   │   ├── database.py
│   │   ├── models.py
│   │   └── utils.py
│   ├── instrument
│   │   ├── __init__.py
│   │   ├── camera.py
│   │   ├── constants.py
│   │   └── trigger.py
│   ├── io.py
│   ├── path.py
│   ├── plotting
│   │   ├── __init__.py
│   │   ├── analysis.py
│   │   ├── core.py
│   │   └── utils.py
│   ├── qla.py
│   ├── resources
│   │   ├── FACTmap111030.txt
│   │   ├── known_problems_from_trac.txt
│   │   └── pixel-map.csv
│   └── time.py
├── github_deploy_key.enc
├── requirements.txt
├── resources
│   ├── MasterList-v3.txt
│   ├── PatchList.txt
│   ├── Trigger-Patches.txt
│   ├── bias-crate-measurement-201300504.txt
│   ├── bias-positions.txt
│   ├── gapd-offset-20140321.txt
│   ├── sensor-pos.txt
│   └── voltage-calibration.txt
├── setup.cfg
├── setup.py
└── tests
    ├── resources
    │   ├── gammas.hdf5
    │   └── proton_header_test.hdf5
    ├── test_all.py
    ├── test_analysis.py
    ├── test_analysis_source.py
    ├── test_camera.py
    ├── test_coordinate_trafos.py
    ├── test_coordinates.py
    ├── test_io.py
    ├── test_path.py
    └── test_time.py

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
github_deploy_key*
*.swp
.pytest_cache

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.cache
nosetests.xml
coverage.xml

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
language: python

matrix:
  include:
    - python: "3.6"
    - python: "3.7"
      dist: xenial

addons:
  apt:
    packages:
      - libhdf5-serial-dev


before_install:
  - export MPLBACKEND=agg
  - pip install --upgrade pip

install:
  - pip install restructuredtext-lint sphinx~=1.8 pygments
  # make sure tables and h5py get linked vs the same hdf5 library
  - pip install --no-binary=h5py --no-binary=tables .

script:
  - python setup.py test
  - rst-lint README.rst
  - python setup.py build_sphinx


deploy:
  - provider: pypi
    skip_cleanup: true
    distributions: sdist
    user: fact-deploybot
    password:
      secure: "HjD8z63sITDhK4TwcnF/q1K+nF3nIslpoVUdJhSKlX/Hv8NaFajt0bM32KqIEADMAj510dcuHlZtcyDCX/9TTF4U9fAPkZSsoAa4yyD7BwcY2arwHDBgYSvGAHTqCzMwZvNcE+k0A7ZhfqjwwW+dtMM7Hx+vS+MfqhXy6ZrLbEg="
    on:
      branch: master
      tags: true
      condition: $TRAVIS_PYTHON_VERSION = "3.6"

--------------------------------------------------------------------------------
/README.rst:
--------------------------------------------------------------------------------
pyfact |TravisBuildStatus| |PyPIStatus|
=======================================

A Python package with utilities to work with the FACT Imaging Cherenkov Telescope
-----------------------------------------------------------------------------------

Install with

.. code::

    $ pip install pyfact

This automatically takes care of all dependencies that are installable
with pip.

However, if you want to use the GUI Event Viewer, you will need to
install Tk **before** you install ``matplotlib``, as it depends on the
tkagg backend.

Functions
~~~~~~~~~

``fact`` includes several functions to convert the timestamps used in
FACT data to more standard formats and vice versa.

For example:

.. code:: python

    from fact import run2dt

    # convert the FACT fNight format to a python datetime object:
    date = run2dt("20150101")

Submodules
----------

io
~~

To store pandas DataFrames in column-oriented storage inside hdf5 files,
we created some helpful wrappers around ``pandas`` and ``h5py``:

.. code:: python

    from fact.io import read_h5py, to_h5py
    import pandas as pd

    df = pd.DataFrame({'a': [1, 2, 3], 'b': [4, 5, 6]})
    to_h5py(df, 'test.hdf5', key='events')

    print(read_h5py('test.hdf5', key='events'))
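For large files it is often enough to load a subset of the stored columns.
A minimal sketch, assuming your installed version of ``read_h5py`` accepts a
``columns`` keyword argument:

.. code:: python

    from fact.io import read_h5py

    # load only column 'a'; the `columns` keyword is an assumption,
    # check the signature of read_h5py in your installed version
    df = read_h5py('test.hdf5', key='events', columns=['a'])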
plotting
~~~~~~~~

Utilities for plotting data into a FACT camera view, based on matplotlib.

.. code:: python

    import matplotlib.pyplot as plt
    from fact.plotting import camera
    from numpy.random import normal

    # create some pseudo data with shape (10, 1440):
    data = normal(30, 5, (10, 1440))

    camera(data[0])
    plt.show()
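Individual pixels can be highlighted on top of a camera plot with
``mark_pixel``, which accepts a list of pixel ids or a boolean mask, as
demonstrated in ``examples/other_factcamera.py``:

.. code:: python

    import matplotlib.pyplot as plt
    from fact.plotting import camera, mark_pixel
    from numpy.random import normal

    data = normal(30, 5, 1440)

    camera(data)
    # highlight all pixels with a value larger than 35
    mark_pixel(data > 35, linewidth=1)
    plt.show()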
There are also functions to get the camera geometry from the source file
delivered with the package:

.. code:: python

    from fact.plotting import get_pixel_coords

    pixel_x, pixel_y = get_pixel_coords()

factdb
------

This module contains ``peewee`` ``Models`` for our ``factdata`` MySQL database.
These were automatically created by ``peewee`` and provide means to query the
database in python without writing raw sql queries.

For example, to get the total number of runs taken by FACT you can do:

.. code:: python

    from fact.factdb import connect_database, RunInfo

    connect_database()  # this uses the credentials module if no config is given

    num_runs = RunInfo.select().count()
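Since these are ordinary ``peewee`` models, queries can also be filtered.
A small sketch, assuming the auto-generated ``RunInfo`` model exposes the
``fNight`` column under that attribute name:

.. code:: python

    from fact.factdb import connect_database, RunInfo

    connect_database()

    # count the runs taken in a single night; `RunInfo.fNight` is assumed
    # to be the attribute peewee generated for the fNight column
    num_runs_night = RunInfo.select().where(RunInfo.fNight == 20160101).count()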
A few convenience functions are already implemented.
To get a ``pandas.DataFrame`` containing the observation time per source and
runtype, you can do:


.. code:: python

    from fact.factdb import connect_database, get_ontime_per_source_and_runtype

    connect_database()

    print(get_ontime_per_source_and_runtype())


To download a table of the database and read it into a pandas DataFrame
without using peewee:

.. code:: python

    from fact import credentials
    import pandas as pd

    factDB = credentials.create_factdb_engine()
    runInfo = pd.read_sql_table(table_name="RunInfo", con=factDB)
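``RunInfo`` is a large table, so instead of downloading it completely you can
also let the server execute a query and only transfer the result. This is
plain ``pandas``/SQL; the column names ``fNight`` and ``fRunID`` are assumed
to exist in ``RunInfo``:

.. code:: python

    from fact import credentials
    import pandas as pd

    factDB = credentials.create_factdb_engine()

    # transfer only two columns of a single night instead of the whole table
    query = 'SELECT fNight, fRunID FROM RunInfo WHERE fNight = 20160101'
    df = pd.read_sql_query(query, con=factDB)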
auxservices
-----------

Utilities to read our aux FITS files into pandas DataFrames.

.. code:: python

    from fact.auxservices import MagicWeather
    from datetime import date

    weather = MagicWeather(auxdir='/fact/aux/')

    df = weather.read_date(date(2016, 1, 1))

.. |TravisBuildStatus| image:: https://travis-ci.org/fact-project/pyfact.svg?branch=master
   :target: https://travis-ci.org/fact-project/pyfact

.. |PyPIStatus| image:: https://badge.fury.io/py/pyfact.svg
   :target: https://pypi.python.org/pypi/pyfact

--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
# Makefile for Sphinx documentation
#

# You can set these variables from the command line.
SPHINXOPTS    =
SPHINXBUILD   = sphinx-build
PAPER         =
BUILDDIR      = _build

# Internal variables.
PAPEROPT_a4     = -D latex_paper_size=a4
PAPEROPT_letter = -D latex_paper_size=letter
ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
# the i18n builder cannot share the environment and doctrees with the others
I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .

.PHONY: help
help:
	@echo "Please use \`make <target>' where <target> is one of"
	@echo "  html       to make standalone HTML files"
	@echo "  dirhtml    to make HTML files named index.html in directories"
	@echo "  singlehtml to make a single large HTML file"
	@echo "  pickle     to make pickle files"
	@echo "  json       to make JSON files"
	@echo "  htmlhelp   to make HTML files and a HTML help project"
	@echo "  qthelp     to make HTML files and a qthelp project"
	@echo "  applehelp  to make an Apple Help Book"
	@echo "  devhelp    to make HTML files and a Devhelp project"
	@echo "  epub       to make an epub"
	@echo "  epub3      to make an epub3"
	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
	@echo "  latexpdfja to make LaTeX files and run them through platex/dvipdfmx"
	@echo "  text       to make text files"
	@echo "  man        to make manual pages"
	@echo "  texinfo    to make Texinfo files"
	@echo "  info       to make Texinfo files and run them through makeinfo"
	@echo "  gettext    to make PO message catalogs"
	@echo "  changes    to make an overview of all changed/added/deprecated items"
	@echo "  xml        to make Docutils-native XML files"
	@echo "  pseudoxml  to make pseudoxml-XML files for display purposes"
	@echo "  linkcheck  to check all external links for integrity"
	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"
	@echo "  coverage   to run coverage check of the documentation (if enabled)"
	@echo "  dummy      to check syntax errors of document sources"

.PHONY: clean
clean:
	rm -rf $(BUILDDIR)/*

.PHONY: html
html:
	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

.PHONY: dirhtml
dirhtml:
	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
	@echo
	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."

.PHONY: singlehtml
singlehtml:
	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
	@echo
	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."

.PHONY: pickle
pickle:
	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
	@echo
	@echo "Build finished; now you can process the pickle files."

.PHONY: json
json:
	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
	@echo
	@echo "Build finished; now you can process the JSON files."

.PHONY: htmlhelp
htmlhelp:
	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
	@echo
	@echo "Build finished; now you can run HTML Help Workshop with the" \
	      ".hhp project file in $(BUILDDIR)/htmlhelp."

.PHONY: qthelp
qthelp:
	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
	@echo
	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/pyfact.qhcp"
	@echo "To view the help file:"
	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/pyfact.qhc"

.PHONY: applehelp
applehelp:
	$(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp
	@echo
	@echo "Build finished. The help book is in $(BUILDDIR)/applehelp."
	@echo "N.B. You won't be able to view it unless you put it in" \
	      "~/Library/Documentation/Help or install it in your application" \
	      "bundle."

.PHONY: devhelp
devhelp:
	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
	@echo
	@echo "Build finished."
	@echo "To view the help file:"
	@echo "# mkdir -p $$HOME/.local/share/devhelp/pyfact"
	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/pyfact"
	@echo "# devhelp"

.PHONY: epub
epub:
	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
	@echo
	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."

.PHONY: epub3
epub3:
	$(SPHINXBUILD) -b epub3 $(ALLSPHINXOPTS) $(BUILDDIR)/epub3
	@echo
	@echo "Build finished. The epub3 file is in $(BUILDDIR)/epub3."

.PHONY: latex
latex:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo
	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
	@echo "Run \`make' in that directory to run these through (pdf)latex" \
	      "(use \`make latexpdf' here to do that automatically)."

.PHONY: latexpdf
latexpdf:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through pdflatex..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: latexpdfja
latexpdfja:
	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
	@echo "Running LaTeX files through platex and dvipdfmx..."
	$(MAKE) -C $(BUILDDIR)/latex all-pdf-ja
	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."

.PHONY: text
text:
	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
	@echo
	@echo "Build finished. The text files are in $(BUILDDIR)/text."

.PHONY: man
man:
	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
	@echo
	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."

.PHONY: texinfo
texinfo:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo
	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
	@echo "Run \`make' in that directory to run these through makeinfo" \
	      "(use \`make info' here to do that automatically)."

.PHONY: info
info:
	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
	@echo "Running Texinfo files through makeinfo..."
	make -C $(BUILDDIR)/texinfo info
	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."

.PHONY: gettext
gettext:
	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
	@echo
	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."

.PHONY: changes
changes:
	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
	@echo
	@echo "The overview file is in $(BUILDDIR)/changes."

.PHONY: linkcheck
linkcheck:
	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
	@echo
	@echo "Link check complete; look for any errors in the above output " \
	      "or in $(BUILDDIR)/linkcheck/output.txt."

.PHONY: doctest
doctest:
	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
	@echo "Testing of doctests in the sources finished, look at the " \
	      "results in $(BUILDDIR)/doctest/output.txt."

.PHONY: coverage
coverage:
	$(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage
	@echo "Testing of coverage in the sources finished, look at the " \
	      "results in $(BUILDDIR)/coverage/python.txt."

.PHONY: xml
xml:
	$(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml
	@echo
	@echo "Build finished. The XML files are in $(BUILDDIR)/xml."

.PHONY: pseudoxml
pseudoxml:
	$(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml
	@echo
	@echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml."

.PHONY: dummy
dummy:
	$(SPHINXBUILD) -b dummy $(ALLSPHINXOPTS) $(BUILDDIR)/dummy
	@echo
	@echo "Build finished. Dummy builder generates no files."

--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# pyfact documentation build configuration file, created by
# sphinx-quickstart on Mon Apr 10 12:17:42 2017.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.

# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath('../fact'))
from fact import __version__

# -- General configuration ------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#
# needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
    'sphinx.ext.imgmath',
    'sphinx.ext.autodoc',
    'sphinx.ext.todo',
    'sphinx.ext.viewcode',
    'sphinx.ext.napoleon',
]

# Napoleon settings
napoleon_google_docstring = False
napoleon_numpy_docstring = True
napoleon_include_init_with_doc = False
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True

# mathjax cdn
mathjax_path = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix(es) of source filenames.
# You can specify multiple suffixes as a list of strings:
#
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'

# The encoding of source files.
#
# source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = 'pyfact'
copyright = 'MIT License'
author = 'FACT Collaboration'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = 'en'

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#
# today = ''
#
# Else, today_fmt is used as the format for a strftime call.
#
# today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# These patterns also affect html_static_path and html_extra_path
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']

# The reST default role (used for this markup: `text`) to use for all
# documents.
#
# default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#
# add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#
# add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#
# show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []

# If true, keep warnings as "system message" paragraphs in the built documents.
# keep_warnings = False

# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = True


# -- Options for HTML output ----------------------------------------------

# The theme to use for HTML and HTML Help pages.  See the documentation for
# a list of builtin themes.
#
html_theme = 'alabaster'

# Theme options are theme-specific and customize the look and feel of a theme
# further.  For a list of options available for each theme, see the
# documentation.
#
# html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
# html_theme_path = []

# The name for this set of Sphinx documents.
# "<project> v<release> documentation" by default.
#
# html_title = 'pyfact v'

# A shorter title for the navigation bar.  Default is the same as html_title.
#
# html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#
# html_logo = None

# The name of an image file (relative to this directory) to use as a favicon of
# the docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#
# html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#
# html_extra_path = []

# If not None, a 'Last updated on:' timestamp is inserted at every page
# bottom, using the given strftime format.
# The empty string is equivalent to '%b %d, %Y'.
#
# html_last_updated_fmt = None

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#
# html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#
# html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#
# html_additional_pages = {}

# If false, no module index is generated.
#
# html_domain_indices = True

# If false, no index is generated.
#
# html_use_index = True

# If true, the index is split into individual pages for each letter.
#
# html_split_index = False

# If true, links to the reST sources are added to the pages.
#
# html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#
# html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#
# html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it.  The value of this option must be the
# base URL from which the finished HTML is served.
#
# html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None

# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
#   'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
#   'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr', 'zh'
#
# html_search_language = 'en'

# A dictionary with options for the search language support, empty by default.
# 'ja' uses this config value.
# 'zh' user can custom change `jieba` dictionary path.
#
# html_search_options = {'type': 'default'}

# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#
# html_search_scorer = 'scorer.js'

# Output file base name for HTML help builder.
htmlhelp_basename = 'pyfactdoc'

# -- Options for LaTeX output ---------------------------------------------

latex_elements = {
    # The paper size ('letterpaper' or 'a4paper').
    #
    # 'papersize': 'letterpaper',

    # The font size ('10pt', '11pt' or '12pt').
    #
    # 'pointsize': '10pt',

    # Additional stuff for the LaTeX preamble.
    #
    # 'preamble': '',

    # Latex figure (float) alignment
    #
    # 'figure_align': 'htbp',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
#  author, documentclass [howto, manual, or own class]).
latex_documents = [
    (master_doc, 'pyfact.tex', 'pyfact Documentation',
     'Author', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#
# latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#
# latex_use_parts = False

# If true, show page references after internal links.
#
# latex_show_pagerefs = False

# If true, show URL addresses after external links.
#
# latex_show_urls = False

# Documents to append as an appendix to all manuals.
#
# latex_appendices = []

# If false, will not define \strong, \code, \titleref, \crossref ... but only
# \sphinxstrong, ..., \sphinxtitleref, ... To help avoid clash with user added
# packages.
#
# latex_keep_old_macro_names = True

# If false, no module index is generated.
#
# latex_domain_indices = True


# -- Options for manual page output ---------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    (master_doc, 'pyfact', 'pyfact Documentation',
     [author], 1)
]

# If true, show URL addresses after external links.
#
# man_show_urls = False


# -- Options for Texinfo output -------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    (master_doc, 'pyfact', 'pyfact Documentation',
     author, 'pyfact', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#
# texinfo_appendices = []

# If false, no module index is generated.
#
# texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# texinfo_show_urls = 'footnote'

# If true, do not generate a @detailmenu in the "Top" node's menu.
#
# texinfo_no_detailmenu = False


# -- Options for Epub output ----------------------------------------------

# Bibliographic Dublin Core info.
epub_title = project
epub_author = author
epub_publisher = author
epub_copyright = copyright

# The basename for the epub file. It defaults to the project name.
# epub_basename = project
# The HTML theme for the epub output.  Since the default themes are not
# optimized for small screen space, using the same theme for HTML and epub
# output is usually not wise. This defaults to 'epub', a theme designed to save
# visual space.
#
# epub_theme = 'epub'

# The language of the text. It defaults to the language option
# or 'en' if the language is not set.
#
# epub_language = ''

# The scheme of the identifier. Typical schemes are ISBN or URL.
# epub_scheme = ''

# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#
# epub_identifier = ''

# A unique identification for the text.
#
# epub_uid = ''

# A tuple containing the cover image and cover page html template filenames.
#
# epub_cover = ()

# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#
# epub_guide = ()

# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_pre_files = []

# HTML files that should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#
# epub_post_files = []

# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']

# The depth of the table of contents in toc.ncx.
#
# epub_tocdepth = 3

# Allow duplicate toc entries.
#
# epub_tocdup = True

# Choose between 'default' and 'includehidden'.
#
# epub_tocscope = 'default'

# Fix unsupported image types using Pillow.
#
# epub_fix_images = False

# Scale large images.
#
# epub_max_image_width = 0

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#
# epub_show_urls = 'inline'

# If false, no index is generated.
#
# epub_use_index = True

--------------------------------------------------------------------------------
/docs/fact.analysis.rst:
--------------------------------------------------------------------------------
fact\.analysis package
======================

.. automodule:: fact.analysis
    :members:
    :undoc-members:
    :show-inheritance:

Submodules
----------

fact\.analysis\.source module
-----------------------------

.. automodule:: fact.analysis.source
    :members:
    :undoc-members:
    :show-inheritance:

fact\.analysis\.binning module
------------------------------

.. automodule:: fact.analysis.binning
    :members:
    :undoc-members:
    :show-inheritance:

fact\.analysis\.core module
---------------------------

.. automodule:: fact.analysis.core
    :members:
    :undoc-members:
    :show-inheritance:

fact\.analysis\.statistics module
---------------------------------

.. automodule:: fact.analysis.statistics
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/fact.auxservices.rst:
--------------------------------------------------------------------------------
fact.auxservices package
========================

.. automodule:: fact.auxservices
    :members:
    :undoc-members:
    :show-inheritance:

Submodules
----------

fact.auxservices.base module
----------------------------

.. automodule:: fact.auxservices.base
    :members:
    :undoc-members:
    :show-inheritance:

fact.auxservices.services module
--------------------------------

.. automodule:: fact.auxservices.services
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/fact.coordinates.rst:
--------------------------------------------------------------------------------
fact.coordinates package
========================

Print the location of Crab in the camera for a given time

.. code-block:: python

    >>> from astropy.coordinates import SkyCoord, AltAz
    >>> from fact.coordinates import CameraFrame
    >>> obstime = '2013-10-03 04:00'
    >>> crab = SkyCoord.from_name('Crab')
    >>> pointing = AltAz(alt='62d', az='97d')
    >>> camera_frame = CameraFrame(pointing_direction=pointing, obstime=obstime)
    >>> crab_camera = crab.transform_to(camera_frame)
    >>> print(crab_camera.x, crab_camera.y)
    -34.79480274879211 mm 11.27416763997078 mm
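The transform also works the other way around, from a position in the camera
plane back to celestial coordinates. A short sketch, reusing ``camera_frame``
from the session above and assuming the planar representation of
``CameraFrame`` exposes its coordinates as ``x`` and ``y``:

.. code-block:: python

    >>> import astropy.units as u
    >>> # a position in the camera plane; passing x/y as frame attributes
    >>> # is an assumption, check fact.coordinates.representation
    >>> cam_pos = SkyCoord(x=-34.79 * u.mm, y=11.27 * u.mm, frame=camera_frame)
    >>> cam_pos.transform_to('icrs')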
.. automodule:: fact.coordinates
    :members:
    :undoc-members:
    :show-inheritance:

Submodules
----------

fact.coordinates.camera_frame module
------------------------------------

.. automodule:: fact.coordinates.camera_frame
    :members:
    :undoc-members:
    :show-inheritance:

fact.coordinates.utils module
-----------------------------

.. automodule:: fact.coordinates.utils
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/fact.credentials.rst:
--------------------------------------------------------------------------------
fact.credentials package
========================

.. automodule:: fact.credentials
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/fact.factdb.rst:
--------------------------------------------------------------------------------
fact.factdb package
===================

.. automodule:: fact.factdb
    :members:
    :undoc-members:
    :show-inheritance:

Submodules
----------

fact.factdb.database module
---------------------------

.. automodule:: fact.factdb.database
    :members:
    :undoc-members:
    :show-inheritance:

fact.factdb.models module
-------------------------

.. automodule:: fact.factdb.models
    :members:
    :undoc-members:
    :show-inheritance:

fact.factdb.utils module
------------------------

.. automodule:: fact.factdb.utils
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/fact.instrument.rst:
--------------------------------------------------------------------------------
fact\.instrument package
========================

.. automodule:: fact.instrument
    :members:
    :undoc-members:
    :show-inheritance:

Submodules
----------

fact\.instrument\.constants module
----------------------------------

.. automodule:: fact.instrument.constants
    :members:
    :undoc-members:
    :show-inheritance:

fact\.instrument\.camera module
-------------------------------

.. automodule:: fact.instrument.camera
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/fact.plotting.rst:
--------------------------------------------------------------------------------
fact.plotting package
=====================

.. automodule:: fact.plotting
    :members:
    :undoc-members:
    :show-inheritance:

Submodules
----------

fact.plotting.analysis module
-----------------------------

.. automodule:: fact.plotting.analysis
    :members:
    :undoc-members:
    :show-inheritance:

fact.plotting.core module
-------------------------

.. automodule:: fact.plotting.core
    :members:
    :undoc-members:
    :show-inheritance:

fact.plotting.utils module
--------------------------

.. automodule:: fact.plotting.utils
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/fact.rst:
--------------------------------------------------------------------------------
fact package
============

.. automodule:: fact
    :members:
    :undoc-members:
    :show-inheritance:

Subpackages
-----------

.. toctree::

    fact.analysis
    fact.auxservices
    fact.coordinates
    fact.credentials
    fact.factdb
    fact.instrument
    fact.plotting

Submodules
----------

fact\.io module
---------------

.. automodule:: fact.io
    :members:
    :undoc-members:
    :show-inheritance:

fact\.encrypt\_credentials module
---------------------------------

.. automodule:: fact.encrypt_credentials
    :members:
    :undoc-members:
    :show-inheritance:


fact\.qla module
----------------

.. automodule:: fact.qla
    :members:
    :undoc-members:
    :show-inheritance:

fact\.time module
-----------------

.. automodule:: fact.time
    :members:
    :undoc-members:
    :show-inheritance:

--------------------------------------------------------------------------------
/docs/figures/pincushin_distortion_slope.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/fact-project/pyfact/2f204dbf4a90bfa84899e2b8d6aaed891d2b09bc/docs/figures/pincushin_distortion_slope.png
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
.. pyfact documentation master file, created by
   sphinx-quickstart on Mon Apr 10 12:17:42 2017.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to pyfact's documentation!
==================================

Contents:

.. toctree::
    :maxdepth: 4

    fact
    setup


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
@ECHO OFF

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set BUILDDIR=_build
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
set I18NSPHINXOPTS=%SPHINXOPTS% .
if NOT "%PAPER%" == "" (
	set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
	set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
)

if "%1" == "" goto help

if "%1" == "help" (
	:help
	echo.Please use `make ^<target^>` where ^<target^> is one of
	echo.  html       to make standalone HTML files
	echo.  dirhtml    to make HTML files named index.html in directories
	echo.  singlehtml to make a single large HTML file
	echo.  pickle     to make pickle files
	echo.  json       to make JSON files
	echo.  htmlhelp   to make HTML files and a HTML help project
	echo.  qthelp     to make HTML files and a qthelp project
	echo.  devhelp    to make HTML files and a Devhelp project
	echo.  epub       to make an epub
	echo.  epub3      to make an epub3
	echo.  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter
	echo.  text       to make text files
	echo.  man        to make manual pages
	echo.  texinfo    to make Texinfo files
	echo.  gettext    to make PO message catalogs
	echo.  changes    to make an overview over all changed/added/deprecated items
	echo.  xml        to make Docutils-native XML files
	echo.  pseudoxml  to make pseudoxml-XML files for display purposes
	echo.  linkcheck  to check all external links for integrity
	echo.  doctest    to run all doctests embedded in the documentation if enabled
	echo.  coverage   to run coverage check of the documentation if enabled
	echo.  dummy      to check syntax errors of document sources
	goto end
)

if "%1" == "clean" (
	for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i
	del /q /s %BUILDDIR%\*
	goto end
)


REM Check if sphinx-build is available and fallback to Python version if any
%SPHINXBUILD% 1>NUL 2>NUL
if errorlevel 9009 goto sphinx_python
goto sphinx_ok

:sphinx_python

set SPHINXBUILD=python -m sphinx.__init__
%SPHINXBUILD% 2> nul
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

:sphinx_ok


if "%1" == "html" (
	%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/html.
	goto end
)

if "%1" == "dirhtml" (
	%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
	goto end
)

if "%1" == "singlehtml" (
	%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
	goto end
)

if "%1" == "pickle" (
	%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the pickle files.
	goto end
)

if "%1" == "json" (
	%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can process the JSON files.
	goto end
)

if "%1" == "htmlhelp" (
	%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run HTML Help Workshop with the ^
.hhp project file in %BUILDDIR%/htmlhelp.
	goto end
)

if "%1" == "qthelp" (
	%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; now you can run "qcollectiongenerator" with the ^
.qhcp project file in %BUILDDIR%/qthelp, like this:
	echo.^> qcollectiongenerator %BUILDDIR%\qthelp\pyfact.qhcp
	echo.To view the help file:
	echo.^> assistant -collectionFile %BUILDDIR%\qthelp\pyfact.qhc
	goto end
)

if "%1" == "devhelp" (
	%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished.
	goto end
)

if "%1" == "epub" (
	%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub file is in %BUILDDIR%/epub.
	goto end
)

if "%1" == "epub3" (
	%SPHINXBUILD% -b epub3 %ALLSPHINXOPTS% %BUILDDIR%/epub3
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The epub3 file is in %BUILDDIR%/epub3.
	goto end
)

if "%1" == "latex" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdf" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "latexpdfja" (
	%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
	cd %BUILDDIR%/latex
	make all-pdf-ja
	cd %~dp0
	echo.
	echo.Build finished; the PDF files are in %BUILDDIR%/latex.
	goto end
)

if "%1" == "text" (
	%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The text files are in %BUILDDIR%/text.
	goto end
)

if "%1" == "man" (
	%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The manual pages are in %BUILDDIR%/man.
	goto end
)

if "%1" == "texinfo" (
	%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
	goto end
)

if "%1" == "gettext" (
	%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
	goto end
)

if "%1" == "changes" (
	%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
	if errorlevel 1 exit /b 1
	echo.
	echo.The overview file is in %BUILDDIR%/changes.
	goto end
)

if "%1" == "linkcheck" (
	%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
	if errorlevel 1 exit /b 1
	echo.
	echo.Link check complete; look for any errors in the above output ^
or in %BUILDDIR%/linkcheck/output.txt.
	goto end
)

if "%1" == "doctest" (
	%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of doctests in the sources finished, look at the ^
results in %BUILDDIR%/doctest/output.txt.
	goto end
)

if "%1" == "coverage" (
	%SPHINXBUILD% -b coverage %ALLSPHINXOPTS% %BUILDDIR%/coverage
	if errorlevel 1 exit /b 1
	echo.
	echo.Testing of coverage in the sources finished, look at the ^
results in %BUILDDIR%/coverage/python.txt.
	goto end
)

if "%1" == "xml" (
	%SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The XML files are in %BUILDDIR%/xml.
	goto end
)

if "%1" == "pseudoxml" (
	%SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml.
	goto end
)

if "%1" == "dummy" (
	%SPHINXBUILD% -b dummy %ALLSPHINXOPTS% %BUILDDIR%/dummy
	if errorlevel 1 exit /b 1
	echo.
	echo.Build finished. Dummy builder generates no files.
	goto end
)

:end

--------------------------------------------------------------------------------
/docs/setup.rst:
--------------------------------------------------------------------------------
setup module
============

.. automodule:: setup
    :members:
    :undoc-members:
    :show-inheritance:
--------------------------------------------------------------------------------
/examples/export_ctapipe_camera_geometry.py:
--------------------------------------------------------------------------------
from astropy.table import Table
from fact.instrument import get_pixel_coords
import astropy.units as u
import numpy as np

x, y = get_pixel_coords()
x = (x * u.mm).to(u.m)
y = (y * u.mm).to(u.m)
pix_id = np.arange(1440)

# half the smallest distance between consecutive pixel centers
radius = np.sqrt(np.diff(x)**2 + np.diff(y)**2).min() / 2
# area of the hexagonal pixels, computed from that radius
pix_area = 1.5 * np.sqrt(3) * radius**2


t = Table()

t['pix_id'] = pix_id

# convert from fact coords into hess/cta camera coordinate system
t['pix_x'] = -y
t['pix_y'] = -x

t['pix_area'] = pix_area
t.meta['TAB_TYPE'] = 'ctapipe.instrument.CameraGeometry'
t.meta['PIX_TYPE'] = 'hexagonal'
t.meta['CAM_ID'] = 'FACT'
t.meta['PIX_ROT'] = 30.0
t.meta['CAM_ROT'] = 0.0
t.meta['SOURCE'] = 'pyfact'
t.meta['TAB_VER'] = '1'


t.write('FACT.camgeom.fits.gz', overwrite=True)

--------------------------------------------------------------------------------
/examples/other_factcamera.py:
--------------------------------------------------------------------------------
import matplotlib.pyplot as plt
import fact.plotting as factplot

import numpy as np

data = np.random.normal(loc=5, scale=1, size=1440)
bad_pixels = [863, 868, 297, 927, 80, 873, 1093, 1094, 527, 528, 721, 722]

f, axes = plt.subplots(2, 2)

axes[0, 0].plot(data, ".")
axes[0, 0].set_title("Data vs pixel Id")

axes[0, 1].hist(data, bins=np.linspace(0, 10, 100))
axes[0, 1].set_title("distribution")

factplot.camera(data, ax=axes[1, 0])
factplot.mark_pixel(bad_pixels, ax=axes[1, 0], linewidth=1)
axes[1, 0].set_title("bad_pixels highlighted")

factplot.camera(data, ax=axes[1, 1])
factplot.mark_pixel(data > 6, ax=axes[1, 1], linewidth=1)
axes[1, 1].set_title("data > 6 highlighted")

plt.suptitle("Maximize me and see me adjust the pixel size")

plt.show()

--------------------------------------------------------------------------------
/examples/path_utils.ipynb:
--------------------------------------------------------------------------------
{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## fact.path Examples\n",
    "\n",
    "# path deconstruction\n",
    "\n",
    "Sometimes one wants to iterate over a bunch of file paths and get the (night, run) integer tuple from the path, often in order to retrieve information for each file from the RunInfo DB.\n",
    "\n",
    "Often we get the paths from something like:\n",
    "\n",
    "    paths = glob('/fact/raw/*/*/*/*')\n",
    "    \n",
    "Below I have defined a couple of example paths, which I want to deconstruct.\n",
    "Note that not all of the `paths_for_parsing` contain the typical \"yyyy/mm/dd\" part.\n",
    "Still the `night` and `run` are found just fine."
" 20 | ] 21 | }, 22 | { 23 | "cell_type": "code", 24 | "execution_count": 1, 25 | "metadata": {}, 26 | "outputs": [ 27 | { 28 | "name": "stdout", 29 | "output_type": "stream", 30 | "text": [ 31 | "Help on function parse in module fact.path:\n", 32 | "\n", 33 | "parse(path)\n", 34 | " Return a dict with {prefix, suffix, night, run} parsed from path.\n", 35 | " \n", 36 | " path: string\n", 37 | " any (absolute) path should be fine.\n", 38 | "\n" 39 | ] 40 | } 41 | ], 42 | "source": [ 43 | "from fact.path import parse\n", 44 | "help(parse)" 45 | ] 46 | }, 47 | { 48 | "cell_type": "code", 49 | "execution_count": 2, 50 | "metadata": { 51 | "collapsed": true 52 | }, 53 | "outputs": [], 54 | "source": [ 55 | "paths_for_parsing = [\n", 56 | " '/fact/raw/2016/01/01/20160101_011.fits.fz',\n", 57 | " '/fact/aux/2016/01/01/20160101.FSC_CONTROL_TEMPERATURE.fits',\n", 58 | " '/fact/aux/2016/01/01/20160101.log',\n", 59 | " '/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20140115_079_079.root'\n", 60 | "]" 61 | ] 62 | }, 63 | { 64 | "cell_type": "code", 65 | "execution_count": 3, 66 | "metadata": {}, 67 | "outputs": [ 68 | { 69 | "name": "stdout", 70 | "output_type": "stream", 71 | "text": [ 72 | "/fact/raw/2016/01/01/20160101_011.fits.fz\n", 73 | "{'prefix': '/fact/raw', 'night': 20160101, 'run': 11, 'suffix': '.fits.fz'}\n", 74 | "\n", 75 | "/fact/aux/2016/01/01/20160101.FSC_CONTROL_TEMPERATURE.fits\n", 76 | "{'prefix': '/fact/aux', 'night': 20160101, 'run': None, 'suffix': '.FSC_CONTROL_TEMPERATURE.fits'}\n", 77 | "\n", 78 | "/fact/aux/2016/01/01/20160101.log\n", 79 | "{'prefix': '/fact/aux', 'night': 20160101, 'run': None, 'suffix': '.log'}\n", 80 | "\n", 81 | "/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20140115_079_079.root\n", 82 | "{'prefix': '/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b', 'night': 20140115, 'run': 79, 'suffix': '_079.root'}\n", 83 | "\n" 84 | ] 85 | } 86 | ], 87 | "source": [ 88 | "for path in paths_for_parsing:\n", 89 | " print(path)\n", 90 | " print(parse(path))\n", 91 | " print()" 92 | ] 93 | }, 94 | { 95 | "cell_type": "code", 96 | "execution_count": 4, 97 | "metadata": {}, 98 | "outputs": [ 99 | { 100 | "name": "stdout", 101 | "output_type": "stream", 102 | "text": [ 103 | "The slowest run took 5.80 times longer than the fastest. This could mean that an intermediate result is being cached.\n", 104 | "100000 loops, best of 3: 3 µs per loop\n" 105 | ] 106 | } 107 | ], 108 | "source": [ 109 | "%timeit parse(paths_for_parsing[0])" 110 | ] 111 | }, 112 | { 113 | "cell_type": "markdown", 114 | "metadata": {}, 115 | "source": [ 116 | "Parsing is quicker than 10µs, but at the moment we have in the order of 250k runs, so parsing all paths in the raw folder might take as long as 2.5 seconds.\n", 117 | "\n", 118 | "However, usually `glob` is taking much longer to actually get all the paths in the first place, so speed should not be an issue.\n", 119 | "\n", 120 | "----\n" 121 | ] 122 | }, 123 | { 124 | "cell_type": "markdown", 125 | "metadata": {}, 126 | "source": [ 127 | "# Path construction\n", 128 | "\n", 129 | "Equally often, people already have runs from the RunInfo DB, and want to find the according files. 
Be it raw files or aux-files or other files, that happen to sit in a similar tree-like directory structure, like for example the photon-stream files.\n", 130 | "\n", 131 | "the typical task starts with the (night, run) tuple and wants to create a path like\n", 132 | "\"/gpfs0/fact/processing/photon-stream/yyyy/mm/dd/night_run.phs.jsonl.gz\"\n", 133 | "\n", 134 | "Or similar." 135 | ] 136 | }, 137 | { 138 | "cell_type": "code", 139 | "execution_count": 5, 140 | "metadata": {}, 141 | "outputs": [ 142 | { 143 | "name": "stdout", 144 | "output_type": "stream", 145 | "text": [ 146 | "Help on function tree_path in module fact.path:\n", 147 | "\n", 148 | "tree_path(night, run, prefix, suffix)\n", 149 | " Make a tree_path from a (night, run) for given prefix, suffix\n", 150 | " \n", 151 | " night: int\n", 152 | " eg. 20160101\n", 153 | " run: int or None\n", 154 | " eg. 11\n", 155 | " prefix: string\n", 156 | " eg. '/fact/raw' or '/fact/aux'\n", 157 | " suffix: string\n", 158 | " eg. '.fits.fz' or '.log' or '.AUX_FOO.fits'\n", 159 | "\n" 160 | ] 161 | } 162 | ], 163 | "source": [ 164 | "from fact.path import tree_path\n", 165 | "help(tree_path)" 166 | ] 167 | }, 168 | { 169 | "cell_type": "code", 170 | "execution_count": 6, 171 | "metadata": { 172 | "collapsed": true 173 | }, 174 | "outputs": [], 175 | "source": [ 176 | "from functools import partial\n", 177 | "\n", 178 | "night_run_tuples = [\n", 179 | " (20160101, 1),\n", 180 | " (20160101, 2),\n", 181 | " (20130506, 3),\n", 182 | "]" 183 | ] 184 | }, 185 | { 186 | "cell_type": "code", 187 | "execution_count": 7, 188 | "metadata": {}, 189 | "outputs": [ 190 | { 191 | "name": "stdout", 192 | "output_type": "stream", 193 | "text": [ 194 | "/gpfs0/fact/processing/photon-stream/2016/01/01/20160101_001.phs.jsonl.gz\n", 195 | "/gpfs0/fact/processing/photon-stream/2016/01/01/20160101_002.phs.jsonl.gz\n", 196 | "/gpfs0/fact/processing/photon-stream/2013/05/06/20130506_003.phs.jsonl.gz\n" 197 | ] 198 | } 199 | ], 200 | "source": [ 201 | "photon_stream_path = partial(tree_path,\n", 202 | " prefix='/gpfs0/fact/processing/photon-stream',\n", 203 | " suffix='.phs.jsonl.gz'\n", 204 | ")\n", 205 | "for night, run in night_run_tuples:\n", 206 | " print(photon_stream_path(night, run))" 207 | ] 208 | }, 209 | { 210 | "cell_type": "code", 211 | "execution_count": 8, 212 | "metadata": {}, 213 | "outputs": [ 214 | { 215 | "name": "stdout", 216 | "output_type": "stream", 217 | "text": [ 218 | "/fact/aux/2016/01/01/20160101.FSC_CONTROL_TEMPERATURE.fits\n", 219 | "/fact/aux/2016/01/01/20160101.FSC_CONTROL_TEMPERATURE.fits\n", 220 | "/fact/aux/2013/05/06/20130506.FSC_CONTROL_TEMPERATURE.fits\n" 221 | ] 222 | } 223 | ], 224 | "source": [ 225 | "aux_path = partial(\n", 226 | " tree_path,\n", 227 | " prefix='/fact/aux',\n", 228 | " suffix='.FSC_CONTROL_TEMPERATURE.fits',\n", 229 | " run=None\n", 230 | ")\n", 231 | "for night, run in night_run_tuples:\n", 232 | " print(aux_path(night))" 233 | ] 234 | }, 235 | { 236 | "cell_type": "markdown", 237 | "metadata": {}, 238 | "source": [ 239 | "\n", 240 | "But what about more special cases? I sometime copy files from ISDC or La Palma to my machine in order to work with them locally and try something out. In the past I often did not bother to recreate the yyyy/mm/dd file structure, since I copied the files e.g. 
like this:\n", 241 | "\n", 242 | " scp isdc:/fact/aux/*/*/*/*.FSC_CONTROL_TEMPERATURE.fits ~/fact/aux_toy/.\n", 243 | " \n", 244 | "In this case I cannot make use of the `tree_path` thing, so I have to roll my own solution again?\n", 245 | "\n", 246 | "Nope! We have you covered. Assume you have a quite specialized path format, e.g. this:\n", 247 | "\n", 248 | " '/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20140115_079_079.root'\n", 249 | "\n", 250 | " * the yyyy/mm/dd tree structure is missing, and \n", 251 | " * the file name contains **two** run ids, not one.\n", 252 | " \n", 253 | "Just define a template for this filename, e.g. like this:" 254 | ] 255 | }, 256 | { 257 | "cell_type": "code", 258 | "execution_count": 9, 259 | "metadata": {}, 260 | "outputs": [ 261 | { 262 | "name": "stdout", 263 | "output_type": "stream", 264 | "text": [ 265 | "Help on function template_to_path in module fact.path:\n", 266 | "\n", 267 | "template_to_path(night, run, template, **kwargs)\n", 268 | " Make path from template and (night, run) using kwargs existing.\n", 269 | " \n", 270 | " night: int\n", 271 | " e.g. night = 20160102\n", 272 | " is used to create Y,M,D,N template values as:\n", 273 | " Y = \"2016\"\n", 274 | " M = \"01\"\n", 275 | " D = \"02\"\n", 276 | " N = \"20160102\"\n", 277 | " run: int or None\n", 278 | " e.g. run = 1\n", 279 | " is used to create template value R = \"001\"\n", 280 | " template: string\n", 281 | " e.g. \"/foo/bar/{Y}/baz/{R}_{M}_{D}.gz.{N}\"\n", 282 | " kwargs:\n", 283 | " if template contains other place holders than Y,M,D,N,R\n", 284 | " kwargs are used to format these.\n", 285 | "\n" 286 | ] 287 | } 288 | ], 289 | "source": [ 290 | "from fact.path import template_to_path\n", 291 | "help(template_to_path)" 292 | ] 293 | }, 294 | { 295 | "cell_type": "code", 296 | "execution_count": 10, 297 | "metadata": { 298 | "scrolled": true 299 | }, 300 | "outputs": [ 301 | { 302 | "name": "stdout", 303 | "output_type": "stream", 304 | "text": [ 305 | "/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20160101_001_001.root\n", 306 | "/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20160101_002_002.root\n", 307 | "/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20130506_003_003.root\n" 308 | ] 309 | } 310 | ], 311 | "source": [ 312 | "single_pe_path = partial(\n", 313 | " template_to_path,\n", 314 | " template='/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/{N}_{R}_{R}.root'\n", 315 | ")\n", 316 | "\n", 317 | "for night, run in night_run_tuples:\n", 318 | " print(single_pe_path(night, run))" 319 | ] 320 | }, 321 | { 322 | "cell_type": "markdown", 323 | "metadata": {}, 324 | "source": [ 325 | "Okay, but what if the 2nd run id is not always the same as the first?\n", 326 | "\n", 327 | "In that case you'll have to type a bit more:" 328 | ] 329 | }, 330 | { 331 | "cell_type": "code", 332 | "execution_count": 11, 333 | "metadata": {}, 334 | "outputs": [ 335 | { 336 | "name": "stdout", 337 | "output_type": "stream", 338 | "text": [ 339 | "/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20160101_001_003.root\n", 340 | "/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20160101_002_004.root\n", 341 | "/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/20130506_003_005.root\n" 342 | ] 343 | } 344 | ], 345 | "source": [ 346 | "single_pe_path_2runs = partial(\n", 347 | " template_to_path,\n", 348 | " template='/home/guest/tbretz/gainanalysis.20130725/files/fit_bt2b/{N}_{R}_{run2:03d}.root'\n", 349 | ")\n", 350 | "\n", 351 | "for night, run in 
night_run_tuples:\n", 352 | " print(single_pe_path_2runs(night, run, run2=run+2))" 353 | ] 354 | } 355 | ], 356 | "metadata": { 357 | "kernelspec": { 358 | "display_name": "Python 3", 359 | "language": "python", 360 | "name": "python3" 361 | }, 362 | "language_info": { 363 | "codemirror_mode": { 364 | "name": "ipython", 365 | "version": 3 366 | }, 367 | "file_extension": ".py", 368 | "mimetype": "text/x-python", 369 | "name": "python", 370 | "nbconvert_exporter": "python", 371 | "pygments_lexer": "ipython3", 372 | "version": "3.6.1" 373 | } 374 | }, 375 | "nbformat": 4, 376 | "nbformat_minor": 2 377 | } 378 | -------------------------------------------------------------------------------- /examples/plot_qla.py: -------------------------------------------------------------------------------- 1 | from fact.qla import get_qla_data 2 | from fact.analysis import bin_runs 3 | from fact.plotting import plot_excess_rate 4 | import matplotlib.pyplot as plt 5 | 6 | runs = get_qla_data( 7 | 20140622, 20140624, 8 | sources=['Mrk 501', '1ES 1959+650', 'Crab', 'Mrk 421'], 9 | ) 10 | 11 | qla_results = bin_runs( 12 | runs, 13 | bin_width_minutes=20, 14 | ) 15 | ax1, ax2, ax1_mjd = plot_excess_rate(qla_results) 16 | ax1.grid() 17 | 18 | plt.show() 19 | -------------------------------------------------------------------------------- /examples/pydim_examples/commands-client.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | # -*- coding: UTF-8 -*- 3 | """ 4 | An example for showing how a client can run the commands on a DIM server. 5 | 6 | """ 7 | 8 | import sys 9 | import math 10 | import random 11 | 12 | # Import the pydim module 13 | import fact.dim as pydim 14 | 15 | 16 | def command1(): 17 | 18 | f = random.choice([math.pi, math.e, 42]) 19 | 20 | # The argument must be a tuple 21 | args = (f,) 22 | print "Calling command 1. Arguments: %s" % args 23 | res = pydim.dic_cmnd_service("example-command-1", args, "F") 24 | 25 | 26 | def command2(): 27 | n = random.choice(range(5)) 28 | text = random.choice(["hola", "hi", "bonjour"]) 29 | 30 | args = (n, text) 31 | print "Calling command 2. Arguments: %s, %s" % args 32 | res = pydim.dic_cmnd_service("example-command-2", args, "I:1;C") 33 | 34 | 35 | def help(): 36 | print """This is a DIM client for the commands example server. 37 | 38 | The following options are available: 39 | 40 | 1 Run command 1 41 | 2 Run command 2 42 | H Display this help text 43 | Q Exit this program 44 | 45 | """ 46 | 47 | 48 | def main(): 49 | 50 | help() 51 | 52 | exit = False 53 | while not exit: 54 | 55 | action = raw_input("Action (Press 'Q' to exit): ") 56 | 57 | if action == "1": 58 | command1() 59 | elif action == "2": 60 | command2() 61 | elif action == "Q": 62 | exit = True 63 | print "Bye!" 64 | elif action == "H": 65 | help() 66 | 67 | if __name__ == "__main__": 68 | main() 69 | -------------------------------------------------------------------------------- /examples/pydim_examples/commands-server.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | # -*- coding: UTF-8 -*- 3 | """ 4 | An example of a DIM server with commands. 5 | 6 | """ 7 | 8 | import sys 9 | import time 10 | 11 | # Import the pydim module 12 | import fact.dim as pydim 13 | 14 | 15 | def command_callback1(value, tag): 16 | """ 17 | 18 | value: A tuple containing a float 19 | tag: A context argument (possibly empty) 20 | 21 | """ 22 | print "command_callback1 called. 
Argument: %s %d" % (value[0], tag) 23 | 24 | 25 | def command_callback2(cmd, tag): 26 | """ 27 | 28 | cmd: A tuple containing an integer and a string of variable length 29 | tag: A context argument (possibly empty) 30 | """ 31 | print "command_callback2 called. Arguments: %s %s and tag%s" % (cmd[0], cmd[1], tag) 32 | 33 | 34 | def main(): 35 | cmd1 = pydim.dis_add_cmnd('example-command-1', 'F', command_callback1, 2) 36 | cmd2 = pydim.dis_add_cmnd('example-command-2', 'I:1;C', command_callback2, 3) 37 | 38 | if not cmd1 or not cmd2: 39 | print "An error occurred while registering the commands" 40 | sys.exit(1) 41 | 42 | pydim.dis_start_serving("example-commands") 43 | print "Starting the server" 44 | while True: 45 | time.sleep(1) 46 | 47 | if __name__ == "__main__": 48 | main() 49 | -------------------------------------------------------------------------------- /examples/pydim_examples/services-client.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # -*- coding: UTF-8 -*- 3 | """ 4 | An example for showing clients for DIM servers. 5 | 6 | The client will query the values from the two services created in 7 | `service-server.py`, thus it should be run along with that script. 8 | 9 | """ 10 | 11 | import sys 12 | import time 13 | 14 | # Import the pydim module 15 | import fact.dim as pydim 16 | 17 | 18 | def client_callback1(now): 19 | """ 20 | Callback function for the service 1. 21 | 22 | Callback functions receive as many arguments as values are returned by the 23 | service. For example, as the service 1 returns only one string this callback 24 | function has only one argument. 25 | 26 | """ 27 | print "Client callback function for service 1" 28 | print "Message received: '%s' (%s)" % (now, type(now)) 29 | 30 | 31 | def client_callback2(val1, val2): 32 | """ 33 | Callback function for service 2. 34 | 35 | As the service 2 returned two arguments 36 | """ 37 | 38 | print "Client callback function for service 2" 39 | print "Values received: %s (%s) and %s (%s)" % (val1, type(val1), val2, type(val2)) 40 | 41 | 42 | def main(): 43 | """ 44 | A client for subscribing to two DIM services 45 | """ 46 | 47 | # Again, check if a Dim DNS node has been configured. 48 | # Normally this is done by setting an environment variable named DIM_DNS_NODE 49 | # with the host name, e.g. 'localhost'. 50 | # 51 | if not pydim.dis_get_dns_node(): 52 | print "No Dim DNS node found. Please set the environment variable DIM_DNS_NODE" 53 | sys.exit(1) 54 | 55 | # The function `dic_info_service` allows to subscribe to a service. 56 | # The arguments are the following: 57 | # 1. The name of the service. 58 | # 2. Service description string 59 | # 3. Callback function that will be called when the service is 60 | # updated. 61 | res1 = pydim.dic_info_service("example-service-1", "C", client_callback1) 62 | res2 = pydim.dic_info_service("example-service-2", "F:1;I:1;", client_callback2) 63 | 64 | if not res1 or not res2: 65 | print "There was an error registering the clients" 66 | sys.exit(1) 67 | 68 | # Wait for updates 69 | while True: 70 | time.sleep(10) 71 | 72 | 73 | if __name__ == "__main__": 74 | main() 75 | -------------------------------------------------------------------------------- /examples/pydim_examples/services-server.py: -------------------------------------------------------------------------------- 1 | #!/bin/env python 2 | # -*- coding: UTF-8 -*- 3 | """ 4 | An example for showing how to create a server with DIM. 
5 | 6 | The server exposes two services that are updated periodically. One of them uses 7 | its function callback to get a string with the current time, the other one 8 | returns a pair of numeric values. 9 | 10 | """ 11 | 12 | import sys 13 | import time 14 | 15 | # Import the pydim module 16 | import fact.dim as pydim 17 | 18 | 19 | def service_callback(tag): 20 | """ 21 | Service callbacks are functions (in general, Python callable objects) 22 | that take one argument: the DIM tag used when the service was added, 23 | and returns a tuple with the values that corresponds to the service 24 | parameters definition in DIM. 25 | """ 26 | 27 | print "Running callback function for service 1" 28 | 29 | # Calculate the value 30 | # This example returns a string with the current time 31 | now = time.strftime("%X") 32 | 33 | # Remember, the callback function must return a tuple 34 | return ("Hello! The time is %s" % now,) 35 | 36 | 37 | def service_callback2(tag): 38 | """ 39 | The callback function for the second service. 40 | """ 41 | # Calculate the value of the server 42 | # ... 43 | print "Running callback function for service 2" 44 | val1 = 3.11 45 | val2 = 42 46 | return (val1, val2) 47 | 48 | 49 | def main(): 50 | """ 51 | A simple DIM server with two services. 52 | """ 53 | 54 | # First of all check if a Dim DNS node has been configured. 55 | # Normally this is done by setting an environment variable named DIM_DNS_NODE 56 | # with the host name, e.g. 'localhost'. 57 | # 58 | if not pydim.dis_get_dns_node(): 59 | print "No Dim DNS node found. Please set the environment variable DIM_DNS_NODE" 60 | sys.exit(1) 61 | 62 | # The function dis_add_service is used to register the service in DIM 63 | # The arguments used are the following: 64 | # 1. Service name. It must be a unique name within a DNS server. 65 | # 2. Service description string. 66 | # 3. A callback function that will be executed for getting the value of 67 | # the service. 68 | # 4. Tag. A parameter to be sent to the callback in order to identify 69 | # the service. Normally this parameter is rarely used (but it's still 70 | # mandatory, though). 71 | svc = pydim.dis_add_service("example-service-1", "C", service_callback, 0) 72 | 73 | # Register another service 74 | svc2 = pydim.dis_add_service("example-service-2", "F:1;I:1;", service_callback2, 0) 75 | 76 | # The return value is the service identifier. It can be used to check 77 | # if the service was registered correctly. 78 | if not svc or not svc2: 79 | sys.stderr.write("An error occurred while registering the service\n") 80 | sys.exit(1) 81 | 82 | print "Services correctly registered" 83 | 84 | # A service must be updated before using it. 85 | print "Updating the services ..." 86 | pydim.dis_update_service(svc) 87 | pydim.dis_update_service(svc2) 88 | print "" 89 | 90 | # Start the DIM server. 91 | pydim.dis_start_serving("server-name") 92 | print "Starting the server ..." 93 | 94 | # Initial values for the service 2. Please see below. 95 | val1 = 3.11 96 | val2 = 0 97 | 98 | while True: 99 | # Update the services periodically (each 5 seconds) 100 | time.sleep(5) 101 | print "" 102 | 103 | # Case 1: When `dis_update_service` is called without arguments the 104 | # callback function will be executed and its return value 105 | # will be sent to the clients. 
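        # To summarize the two update styles used in this loop (same calls as
        # below, values illustrative):
        #
        #     pydim.dis_update_service(svc)                # value comes from the callback
        #     pydim.dis_update_service(svc2, (3.11, 42))   # value passed directly, callback skipped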
106 | print "Updating the service 1 with the callback function" 107 | pydim.dis_update_service(svc) 108 | 109 | # Case 2: When `dis_update_service` is called with arguments, they are 110 | # sent directly to the clients as the service value, *without* executing the 111 | # callback function. Please note that the number and the type of the 112 | # arguments must correspond to the service description. 113 | # 114 | 115 | # Update the second server each 10 seconds 116 | # 117 | if val2 % 2: 118 | print "Updating the service 2 with direct values" 119 | pydim.dis_update_service(svc2, (val1, val2)) 120 | 121 | # For the sake of the example, update the values passed to svc2: 122 | val1 = val1 + 11.30 123 | val2 = val2 + 1 124 | 125 | 126 | if __name__ == "__main__": 127 | main() 128 | -------------------------------------------------------------------------------- /examples/toy_events.py: -------------------------------------------------------------------------------- 1 | # -*- coding:utf8 -*- 2 | ''' 3 | Usage: 4 | toy_events.py (gamma | muon) [options] 5 | 6 | Options: 7 | --num-events= Number of Events to create [default: 30] 8 | --noise= Standard Deviation for the white noise [default: 0.5] 9 | --nsb= Mean nsb photons per pixel [default: 1.0] 10 | ''' 11 | import numpy as np 12 | from fact.plotting import Viewer, get_pixel_coords 13 | from docopt import docopt 14 | 15 | px, py = get_pixel_coords() 16 | 17 | 18 | def muon_ring(psf=5, mirror_radius=1.75, max_impact=None): 19 | if max_impact is None: 20 | max_impact = mirror_radius 21 | 22 | n_photons = np.random.poisson(350) 23 | mx, my = np.random.uniform(-100, 100, 2) 24 | r = np.random.uniform(60, 102) 25 | 26 | photon_theta = np.random.uniform(0, 2 * np.pi, n_photons) 27 | 28 | photon_x = r * np.cos(photon_theta) + mx 29 | photon_y = r * np.sin(photon_theta) + my 30 | 31 | photon_x += np.random.normal(0, psf, n_photons) 32 | photon_y += np.random.normal(0, psf, n_photons) 33 | 34 | data = pixel_content(photon_x, photon_y) 35 | return data 36 | 37 | 38 | def gamma(psf=5): 39 | n_photons = rand_power() 40 | 41 | length = np.random.normal(6.5 * np.log10(n_photons), 0.2 * np.log10(n_photons)) 42 | width = np.random.uniform(0.4 * length, 0.8 * length) 43 | delta = np.random.uniform(0, 2 * np.pi) 44 | 45 | cov = [[length**2, 0], [0, width**2]] 46 | cov = rotate_cov(cov, delta) 47 | 48 | mean = np.random.uniform(-100, 100, 2) 49 | photon_x, photon_y = np.random.multivariate_normal(mean, cov, n_photons).T 50 | 51 | data = pixel_content(photon_x, photon_y) 52 | 53 | return data 54 | 55 | 56 | def pixel_content(photon_x, photon_y): 57 | data = np.zeros(1440) 58 | for x, y in zip(photon_x, photon_y): 59 | if np.sqrt(x**2 + y**2) > 200: 60 | continue 61 | pixel = np.argmin((x - px)**2 + (y - py)**2) 62 | data[pixel] += 1 63 | return data 64 | 65 | 66 | def rotate_cov(cov, angle): 67 | rot = np.matrix([ 68 | [np.cos(angle), -np.sin(angle)], 69 | [np.sin(angle), np.cos(angle)] 70 | ]) 71 | 72 | cov = np.matrix(cov) 73 | 74 | return rot * cov * rot.T 75 | 76 | 77 | def rand_power(N=1, gamma=2.7, a=500, b=1000): 78 | assert gamma > 2, 'gamma has to be > 2' 79 | x = np.random.rand(N) 80 | exp = 1 - gamma 81 | return (a**exp - x * (a**exp - b**exp))**(1 / exp) 82 | 83 | 84 | def noise(sigma=0.5, n_pix=1440): 85 | ''' returns gaussian white noise with std dev sigma ''' 86 | return np.random.normal(0, sigma, n_pix) 87 | 88 | 89 | def nsb(lamb, n_pix=1440): 90 | return np.random.poisson(lamb, n_pix) 91 | 92 | 93 | def main(): 94 | args = docopt(__doc__) 95 | 96 
| num_events = int(args['--num-events']) 97 | noise_sigma = float(args['--noise']) 98 | nsb_lambda = float(args['--nsb']) 99 | 100 | data = np.empty((num_events, 1440)) 101 | for event in range(num_events): 102 | 103 | if args['muon']: 104 | data[event] = muon_ring() 105 | if args['gamma']: 106 | data[event] = gamma() 107 | 108 | if noise_sigma > 0: 109 | data[event] += noise(noise_sigma) 110 | 111 | if nsb_lambda > 0: 112 | data[event] += nsb(nsb_lambda) 113 | 114 | Viewer(data, 'photons', vmin=0) 115 | 116 | if __name__ == '__main__': 117 | try: 118 | main() 119 | except (KeyboardInterrupt, SystemExit): 120 | pass 121 | -------------------------------------------------------------------------------- /fact/VERSION: -------------------------------------------------------------------------------- 1 | 0.26.2 2 | -------------------------------------------------------------------------------- /fact/__init__.py: -------------------------------------------------------------------------------- 1 | from . import plotting 2 | from . import auxservices 3 | from pkg_resources import resource_string 4 | 5 | __version__ = resource_string('fact', 'VERSION').decode().strip() 6 | 7 | 8 | __all__ = [ 9 | 'plotting', 10 | 'auxservices', 11 | ] 12 | -------------------------------------------------------------------------------- /fact/analysis/__init__.py: -------------------------------------------------------------------------------- 1 | from .statistics import li_ma_significance 2 | from .binning import ontime_binning, qla_binning, groupby_observation_blocks, bin_runs 3 | 4 | from .core import calc_run_summary_source_independent, split_on_off_source_independent 5 | 6 | from .source import ( 7 | calc_theta_equatorial, 8 | calc_theta_camera, 9 | calc_theta_offs_camera, 10 | ) 11 | 12 | 13 | __all__ = [ 14 | 'li_ma_significance', 15 | 'ontime_binning', 16 | 'qla_binning', 17 | 'groupby_observation_blocks', 18 | 'bin_runs', 19 | 'calc_run_summary_source_independent', 20 | 'split_on_off_source_independent', 21 | 'calc_theta_equatorial', 22 | 'calc_theta_camera', 23 | 'calc_theta_offs_camera', 24 | ] 25 | -------------------------------------------------------------------------------- /fact/analysis/binning.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import datetime 3 | import numpy as np 4 | 5 | from .statistics import li_ma_significance 6 | 7 | 8 | def ontime_binning(runs, bin_width_minutes=20): 9 | ''' 10 | Calculate bin numbers for given runs. 11 | A new bin is created if either a bin would have more ontime 12 | than `bin_width_minutes` or `run_start` of the next run is 13 | more than `bin_width_minutes` after `run_stop` of the last run. 14 | 15 | Parameters 16 | ---------- 17 | 18 | runs: pd.DataFrame 19 | DataFrame containing analysis results and meta data 20 | for each run 21 | bin_width_minutes: number 22 | The desired amount of ontime in each bin. 
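For example, with the default of 20 minutes, runs with roughly 5 minutes of ontime each end up grouped in blocks of four, and a gap of more than `bin_width_minutes` between two runs always starts a new bin.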
23 | Note: The ontime in each bin will always be 24 | slightly less than `bin_width_minutes` 25 | ''' 26 | bin_width_sec = bin_width_minutes * 60 27 | bin_number = 0 28 | ontime_sum = 0 29 | 30 | bins = [] 31 | last_stop = runs['run_start'].iloc[0] 32 | delta_t_max = datetime.timedelta(seconds=bin_width_sec) 33 | 34 | for key, row in runs.iterrows(): 35 | delta_t = row.run_start - last_stop 36 | last_stop = row.run_stop 37 | 38 | if ontime_sum + row.ontime > bin_width_sec or delta_t > delta_t_max: 39 | bin_number += 1 40 | ontime_sum = 0 41 | 42 | bins.append(bin_number) 43 | ontime_sum += row.ontime 44 | 45 | return pd.Series(bins, index=runs.index) 46 | 47 | 48 | def qla_binning(data, bin_width_minutes=20): 49 | ''' 50 | The binning algorithm as used by lightcurve.c 51 | ''' 52 | bin_number = 0 53 | ontime_sum = 0 54 | bins = [] 55 | 56 | for key, row in data.iterrows(): 57 | if ontime_sum + row.fOnTimeAfterCuts > bin_width_minutes * 60: 58 | bin_number += 1 59 | ontime_sum = 0 60 | 61 | bins.append(bin_number) 62 | ontime_sum += row['ontime'] 63 | 64 | return pd.Series(bins, index=data.index) 65 | 66 | 67 | def groupby_observation_blocks(runs): 68 | ''' Groupby for consecutive runs of the same source''' 69 | runs = runs.sort_values('run_start') 70 | new_source = runs.fSourceName != runs.fSourceName.shift(1) 71 | observation_blocks = new_source.cumsum() 72 | return runs.groupby(observation_blocks) 73 | 74 | 75 | def nightly_binning(runs): 76 | nights = runs['night'].unique() 77 | bins = pd.Series(index=runs.index, dtype=int) 78 | 79 | for bin_id, night in enumerate(nights): 80 | bins.loc[runs.night == night] = bin_id 81 | 82 | return bins 83 | 84 | 85 | def bin_runs( 86 | runs, 87 | alpha=0.2, 88 | binning_function=ontime_binning, 89 | **kwargs 90 | ): 91 | ''' 92 | Bin runs using `binning_function` to assign bins to 93 | the individual runs. 94 | Calculates n_on, n_off, ontime, n_excess, excess_rate_per_h, 95 | excess_rate_err, significance, time_width and time_mean 96 | 97 | Parameters 98 | ---------- 99 | runs: pandas.DataFrame 100 | The analysis results and necessary metadata for each run. 101 | Required are: ontime, n_on, n_off, run_start, run_stop, source 102 | 103 | alpha: float 104 | The weight for the off regions, e.g. 
1 / number of off regions 105 | 106 | binning_function: function 107 | A function that takes the run df and returns a 108 | pd.Series containing bin ids with the index of the original 109 | dataframe 110 | 111 | All `**kwargs` are passed to the binning function 112 | ''' 113 | runs = runs.sort_values(by='run_start') 114 | sources = [] 115 | for source, df in runs.groupby('source'): 116 | 117 | df = df.copy() 118 | df['bin'] = binning_function(df, **kwargs) 119 | 120 | binned = df.groupby('bin').aggregate({ 121 | 'ontime': 'sum', 122 | 'n_on': 'sum', 123 | 'n_off': 'sum', 124 | 'run_start': 'min', 125 | 'run_stop': 'max', 126 | }) 127 | 128 | binned['n_excess'] = binned.n_on - binned.n_off * alpha 129 | binned['excess_rate_per_h'] = binned.n_excess / binned.ontime * 3600 130 | 131 | binned['time_width'] = binned.run_stop - binned.run_start 132 | binned['time_mean'] = binned.run_start + 0.5 * binned.time_width 133 | 134 | binned['excess_rate_err'] = np.sqrt(binned.n_on + alpha**2 * binned.n_off) 135 | binned['excess_rate_err'] /= binned.ontime / 3600 136 | 137 | binned['significance'] = li_ma_significance( 138 | binned.n_on, binned.n_off, alpha 139 | ) 140 | 141 | binned['source'] = source 142 | binned['night'] = ( 143 | binned.time_mean - pd.Timedelta(hours=12) 144 | ).dt.strftime('%Y%m%d').astype(int) 145 | 146 | sources.append(binned) 147 | 148 | return pd.concat(sources) 149 | -------------------------------------------------------------------------------- /fact/analysis/core.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | import pandas as pd 3 | import re 4 | 5 | from .statistics import li_ma_significance 6 | 7 | default_theta_off_keys = tuple('theta_deg_off_{}'.format(i) for i in range(1, 6)) 8 | default_prediction_off_keys = tuple( 9 | 'gamma_prediction_off_{}'.format(i) for i in range(1, 6) 10 | ) 11 | 12 | 13 | off_key_re = re.compile('([a-zA-z1-9]+_deg)_off_([0-9])?') 14 | 15 | 16 | def calc_run_summary_source_independent( 17 | events, runs, 18 | prediction_threshold, 19 | theta2_cut, 20 | prediction_key='gamma_prediction', 21 | theta_key='theta_deg', 22 | theta_off_keys=default_theta_off_keys, 23 | ): 24 | 25 | ''' 26 | Calculate run summaries for the given theta^2 and signal prediction cuts. 27 | This function requires that no source-dependent features, 28 | like theta, were used in the classification. 
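    A minimal call, with illustrative cut values::

        summary = calc_run_summary_source_independent(
            events, runs,
            prediction_threshold=0.8,
            theta2_cut=0.025,
        )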
29 | 30 | Parameters 31 | ---------- 32 | events: pd.DataFrame 33 | DataFrame with event data, needs to contain the columns 34 | `'night'`, `'run'`, `theta_key` and the `theta_off_keys` 35 | prediction_threshold: float 36 | Threshold for the classifier prediction 37 | theta2_cut: float 38 | Selection cut for theta^2 in deg^2 39 | prediction_key: str 40 | Key to the classifier prediction 41 | theta_key: str 42 | Column name of the column containing theta in degree 43 | theta_off_keys: list[str] 44 | Column names of the columns containing theta in degree 45 | for all off regions 46 | ''' 47 | 48 | runs = runs.set_index(['night', 'run_id']) 49 | runs.sort_index(inplace=True) 50 | 51 | # apply prediction threshold cut 52 | selected = events.query( 53 | '{} >= {}'.format(prediction_key, prediction_threshold) 54 | ) 55 | 56 | on_data, off_data = split_on_off_source_independent( 57 | selected, theta2_cut, theta_key, theta_off_keys 58 | ) 59 | 60 | alpha = 1 / len(theta_off_keys) 61 | 62 | runs['n_on'] = on_data.groupby(['night', 'run_id']).size() 63 | runs['n_on'].fillna(0, inplace=True) 64 | 65 | runs['n_off'] = off_data.groupby(['night', 'run_id']).size() 66 | runs['n_off'].fillna(0, inplace=True) 67 | 68 | runs['n_excess'] = runs['n_on'] - alpha * runs['n_off'] 69 | runs['n_excess_err'] = np.sqrt(runs['n_on'] + alpha**2 * runs['n_off']) 70 | 71 | runs['excess_rate_per_h'] = runs['n_excess'] / runs['ontime'] * 3600 72 | runs['excess_rate_per_h_err'] = runs['n_excess_err'] / runs['ontime'] * 3600 73 | 74 | runs['significance'] = li_ma_significance( 75 | runs['n_on'], runs['n_off'], alpha 76 | ) 77 | 78 | runs.reset_index(inplace=True) 79 | 80 | return runs 81 | 82 | 83 | def split_on_off_source_independent( 84 | events, 85 | theta2_cut, 86 | theta_key='theta_deg', 87 | theta_off_keys=default_theta_off_keys, 88 | ): 89 | ''' 90 | Split events dataframe into on and off region 91 | 92 | Parameters 93 | ---------- 94 | events: pd.DataFrame 95 | DataFrame containing event information, required are 96 | `theta_key` and `theta_off_keys`. 97 | theta2_cut: float 98 | Selection cut for theta^2 in deg^2 99 | theta_key: str 100 | Column name of the column containing theta in degree 101 | theta_off_keys: list[str] 102 | Column names of the columns containing theta in degree 103 | for all off regions 104 | ''' 105 | # apply theta2_cut 106 | theta_cut = np.sqrt(theta2_cut) 107 | 108 | on_data = events.query('{} <= {}'.format(theta_key, theta_cut)) 109 | 110 | off_dfs = [] 111 | for region, theta_off_key in enumerate(theta_off_keys, start=1): 112 | off_df = events.query('{} <= {}'.format( 113 | theta_off_key, theta_cut) 114 | ).copy() 115 | 116 | off_df['off_region'] = region 117 | drop_off_columns(off_df, region, inplace=True) 118 | 119 | off_dfs.append(off_df) 120 | 121 | off_data = pd.concat(off_dfs) 122 | 123 | return on_data, off_data 124 | 125 | 126 | def drop_off_columns(df, off_region, inplace=False): 127 | ''' 128 | Replace the "On" column with the column 129 | of the respective off region. 130 | For example for `off_region = 1`, `theta` is replaced by 131 | theta_off_1 and all theta_off_ columns are dropped. 
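    (E.g. `theta_deg_off_1` becomes the new `theta_deg`, while `theta_deg_off_2` to `theta_deg_off_5` are dropped.)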
132 | Same for all other columns, containing the pattern `_off_` 133 | ''' 134 | if inplace is False: 135 | df = df.copy() 136 | 137 | for col in df.columns: 138 | m = off_key_re.match(col) 139 | if m: 140 | on_key, key_region = m.groups() 141 | if int(key_region) == off_region: 142 | df.drop(on_key, axis=1, inplace=True) 143 | df[on_key] = df[col] 144 | 145 | df.drop(col, axis=1, inplace=True) 146 | 147 | if inplace is False: 148 | return df 149 | -------------------------------------------------------------------------------- /fact/analysis/scripts/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fact-project/pyfact/2f204dbf4a90bfa84899e2b8d6aaed891d2b09bc/fact/analysis/scripts/__init__.py -------------------------------------------------------------------------------- /fact/analysis/scripts/radec.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | import pandas as pd 3 | 4 | from joblib import Parallel, delayed 5 | import h5py 6 | import click 7 | 8 | from ...coordinates.utils import camera_to_equatorial 9 | from ...io import read_h5py_chunked 10 | from ...io import create_empty_h5py_dataset, append_to_h5py_dataset 11 | 12 | 13 | def calc_ra_dec(events): 14 | events['obstime'] = pd.to_datetime(events['timestamp']) 15 | 16 | events['ra_prediction'], events['dec_prediction'] = camera_to_equatorial( 17 | events['source_x_prediction'], 18 | events['source_y_prediction'], 19 | events['pointing_position_zd'], 20 | events['pointing_position_az'], 21 | events['obstime'].dt.to_pydatetime(), 22 | ) 23 | return events 24 | 25 | 26 | columns = ('ra_prediction', 'dec_prediction') 27 | 28 | 29 | @click.command() 30 | @click.argument('INPUTFILE', type=click.Path(exists=True, dir_okay=False)) 31 | @click.option('-c', '--chunksize', type=int, default=10000) 32 | @click.option('-n', '--n-jobs', type=int, default=-1) 33 | @click.option('-y', '--yes', is_flag=True, help='Do not ask to overwrite existing keys') 34 | def main(inputfile, chunksize, n_jobs, yes): 35 | ''' 36 | Calculate ra and dec from source position in camera coordinates, 37 | e.g. for files analysed with the classifier-tools 38 | 39 | The following keys have to be present in the h5py hdf5 file. 40 | * pointing_position_az 41 | * pointing_position_zd 42 | * source_x_prediction 43 | * source_y_prediction 44 | * timestamp 45 | ''' 46 | 47 | with h5py.File(inputfile, 'r') as f: 48 | if any(col in f['events'].keys() for col in columns) and not yes: 49 | click.confirm('Output keys already exist, overwrite? 
', abort=True) 50 | 51 | df_it = read_h5py_chunked( 52 | inputfile, 53 | key='events', 54 | columns=[ 55 | 'pointing_position_az', 56 | 'pointing_position_zd', 57 | 'source_x_prediction', 58 | 'source_y_prediction', 59 | 'timestamp', 60 | ], 61 | chunksize=chunksize 62 | ) 63 | with Parallel(n_jobs, verbose=10) as pool: 64 | 65 | dfs = pool( 66 | delayed(calc_ra_dec)(df) 67 | for df, start, stop in df_it 68 | ) 69 | 70 | df = pd.concat(dfs) 71 | 72 | with h5py.File(inputfile, mode='r+') as f: 73 | for col in ('ra_prediction', 'dec_prediction'): 74 | if col in f['events']: 75 | del f['events'][col] 76 | create_empty_h5py_dataset(df[col].values, f['events'], col) 77 | append_to_h5py_dataset(df[col].values, f['events'][col]) 78 | 79 | 80 | if __name__ == '__main__': 81 | main() 82 | -------------------------------------------------------------------------------- /fact/analysis/scripts/theta.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | import pandas as pd 3 | from fact.analysis import calc_theta_camera, calc_theta_offs_camera 4 | from fact.io import read_h5py_chunked 5 | from fact.io import create_empty_h5py_dataset, append_to_h5py_dataset 6 | from fact.instrument.constants import LOCATION 7 | from astropy.coordinates import SkyCoord, AltAz 8 | from astropy.time import Time 9 | from joblib import Parallel, delayed 10 | import h5py 11 | import click 12 | 13 | 14 | def calc_theta_source(df, source): 15 | obstime = Time(pd.to_datetime(df['timestamp']).dt.to_pydatetime()) 16 | 17 | altaz = AltAz(location=LOCATION, obstime=obstime) 18 | source_altaz = source.transform_to(altaz) 19 | 20 | df['theta_deg'] = calc_theta_camera( 21 | df.source_x_prediction, 22 | df.source_y_prediction, 23 | source_altaz.zen.deg, 24 | source_altaz.az.deg, 25 | zd_pointing=df['pointing_position_zd'], 26 | az_pointing=df['pointing_position_az'], 27 | ) 28 | theta_offs = calc_theta_offs_camera( 29 | df.source_x_prediction, 30 | df.source_y_prediction, 31 | source_altaz.zen.deg, 32 | source_altaz.az.deg, 33 | zd_pointing=df['pointing_position_zd'], 34 | az_pointing=df['pointing_position_az'], 35 | n_off=5, 36 | ) 37 | 38 | for i, theta_off in enumerate(theta_offs, start=1): 39 | df['theta_deg_off_{}'.format(i)] = theta_off 40 | 41 | return df 42 | 43 | 44 | def calc_theta_coordinates(df): 45 | 46 | df['theta_deg'] = calc_theta_camera( 47 | df.source_x_prediction, 48 | df.source_y_prediction, 49 | df['source_position_zd'], 50 | df['source_position_az'], 51 | zd_pointing=df['pointing_position_zd'], 52 | az_pointing=df['pointing_position_az'], 53 | ) 54 | theta_offs = calc_theta_offs_camera( 55 | df.source_x_prediction, 56 | df.source_y_prediction, 57 | df['source_position_zd'], 58 | df['source_position_az'], 59 | zd_pointing=df['pointing_position_zd'], 60 | az_pointing=df['pointing_position_az'], 61 | n_off=5, 62 | ) 63 | 64 | for i, theta_off in enumerate(theta_offs, start=1): 65 | df['theta_deg_off_{}'.format(i)] = theta_off 66 | 67 | return df 68 | 69 | 70 | cols = [ 71 | 'theta_deg' if i == 0 else 'theta_deg_off_{}'.format(i) 72 | for i in range(6) 73 | ] 74 | 75 | 76 | @click.command() 77 | @click.argument('INPUTFILE') 78 | @click.option( 79 | '-s', '--source', 80 | help='Source name, if not given, take `source_position_az`, and `source_position_zd`' 81 | ) 82 | @click.option('-c', '--chunksize', type=int, default=10000) 83 | @click.option('-y', '--yes', is_flag=True, help='Do not ask to overwrite existing keys') 84 | def main(inputfile, source, chunksize, 
yes): 85 | ''' 86 | Calculate theta_deg and theta_deg_offs from source position in camera coordinates, 87 | e.g. for files analysed with the classifier-tools 88 | 89 | The following keys have to be present in the h5py hdf5 file. 90 | * pointing_position_az 91 | * pointing_position_zd 92 | * source_x_prediction 93 | * source_y_prediction 94 | * timestamp (Only if a source name is given; otherwise source_position_az and source_position_zd are required) 95 | ''' 96 | 97 | with h5py.File(inputfile, 'r') as f: 98 | if any(col in f['events'].keys() for col in cols) and not yes: 99 | click.confirm('Output keys already exist, overwrite? ', abort=True) 100 | 101 | if source is None: 102 | df_it = read_h5py_chunked( 103 | inputfile, 104 | key='events', 105 | columns=[ 106 | 'pointing_position_az', 107 | 'pointing_position_zd', 108 | 'source_x_prediction', 109 | 'source_y_prediction', 110 | 'source_position_az', 111 | 'source_position_zd', 112 | ], 113 | chunksize=chunksize 114 | ) 115 | with Parallel(-1, verbose=10) as pool: 116 | 117 | dfs = pool( 118 | delayed(calc_theta_coordinates)(df) 119 | for df, start, stop in df_it 120 | ) 121 | else: 122 | source = SkyCoord.from_name(source) 123 | 124 | df_it = read_h5py_chunked( 125 | inputfile, 126 | key='events', 127 | columns=[ 128 | 'pointing_position_az', 129 | 'pointing_position_zd', 130 | 'source_x_prediction', 131 | 'source_y_prediction', 132 | 'timestamp', 133 | ], 134 | chunksize=chunksize 135 | ) 136 | 137 | with Parallel(-1, verbose=10) as pool: 138 | 139 | dfs = pool( 140 | delayed(calc_theta_source)(df, source) 141 | for df, start, stop in df_it 142 | ) 143 | 144 | df = pd.concat(dfs) 145 | 146 | with h5py.File(inputfile, mode='r+') as f: 147 | for i in range(6): 148 | if i == 0: 149 | col = 'theta_deg' 150 | else: 151 | col = 'theta_deg_off_{}'.format(i) 152 | 153 | if col in f['events']: 154 | del f['events'][col] 155 | create_empty_h5py_dataset(df[col].values, f['events'], col) 156 | append_to_h5py_dataset(df[col].values, f['events'][col]) 157 | 158 | 159 | if __name__ == '__main__': 160 | main() 161 | -------------------------------------------------------------------------------- /fact/analysis/source.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | import astropy.units as u 4 | from astropy.coordinates import AltAz, SkyCoord 5 | from astropy.coordinates.angle_utilities import angular_separation 6 | 7 | from ..coordinates import CameraFrame 8 | from ..coordinates.utils import ( 9 | arrays_to_camera, 10 | arrays_to_altaz, 11 | arrays_to_equatorial, 12 | ) 13 | from ..instrument.constants import LOCATION 14 | 15 | 16 | def calc_off_position(source_x, source_y, off_index, n_off=5): 17 | ''' 18 | For a given source position in camera coordinates, 19 | return the off position with index `off_index` out of `n_off` off positions 20 | 21 | Parameters 22 | ---------- 23 | source_x: float or astropy.units.Quantity 24 | x position of the wanted source position 25 | source_y: float or astropy.units.Quantity 26 | y position of the wanted source position 27 | n_off: int 28 | number of off positions to calculate 29 | 30 | Returns 31 | ------- 32 | x_off: float or astropy.units.Quantity 33 | x coordinate of the off position 34 | y_off: float or astropy.units.Quantity 35 | y coordinate of the off position 36 | ''' 37 | 38 | if off_index < 1 or off_index > n_off: 39 | raise ValueError('off_index must be >= 1 and <= n_off') 40 | 41 | r = np.sqrt(source_x**2 + source_y**2) 42 | phi = np.arctan2(source_y, source_x) 43 | delta_phi = 2 * np.pi / (n_off + 1) 44 | if hasattr(phi, 'unit'): 45 | 
delta_phi *= u.rad 46 | 47 | x_off = r * np.cos(phi + off_index * delta_phi) 48 | y_off = r * np.sin(phi + off_index * delta_phi) 49 | 50 | return x_off, y_off 51 | 52 | 53 | def calc_theta_equatorial( 54 | source_ra_prediction, 55 | source_dec_prediction, 56 | source_ra, 57 | source_dec): 58 | ''' 59 | Calculate the angular distance between reconstructed source 60 | position and assumed source position, both given in the 61 | equatorial coordinate system. 62 | 63 | Parameters 64 | ---------- 65 | source_ra_prediction: number or array-like 66 | prediction of the right ascension of the source position 67 | in hourangle 68 | source_dec_prediction: number or array-like 69 | prediction of the declination of the source position 70 | in degree 71 | source_ra: number or array-like 72 | Right ascension of the source position in hourangle 73 | source_dec: number or array-like 74 | Declination of the source position in degree 75 | 76 | Returns 77 | ------- 78 | theta_deg: array 79 | theta in degrees 80 | ''' 81 | source_prediction = arrays_to_equatorial( 82 | source_ra_prediction, source_dec_prediction, 83 | ) 84 | source_pos = arrays_to_equatorial(source_ra, source_dec) 85 | 86 | return source_pos.separation(source_prediction).deg 87 | 88 | 89 | def calc_theta_camera( 90 | source_x_prediction, 91 | source_y_prediction, 92 | source_zd, 93 | source_az, 94 | zd_pointing, 95 | az_pointing): 96 | ''' 97 | Calculate the angular distance between reconstructed source 98 | position and assumed source position, where the 99 | prediction is given as camera coordinates and the source position 100 | in the horizontal coordinate system. 101 | 102 | Parameters 103 | ---------- 104 | source_x_prediction: number or array-like 105 | prediction of the x position of the source in mm 106 | source_y_prediction: number or array-like 107 | prediction of the y position of the source in mm 108 | source_zd: number or array-like 109 | Zenith of the source position in degree 110 | source_az: number or array-like 111 | Azimuth of the source position in degree 112 | zd_pointing: number or array-like 113 | zenith angle of the pointing direction in degrees 114 | az_pointing: number or array-like 115 | azimuth angle of the pointing direction in degrees 116 | 117 | Returns 118 | ------- 119 | theta_deg: array 120 | theta in degrees 121 | ''' 122 | pointing = arrays_to_altaz(zd_pointing, az_pointing) 123 | altaz = AltAz(location=LOCATION) 124 | 125 | source_prediction = arrays_to_camera( 126 | source_x_prediction, source_y_prediction, 127 | pointing_direction=pointing, 128 | ) 129 | 130 | source_pos = arrays_to_altaz(source_zd, source_az) 131 | source_prediction_alt_az = source_prediction.transform_to(altaz) 132 | return angular_separation( 133 | source_prediction_alt_az.az, source_prediction_alt_az.alt, 134 | source_pos.az, source_pos.alt, 135 | ).to(u.deg).value 136 | 137 | 138 | def calc_theta_offs_camera( 139 | source_x_prediction, 140 | source_y_prediction, 141 | source_zd, 142 | source_az, 143 | zd_pointing, 144 | az_pointing, 145 | n_off=5): 146 | ''' 147 | Calculate the angular distance between reconstructed source 148 | position and `n_off` off positions. 
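    The off positions are obtained by rotating the assumed source position around the camera center in steps of 2 * pi / (n_off + 1), as done in `calc_off_position`.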
149 | 150 | Parameters 151 | ---------- 152 | source_x_prediction: number or array-like 153 | prediction of the x coordinate of the source position in mm 154 | source_y_prediction: number or array-like 155 | prediction of the y coordinate of the source position in mm 156 | source_zd: number or array-like 157 | Zenith of the source position in degree 158 | source_az: number or array-like 159 | Azimuth of the source position in degree 160 | zd_pointing: number or array-like 161 | zenith angle of the pointing direction in degrees 162 | az_pointing: number or array-like 163 | azimuth angle of the pointing direction in degrees 164 | n_off: int 165 | How many off positions to calculate 166 | 167 | Returns 168 | ------- 169 | theta_deg: n_off-tuple 170 | theta in degrees for each off position 171 | ''' 172 | pointing = arrays_to_altaz(zd_pointing, az_pointing) 173 | source = arrays_to_altaz(source_zd, source_az) 174 | source_prediction = arrays_to_camera( 175 | source_x_prediction, source_y_prediction, 176 | pointing_direction=pointing, 177 | ) 178 | 179 | altaz = AltAz(location=LOCATION) 180 | camera_frame = CameraFrame( 181 | location=LOCATION, 182 | pointing_direction=pointing 183 | ) 184 | 185 | source_prediction_alt_az = source_prediction.transform_to(altaz) 186 | source_camera = source.transform_to(camera_frame) 187 | 188 | theta_offs = [] 189 | 190 | for i in range(1, n_off + 1): 191 | off_x, off_y = calc_off_position( 192 | source_camera.x, source_camera.y, off_index=i, n_off=n_off 193 | ) 194 | 195 | off_pos = SkyCoord(off_x, off_y, frame=camera_frame) 196 | off_pos_altaz = off_pos.transform_to(altaz) 197 | theta_offs.append(angular_separation( 198 | source_prediction_alt_az.az, source_prediction_alt_az.alt, 199 | off_pos_altaz.az, off_pos_altaz.alt, 200 | ).to(u.deg).value) 201 | return tuple(theta_offs) 202 | -------------------------------------------------------------------------------- /fact/auxservices/__init__.py: -------------------------------------------------------------------------------- 1 | from .base import AuxService 2 | from .services import * 3 | -------------------------------------------------------------------------------- /fact/auxservices/base.py: -------------------------------------------------------------------------------- 1 | from astropy.table import Table 2 | from astropy.units import UnitsWarning 3 | from ..path import tree_path 4 | from functools import partial 5 | import warnings 6 | 7 | 8 | class AuxService: 9 | 10 | renames = {} 11 | ignored_columns = [] 12 | transforms = {} 13 | basename = 'AUX_SERVICE' 14 | 15 | def __init__(self, auxdir='/fact/aux'): 16 | self.path = partial( 17 | tree_path, 18 | run=None, 19 | prefix=auxdir, 20 | suffix='.' 
+ self.basename + '.fits') 21 | 22 | @classmethod 23 | def read_file(cls, filename): 24 | with warnings.catch_warnings(): 25 | warnings.simplefilter('ignore', category=UnitsWarning) 26 | table = Table.read(filename) 27 | 28 | for column in table.columns.keys(): 29 | if column in cls.ignored_columns: 30 | table.remove_column(column) 31 | 32 | elif column in cls.renames: 33 | table[column].name = cls.renames[column] 34 | 35 | for column in table.columns.keys(): 36 | shape = table[column].shape 37 | if len(shape) > 1: 38 | for i in range(shape[1]): 39 | table[column + '_{}'.format(i)] = table[column][:, i] 40 | table.remove_column(column) 41 | 42 | df = table.to_pandas() 43 | 44 | for key, transform in cls.transforms.items(): 45 | df[key] = transform(df[key]) 46 | 47 | return df 48 | 49 | def read_date(self, date): 50 | return self.read_file(self.path(int('{:%Y%m%d}'.format(date)))) 51 | -------------------------------------------------------------------------------- /fact/auxservices/services.py: -------------------------------------------------------------------------------- 1 | from .base import AuxService 2 | import pandas as pd 3 | 4 | 5 | def fact_mjd_to_datetime(fact_mjd): 6 | ''' convert fact mjds (days since unix epoch) to pandas datetimes ''' 7 | return pd.to_datetime(fact_mjd * 24 * 3600 * 1e9) 8 | 9 | 10 | __all__ = [ 11 | 'MagicWeather', 12 | 'PfMini', 13 | 'DriveTracking', 14 | 'DrivePointing', 15 | 'DriveSource', 16 | 'FSCHumidity', 17 | 'FSCTemperature', 18 | 'FTMTriggerRates', 19 | 'BiasVoltage', 20 | 'FADTemperature', 21 | ] 22 | 23 | 24 | class MagicWeather(AuxService): 25 | basename = 'MAGIC_WEATHER_DATA' 26 | renames = { 27 | 'Time': 'timestamp', 28 | 'T': 'temperature', 29 | 'T_dew': 'dewpoint', 30 | 'H': 'humidity', 31 | 'P': 'pressure', 32 | 'v': 'wind_speed', 33 | 'v_max': 'wind_gust_speed', 34 | 'd': 'wind_direction', 35 | } 36 | 37 | ignored_columns = ['stat', 'QoS'] 38 | transforms = {'timestamp': fact_mjd_to_datetime} 39 | 40 | 41 | class PfMini(AuxService): 42 | basename = 'PFMINI_CONTROL_DATA' 43 | renames = { 44 | 'Time': 'timestamp', 45 | 'Temperature': 'temperature', 46 | 'Humidity': 'humidity', 47 | } 48 | 49 | ignored_columns = ['QoS', ] 50 | transforms = {'timestamp': fact_mjd_to_datetime} 51 | 52 | 53 | class DriveTracking(AuxService): 54 | basename = 'DRIVE_CONTROL_TRACKING_POSITION' 55 | renames = { 56 | 'Time': 'timestamp', 57 | 'Ra': 'right_ascension', 58 | 'Dec': 'declination', 59 | 'Ha': 'hourangle', 60 | 'SrcHa': 'hourangle_source', 61 | 'SrcRa': 'right_ascension_source', 62 | 'SrcDec': 'declination_source', 63 | 'HaDec': 'hourangle_source', 64 | 'Zd': 'zenith', 65 | 'Az': 'azimuth', 66 | 'dZd': 'zenith_deviation', 67 | 'dAz': 'azimuth_deviation', 68 | 'dev': 'absolute_control_deviation', 69 | 'avgdev': 'average_control_deviation', 70 | } 71 | transforms = {'timestamp': fact_mjd_to_datetime} 72 | ignored_columns = ['QoS', ] 73 | 74 | 75 | class DrivePointing(AuxService): 76 | basename = 'DRIVE_CONTROL_POINTING_POSITION' 77 | renames = { 78 | 'Time': 'timestamp', 79 | 'Zd': 'zenith', 80 | 'Az': 'azimuth', 81 | } 82 | transforms = {'timestamp': fact_mjd_to_datetime} 83 | ignored_columns = ['QoS', ] 84 | 85 | 86 | class DriveSource(AuxService): 87 | basename = 'DRIVE_CONTROL_SOURCE_POSITION' 88 | renames = { 89 | 'Time': 'timestamp', 90 | 'Ra_src': 'right_ascension_source', 91 | 'Ra_cmd': 'right_ascension_command', 92 | 'Dec_src': 'declination_source', 93 | 'Dec_cmd': 'declination_command', 94 | 'Offset': 'wobble_offset', 95 | 'Angle': 'wobble_angle', 
96 | 'Name': 'source', 97 | 'Period': 'orbit_period', 98 | } 99 | transforms = {'timestamp': fact_mjd_to_datetime} 100 | ignored_columns = ['QoS', ] 101 | 102 | 103 | class FSCHumidity(AuxService): 104 | basename = 'FSC_CONTROL_HUMIDITY' 105 | renames = { 106 | 'Time': 'timestamp', 107 | 't': 'fsc_uptime', 108 | 'H': 'humidity', 109 | } 110 | transforms = {'timestamp': fact_mjd_to_datetime} 111 | ignored_columns = ['QoS', ] 112 | 113 | 114 | class FSCTemperature(AuxService): 115 | basename = 'FSC_CONTROL_TEMPERATURE' 116 | renames = { 117 | 'Time': 'timestamp', 118 | 't': 'fsc_uptime', 119 | 'T_crate': 'crate_temperature', 120 | 'T_sens': 'sensor_compartment_temperature', 121 | 'T_ps': 'power_supply_temperature', 122 | 'T_aux': 'auxiliary_power_supply_temperature', 123 | 'T_back': 'ftm_backpanel_temperature', 124 | 'T_eth': 'ethernet_temperature', 125 | } 126 | transforms = {'timestamp': fact_mjd_to_datetime} 127 | ignored_columns = ['QoS', ] 128 | 129 | 130 | class FTMTriggerRates(AuxService): 131 | basename = 'FTM_CONTROL_TRIGGER_RATES' 132 | renames = { 133 | 'Time': 'timestamp', 134 | 'FTMtimeStamp': 'ftm_timestamp', 135 | 'OnTimeCounter': 'effective_ontime', 136 | 'TriggerCounter': 'trigger_counter', 137 | 'TriggerRate': 'trigger_rate', 138 | 'BoardRate': 'board_rate', 139 | 'PatchRate': 'patch_rate', 140 | 'OnTime': 'ontime', 141 | 'ElapsedTime': 'elapsed_time', 142 | } 143 | transforms = { 144 | 'timestamp': fact_mjd_to_datetime, 145 | 'ftm_timestamp': lambda x: x/1e6, 146 | 'effective_ontime': lambda x: x/1e6, 147 | } 148 | ignored_columns = ['QoS', ] 149 | 150 | 151 | class BiasVoltage(AuxService): 152 | basename = "BIAS_CONTROL_VOLTAGE" 153 | renames = { 154 | 'Time': 'timestamp', 155 | 'Uout': 'bias_voltage', 156 | } 157 | transforms = { 158 | 'timestamp': fact_mjd_to_datetime, 159 | } 160 | ignored_columns = ['QoS', ] 161 | 162 | 163 | class FADTemperature(AuxService): 164 | basename = "FAD_CONTROL_TEMPERATURE" 165 | renames = { 166 | 'Time': 'timestamp', 167 | 'cnt': 'count', 168 | 'temp': 'temperature', 169 | } 170 | transforms = { 171 | 'timestamp': fact_mjd_to_datetime, 172 | } 173 | ignored_columns = ['QoS', ] 174 | -------------------------------------------------------------------------------- /fact/coordinates/__init__.py: -------------------------------------------------------------------------------- 1 | from .camera_frame import CameraFrame 2 | from .utils import equatorial_to_camera, camera_to_equatorial 3 | from .utils import horizontal_to_camera, camera_to_horizontal 4 | 5 | 6 | __all__ = [ 7 | 'CameraFrame', 8 | 'equatorial_to_camera', 9 | 'camera_to_equatorial', 10 | 'horizontal_to_camera', 11 | 'camera_to_horizontal', 12 | ] 13 | -------------------------------------------------------------------------------- /fact/coordinates/camera_frame.py: -------------------------------------------------------------------------------- 1 | from astropy.coordinates import ( 2 | BaseCoordinateFrame, 3 | AltAz, 4 | frame_transform_graph, 5 | FunctionTransform, 6 | ) 7 | 8 | from astropy.coordinates import ( 9 | CoordinateAttribute, 10 | TimeAttribute, 11 | EarthLocationAttribute, 12 | Attribute 13 | ) 14 | 15 | from astropy.coordinates.matrix_utilities import rotation_matrix 16 | from astropy.coordinates.representation import CartesianRepresentation 17 | import astropy.units as u 18 | 19 | from .representation import FACTPlanarRepresentation 20 | from ..instrument.constants import FOCAL_LENGTH_MM, LOCATION 21 | import numpy as np 22 | 23 | focal_length = FOCAL_LENGTH_MM * u.mm 
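# A minimal usage sketch of the frame defined below, with illustrative
# pointing and position values:
#
#     from astropy.coordinates import SkyCoord
#
#     pointing = AltAz(alt=70 * u.deg, az=-80 * u.deg)
#     cam_coord = SkyCoord(
#         x=50 * u.mm, y=0 * u.mm,
#         frame=CameraFrame(pointing_direction=pointing),
#     )
#     alt_az = cam_coord.transform_to(AltAz(location=LOCATION))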
24 | 25 | 26 | class CameraFrame(BaseCoordinateFrame): 27 | ''' 28 | Astropy CoordinateFrame representing coordinates in the CameraPlane 29 | 30 | Attributes 31 | ---------- 32 | pointing_direction: astropy.coordinates.AltAz 33 | The pointing direction of the telescope 34 | obstime: astropy.Time 35 | The timestamp of the observation, only needed to directly transform 36 | to Equatorial coordinates, transforming to AltAz does not need this. 37 | location: astropy.coordinates.EarthLocation 38 | The location of the observer, only needed to directly transform 39 | to Equatorial coordinates, transforming to AltAz does not need this, 40 | default is FACT's location 41 | rotated: bool 42 | True means x points right and y points up when looking on the camera 43 | from the dish, which is the definition of FACT-Tools >= 1.0 and Mars. 44 | False means x points up and y points left, 45 | which is the definition in the original FACTPixelMap file. 46 | ''' 47 | default_representation = FACTPlanarRepresentation 48 | pointing_direction = CoordinateAttribute(frame=AltAz, default=None) 49 | obstime = TimeAttribute(default=None) 50 | location = EarthLocationAttribute(default=LOCATION) 51 | rotated = Attribute(default=True) 52 | 53 | 54 | @frame_transform_graph.transform(FunctionTransform, CameraFrame, AltAz) 55 | def camera_to_altaz(camera, altaz): 56 | if camera.pointing_direction is None: 57 | raise ValueError('Pointing Direction must not be None') 58 | 59 | x = camera.x.copy() 60 | y = camera.y.copy() 61 | 62 | if camera.rotated is True: 63 | x, y = y, -x 64 | 65 | z = 1 / np.sqrt(1 + (x / focal_length)**2 + (y / focal_length)**2) 66 | x *= z / focal_length 67 | y *= z / focal_length 68 | 69 | cartesian = CartesianRepresentation(x, y, z, copy=False) 70 | 71 | rot_z_az = rotation_matrix(-camera.pointing_direction.az, 'z') 72 | rot_y_zd = rotation_matrix(-camera.pointing_direction.zen, 'y') 73 | 74 | cartesian = cartesian.transform(rot_y_zd) 75 | cartesian = cartesian.transform(rot_z_az) 76 | 77 | altitude = 90 * u.deg - np.arccos(cartesian.z) 78 | azimuth = np.arctan2(cartesian.y, cartesian.x) 79 | 80 | return AltAz( 81 | alt=altitude, 82 | az=azimuth, 83 | location=camera.location, 84 | obstime=camera.obstime, 85 | ) 86 | 87 | 88 | @frame_transform_graph.transform(FunctionTransform, AltAz, CameraFrame) 89 | def altaz_to_camera(altaz, camera): 90 | if camera.pointing_direction is None: 91 | raise ValueError('Pointing Direction must not be None') 92 | 93 | cartesian = altaz.cartesian 94 | 95 | rot_z_az = rotation_matrix(camera.pointing_direction.az, 'z') 96 | rot_y_zd = rotation_matrix(camera.pointing_direction.zen, 'y') 97 | 98 | cartesian = cartesian.transform(rot_z_az) 99 | cartesian = cartesian.transform(rot_y_zd) 100 | 101 | x = (cartesian.x * focal_length / cartesian.z).copy() 102 | y = (cartesian.y * focal_length / cartesian.z).copy() 103 | 104 | if camera.rotated is True: 105 | x, y = -y, x 106 | 107 | return CameraFrame( 108 | x=x, 109 | y=y, 110 | pointing_direction=camera.pointing_direction, 111 | obstime=altaz.obstime, 112 | location=camera.location, 113 | ) 114 | -------------------------------------------------------------------------------- /fact/coordinates/representation.py: -------------------------------------------------------------------------------- 1 | from astropy.coordinates import BaseRepresentation, CartesianRepresentation 2 | import astropy.units as u 3 | from collections import OrderedDict 4 | import numpy as np 5 | 6 | 7 | class FACTPlanarRepresentation(BaseRepresentation): 8 | ''' 
9 | Representation of a point in a 2D plane. This is needed for coordinate 10 | frames to store their coordinates internally. 11 | This is essentially a copy of the Cartesian representation used in astropy. 12 | Copied from ctapipe. 13 | 14 | Parameters 15 | ---------- 16 | x, y : `~astropy.units.Quantity` 17 | The x and y coordinates of the point(s). If ``x`` and ``y`` have 18 | different shapes, they should be broadcastable. 19 | copy : bool, optional 20 | If True arrays will be copied rather than referenced. 21 | ''' 22 | attr_classes = OrderedDict([('x', u.Quantity), 23 | ('y', u.Quantity)]) 24 | 25 | def __init__(self, x, y, copy=True, **kwargs): 26 | 27 | if x is None or y is None: 28 | raise ValueError( 29 | 'x and y are required to instantiate FACTPlanarRepresentation' 30 | ) 31 | 32 | if not isinstance(x, self.attr_classes['x']): 33 | raise TypeError('x should be a {0}'.format(self.attr_classes['x'].__name__)) 34 | 35 | if not isinstance(y, self.attr_classes['y']): 36 | raise TypeError('y should be a {0}'.format(self.attr_classes['y'].__name__)) 37 | 38 | x = self.attr_classes['x'](x, copy=copy) 39 | y = self.attr_classes['y'](y, copy=copy) 40 | 41 | if not (x.unit.physical_type == y.unit.physical_type): 42 | raise u.UnitsError("x and y should have matching physical types") 43 | 44 | try: 45 | x, y = np.broadcast_arrays(x, y, subok=True) 46 | except ValueError: 47 | raise ValueError("Input parameters x and y cannot be broadcast") 48 | 49 | self._x = x 50 | self._y = y 51 | self._differentials = {} 52 | 53 | @property 54 | def x(self): 55 | ''' 56 | The x component of the point(s). 57 | ''' 58 | return self._x 59 | 60 | @property 61 | def y(self): 62 | ''' 63 | The y component of the point(s). 64 | ''' 65 | return self._y 66 | 67 | @property 68 | def xy(self): 69 | return u.Quantity((self._x, self._y)) 70 | 71 | @property 72 | def components(self): 73 | return 'x', 'y' 74 | 75 | @classmethod 76 | def from_cartesian(cls, cartesian): 77 | return cls(x=cartesian.x, y=cartesian.y) 78 | 79 | def to_cartesian(self): 80 | return CartesianRepresentation(x=self._x, y=self._y, z=0*self._x.unit) 81 | -------------------------------------------------------------------------------- /fact/coordinates/utils.py: -------------------------------------------------------------------------------- 1 | from astropy.coordinates import AltAz, ICRS, SkyCoord 2 | from astropy.time import Time 3 | import astropy.units as u 4 | import numpy as np 5 | import pandas as pd 6 | from .camera_frame import CameraFrame 7 | from ..instrument.constants import LOCATION 8 | from datetime import datetime 9 | 10 | 11 | def arrays_to_altaz(zenith, azimuth, obstime=None): 12 | if obstime is not None: 13 | obstime = to_astropy_time(obstime) 14 | return AltAz( 15 | az=np.asanyarray(azimuth) * u.deg, 16 | alt=np.asanyarray(90 - zenith) * u.deg, 17 | location=LOCATION, 18 | obstime=obstime, 19 | ) 20 | 21 | 22 | def arrays_to_camera(x, y, pointing_direction, obstime=None, rotated=True): 23 | if obstime is not None: 24 | obstime = to_astropy_time(obstime) 25 | frame = CameraFrame( 26 | pointing_direction=pointing_direction, 27 | obstime=obstime, 28 | rotated=rotated, 29 | ) 30 | return SkyCoord( 31 | x=np.asanyarray(x) * u.mm, 32 | y=np.asanyarray(y) * u.mm, 33 | frame=frame, 34 | ) 35 | 36 | 37 | def arrays_to_equatorial(ra, dec, obstime=None): 38 | if obstime is not None: 39 | obstime = to_astropy_time(obstime) 40 | 41 | return SkyCoord( 42 | ra=np.asanyarray(ra) * u.hourangle, 43 | dec=np.asanyarray(dec) * u.deg, 44 | 
obstime=obstime
45 | )
46 |
47 |
48 | def to_astropy_time(series_or_array):
49 | '''
50 | Convert a pandas or numpy time object to an astropy time
51 | '''
52 | if isinstance(series_or_array, Time):
53 | return series_or_array
54 | elif isinstance(series_or_array, pd.Series):
55 | time = series_or_array.dt.to_pydatetime()
56 | elif isinstance(series_or_array, (pd.DatetimeIndex, pd.Timestamp)):
57 | time = series_or_array.to_pydatetime()
58 | else:
59 | # convert to us, as ns precision would yield integers instead of
60 | # datetimes, and datetime cannot handle ns
61 | time = (
62 | np.asanyarray(series_or_array)
63 | .astype('datetime64[us]')
64 | .astype(datetime)
65 | )
66 |
67 | return Time(time, scale='utc')
68 |
69 |
70 | def equatorial_to_camera(ra, dec, zd_pointing, az_pointing, obstime, rotated=True):
71 | '''
72 | Convert sky coordinates from the equatorial frame to FACT camera
73 | coordinates.
74 |
75 | Parameters
76 | ----------
77 | ra: number or array-like
78 | Right ascension in hourangle
79 | dec: number or array-like
80 | Declination in degrees
81 | zd_pointing: number or array-like
82 | Zenith distance of the telescope pointing direction in degree
83 | az_pointing: number or array-like
84 | Azimuth of the telescope pointing direction in degree
85 | obstime: datetime or np.datetime64
86 | Time of the observations
87 | rotated: bool
88 | True means x points right and y points up when looking at the camera
89 | from the dish, which is the definition of FACT-Tools >= 1.0 and Mars.
90 | False means x points up and y points left,
91 | which is the definition in the original FACTPixelMap file.
92 |
93 | Returns
94 | -------
95 | x: number or array-like
96 | x-coordinate in the camera plane in mm.
97 | Following the axes of the FACTPixelMap file (and FACT-Tools).
98 | y: number or array-like
99 | y-coordinate in the camera plane in mm.
100 | Following the axes of the FACTPixelMap file (and FACT-Tools).
101 | '''
102 | eq_coordinates = arrays_to_equatorial(ra, dec, obstime=obstime)
103 | pointing_direction = arrays_to_altaz(zd_pointing, az_pointing, obstime)
104 |
105 | camera_frame = CameraFrame(pointing_direction=pointing_direction, rotated=rotated)
106 | cam_coordinates = eq_coordinates.transform_to(camera_frame)
107 |
108 | return cam_coordinates.x.to(u.mm).value, cam_coordinates.y.to(u.mm).value
109 |
110 |
111 | def camera_to_equatorial(x, y, zd_pointing, az_pointing, obstime, rotated=True):
112 | '''
113 | Convert FACT camera coordinates to sky coordinates in the equatorial (icrs)
114 | frame.
115 |
116 | Parameters
117 | ----------
118 | x: number or array-like
119 | x-coordinate in the camera plane in mm.
120 | Following the axes of the FACTPixelMap file (and FACT-Tools).
121 | y: number or array-like
122 | y-coordinate in the camera plane in mm.
123 | Following the axes of the FACTPixelMap file (and FACT-Tools).
124 | zd_pointing: number or array-like
125 | Zenith distance of the telescope pointing direction in degree
126 | az_pointing: number or array-like
127 | Azimuth of the telescope pointing direction in degree
128 | obstime: datetime or np.datetime64
129 | Time of the observations
130 | rotated: bool
131 | True means x points right and y points up when looking at the camera
132 | from the dish, which is the definition of FACT-Tools >= 1.0 and Mars.
133 | False means x points up and y points left,
134 | which is the definition in the original FACTPixelMap file.
135 |
136 | Returns
137 | -------
138 | ra: number or array-like
139 | Right ascension in hourangle
140 | dec: number or array-like
141 | Declination in degrees
142 | '''
143 | pointing_direction = arrays_to_altaz(zd_pointing, az_pointing, obstime)
144 | cam_coordinates = arrays_to_camera(
145 | x, y, pointing_direction, obstime=obstime, rotated=rotated
146 | )
147 | eq_coordinates = cam_coordinates.transform_to(ICRS)
148 |
149 | return eq_coordinates.ra.hourangle, eq_coordinates.dec.deg
150 |
151 |
152 | def horizontal_to_camera(zd, az, zd_pointing, az_pointing, rotated=True):
153 | '''
154 | Convert sky coordinates from the horizontal (AltAz) frame to FACT camera
155 | coordinates.
156 |
157 | Parameters
158 | ----------
159 | zd: number or array-like
160 | Zenith distance in degrees
161 | az: number or array-like
162 | Azimuth in degrees
163 | zd_pointing: number or array-like
164 | Zenith distance of the telescope pointing direction in degree
165 | az_pointing: number or array-like
166 | Azimuth of the telescope pointing direction in degree
167 | rotated: bool
168 | True means x points right and y points up when looking at the camera
169 | from the dish, which is the definition of FACT-Tools >= 1.0 and Mars.
170 | False means x points up and y points left,
171 | which is the definition in the original FACTPixelMap file.
172 |
173 | Returns
174 | -------
175 | x: number or array-like
176 | x-coordinate in the camera plane in mm.
177 | Following the axes of the FACTPixelMap file (and FACT-Tools).
178 | y: number or array-like
179 | y-coordinate in the camera plane in mm.
180 | Following the axes of the FACTPixelMap file (and FACT-Tools).
181 | '''
182 | altaz = arrays_to_altaz(zd, az)
183 | pointing_direction = arrays_to_altaz(zd_pointing, az_pointing)
184 |
185 | camera_frame = CameraFrame(
186 | pointing_direction=pointing_direction, rotated=rotated
187 | )
188 | cam_coordinates = altaz.transform_to(camera_frame)
189 |
190 | return cam_coordinates.x.to(u.mm).value, cam_coordinates.y.to(u.mm).value
191 |
192 |
193 | def camera_to_horizontal(x, y, zd_pointing, az_pointing, rotated=True):
194 | '''
195 | Convert FACT camera coordinates to sky coordinates in the horizontal
196 | (AltAz) frame.
197 |
198 | Parameters
199 | ----------
200 | x: number or array-like
201 | x-coordinate in the camera plane in mm.
202 | Following the axes of the FACTPixelMap file (and FACT-Tools).
203 | y: number or array-like
204 | y-coordinate in the camera plane in mm.
205 | Following the axes of the FACTPixelMap file (and FACT-Tools).
206 | zd_pointing: number or array-like
207 | Zenith distance of the telescope pointing direction in degree
208 | az_pointing: number or array-like
209 | Azimuth of the telescope pointing direction in degree
210 | rotated: bool
211 | True means x points right and y points up when looking at the camera
212 | from the dish, which is the definition of FACT-Tools >= 1.0 and Mars.
213 | False means x points up and y points left,
214 | which is the definition in the original FACTPixelMap file.
215 |
216 | Returns
217 | -------
218 | zd: number or array-like
219 | Zenith distance in degrees
220 | az: number or array-like
221 | Azimuth in degrees
222 | '''
223 | pointing_direction = arrays_to_altaz(zd_pointing, az_pointing)
224 | cam_coordinates = arrays_to_camera(
225 | x, y, pointing_direction, rotated=rotated
226 | )
227 | altaz = cam_coordinates.transform_to(AltAz(location=LOCATION))
228 |
229 | return altaz.zen.deg, altaz.az.deg
230 |
-------------------------------------------------------------------------------- /fact/credentials/__init__.py: --------------------------------------------------------------------------------
1 | from functools import lru_cache
2 | from configparser import ConfigParser
3 | from getpass import getpass
4 | from simplecrypt import decrypt
5 | from io import StringIO
6 | from pkg_resources import resource_stream
7 | from sqlalchemy import create_engine
8 | import socket
9 | import os
10 |
11 |
12 | __all__ = ['get_credentials', 'create_factdb_engine']
13 |
14 |
15 | @lru_cache(1)
16 | def get_credentials():
17 | '''
18 | Get a ConfigParser instance with FACT credentials.
19 | On the first call, you will be prompted for the FACT password.
20 |
21 | The following credentials are stored:
22 |
23 | - telegram
24 | - token
25 |
26 | - database
27 | - user
28 | - password
29 | - host
30 | - database
31 |
32 | - twilio
33 | - sid
34 | - auth_token
35 | - number
36 |
37 | Use get_credentials().get(group, element) to retrieve elements.
38 | '''
39 | if 'FACT_PASSWORD' in os.environ:
40 | passwd = os.environ['FACT_PASSWORD']
41 | else:
42 | passwd = getpass('Please enter the current, universal FACT password: ')
43 |
44 | with resource_stream('fact', 'credentials/credentials.encrypted') as f:
45 | decrypted = decrypt(passwd, f.read()).decode('utf-8')
46 |
47 | config = ConfigParser()
48 | config.read_file(StringIO(decrypted))
49 |
50 | return config
51 |
52 |
53 | def create_factdb_engine(database=None):
54 | '''
55 | returns a sqlalchemy.Engine pointing to the factdata database
56 |
57 | The different hostname on isdc machines is handled correctly.
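Example (a minimal sketch; assumes the encrypted credentials can be
decrypted and that a `RunInfo` table with `fNight` and `fRunID` columns
exists, as used elsewhere in this package)::

    import pandas as pd
    from fact.credentials import create_factdb_engine

    engine = create_factdb_engine()
    df = pd.read_sql_query('SELECT fNight, fRunID FROM RunInfo LIMIT 5', engine)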
58 | ''' 59 | spec = 'mysql+pymysql://{user}:{password}@{host}/{database}' 60 | 61 | creds = get_credentials() 62 | config = dict(creds['database']) 63 | 64 | if socket.gethostname().startswith('isdc'): 65 | config['host'] = 'lp-fact' 66 | 67 | if database is not None: 68 | config['database'] = database 69 | 70 | return create_engine( 71 | spec.format(**config), 72 | connect_args={'ssl': {'ssl-mode': 'preferred'}}, 73 | ) 74 | -------------------------------------------------------------------------------- /fact/credentials/credentials.encrypted: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fact-project/pyfact/2f204dbf4a90bfa84899e2b8d6aaed891d2b09bc/fact/credentials/credentials.encrypted -------------------------------------------------------------------------------- /fact/encrypt_credentials.py: -------------------------------------------------------------------------------- 1 | from getpass import getpass 2 | import simplecrypt 3 | 4 | def decrypt(inpath, outpath): 5 | password = getpass() 6 | with open(inpath, 'rb') as infile, open(outpath, 'wb') as outfile: 7 | outfile.write( 8 | simplecrypt.decrypt(password, infile.read()) 9 | ) 10 | 11 | 12 | def encrypt(inpath, outpath): 13 | password = getpass() 14 | with open(inpath, 'rb') as infile, open(outpath, 'wb') as outfile: 15 | outfile.write( 16 | simplecrypt.encrypt(password, infile.read()) 17 | ) 18 | 19 | -------------------------------------------------------------------------------- /fact/factdb/__init__.py: -------------------------------------------------------------------------------- 1 | from .database import factdata_db, connect_database 2 | from .utils import ( 3 | read_into_dataframe, 4 | get_ontime_by_source, 5 | get_ontime_by_source_and_runtype, 6 | get_correct_ontime, 7 | ontime, 8 | run_duration, 9 | ) 10 | from .models import * 11 | -------------------------------------------------------------------------------- /fact/factdb/database.py: -------------------------------------------------------------------------------- 1 | from peewee import MySQLDatabase 2 | import wrapt 3 | from ..credentials import get_credentials 4 | 5 | factdata_db = MySQLDatabase(None) 6 | 7 | 8 | def connect_database(config=None): 9 | if config is None: 10 | config = get_credentials()['database'] 11 | factdata_db.init(**config) 12 | factdata_db.connect() 13 | -------------------------------------------------------------------------------- /fact/factdb/utils.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import peewee 3 | from .models import RunInfo, Source, RunType 4 | from ..credentials import create_factdb_engine 5 | from ..time import to_night_int 6 | 7 | 8 | SECOND = peewee.SQL('SECOND') 9 | run_duration = peewee.fn.TIMESTAMPDIFF( 10 | SECOND, RunInfo.frunstart, RunInfo.frunstop 11 | ) 12 | ontime = (run_duration * RunInfo.feffectiveon) 13 | 14 | 15 | def read_into_dataframe(query, engine=None): 16 | ''' read the result of a peewee query object into a pandas DataFrame ''' 17 | engine = engine or create_factdb_engine() 18 | sql, params = query.sql() 19 | 20 | with engine.connect() as conn: 21 | df = pd.read_sql_query(sql, conn, params=params) 22 | 23 | return df 24 | 25 | 26 | def get_correct_ontime(start=None, end=None, engine=None): 27 | ''' 28 | The database field fOnTime underestimates the real ontime by about 5 seconds 29 | because of how the number is calculated from the FTM auxfiles. 
30 | A better estimate can be obtained by taking (fRunStop - fRunStart) * fEffectiveOn.
31 |
32 | Parameters
33 | ----------
34 | start : int or datetime.date
35 | First night to select, either in fact int format or as date
36 | end : int or datetime.date
37 | Last night to select, either in fact int format or as date
38 | engine: sqlalchemy.Engine
39 | The engine connected to the database.
40 | If None, fact.credentials.create_factdb_engine will be used to create one.
41 |
42 | Source: D. Neise, A. Biland. Also see github.com/dneise/about_fact_ontime
43 | '''
44 |
45 | query = RunInfo.select(
46 | RunInfo.fnight.alias('night'),
47 | RunInfo.frunid.alias('run_id'),
48 | RunInfo.frunstart.alias('start'),
49 | RunInfo.frunstop.alias('stop'),
50 | ontime.alias('ontime'),
51 | )
52 |
53 | if start is not None:
54 | start = to_night_int(start) if not isinstance(start, int) else start
55 | query = query.where(RunInfo.fnight >= start)
56 |
57 | if end is not None:
58 | end = to_night_int(end) if not isinstance(end, int) else end
59 | query = query.where(RunInfo.fnight <= end)
60 |
61 | df = read_into_dataframe(query, engine=engine)
62 |
63 | return df
64 |
65 |
66 | def get_ontime_by_source_and_runtype(engine=None):
67 | query = (
68 | RunInfo
69 | .select(
70 | peewee.fn.SUM(ontime).alias('ontime'),
71 | Source.fsourcename.alias('source'),
72 | RunType.fruntypename.alias('runtype')
73 | )
74 | .join(Source, on=Source.fsourcekey == RunInfo.fsourcekey)
75 | .switch(RunInfo)
76 | .join(RunType, on=RunType.fruntypekey == RunInfo.fruntypekey)
77 | .group_by(Source.fsourcename, RunType.fruntypename)
78 | )
79 | df = read_into_dataframe(query, engine or create_factdb_engine())
80 | df.set_index(['source', 'runtype'], inplace=True)
81 |
82 | return df
83 |
84 |
85 | def get_ontime_by_source(runtype=None, engine=None):
86 | query = (
87 | RunInfo
88 | .select(
89 | peewee.fn.SUM(ontime).alias('ontime'),
90 | Source.fsourcename.alias('source'),
91 | )
92 | .join(Source, on=Source.fsourcekey == RunInfo.fsourcekey)
93 | .switch(RunInfo)
94 | .join(RunType, on=RunType.fruntypekey == RunInfo.fruntypekey)
95 | )
96 | if runtype is not None:
97 | query = query.where(RunType.fruntypename == runtype)
98 |
99 | query = query.group_by(Source.fsourcename)
100 |
101 | df = read_into_dataframe(query, engine or create_factdb_engine())
102 | df.set_index('source', inplace=True)
103 |
104 | return df
105 |
-------------------------------------------------------------------------------- /fact/instrument/__init__.py: --------------------------------------------------------------------------------
1 | from .camera import (
2 | get_pixel_coords,
3 | get_pixel_dataframe,
4 | camera_distance_mm_to_deg,
5 | )
6 | from . import trigger
7 |
8 | __all__ = [
9 | 'get_pixel_coords', 'get_pixel_dataframe', 'camera_distance_mm_to_deg'
10 | ]
11 |
-------------------------------------------------------------------------------- /fact/instrument/camera.py: --------------------------------------------------------------------------------
1 | import pkg_resources as res
2 | import numpy as np
3 | from functools import lru_cache
4 | import pandas as pd
5 | from scipy.sparse import csr_matrix
6 | from scipy.spatial import cKDTree
7 |
8 | from .constants import (
9 | FOCAL_LENGTH_MM, PINCUSHION_DISTORTION_SLOPE,
10 | PIXEL_SPACING_MM, FOV_PER_PIXEL_DEG, N_PIXEL
11 | )
12 |
13 |
14 | def camera_distance_mm_to_deg(distance_mm):
15 | '''
16 | Transform a distance in mm in the camera plane
17 | to its approximate equivalent in degrees.
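For example, one pixel spacing (PIXEL_SPACING_MM = 9.5 mm) corresponds by
construction to FOV_PER_PIXEL_DEG, i.e. roughly 0.11 degrees::

    camera_distance_mm_to_deg(9.5)  # ~0.11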
18 | '''
19 |
20 | return distance_mm * FOV_PER_PIXEL_DEG / PIXEL_SPACING_MM
21 |
22 |
23 | pixel_mapping = np.genfromtxt(
24 | res.resource_filename('fact', 'resources/FACTmap111030.txt'),
25 | names=[
26 | 'softID', 'hardID', 'geom_i', 'geom_j',
27 | 'G-APD', 'V_op', 'HV_B', 'HV_C',
28 | 'pos_X', 'pos_Y', 'radius'
29 | ],
30 | dtype=None,
31 | )
32 |
33 | non_standard_pixel_chids = dict(
34 | dead=[927, 80, 873],
35 | crazy=[863, 297, 868],
36 | twins=[  # the signals of these pairs of pixels are the same
37 | (1093, 1094),
38 | (527, 528),
39 | (721, 722),
40 | ]
41 | )
42 |
43 |
44 | GEOM_2_SOFTID = {
45 | (i, j): soft for i, j, soft in zip(
46 | pixel_mapping['geom_i'], pixel_mapping['geom_j'], pixel_mapping['softID']
47 | )}
48 |
49 |
50 | def reorder_softid2chid(array):
51 | '''
52 | Returns view to the given array, remapped from softid order
53 | to chid order (e.g. MARS ordering to fact-tools ordering)
54 | '''
55 | return array[CHID_2_SOFTID]
56 |
57 |
58 | @lru_cache(maxsize=1)
59 | def get_pixel_dataframe():
60 | ''' return pixel mapping as pd.DataFrame
61 |
62 | '''
63 | pm = pd.DataFrame(pixel_mapping)
64 | pm.sort_values('hardID', inplace=True)
65 | # after sorting, the CHID is in principle the index
66 | # of pm, but I'd like to have it explicitly
67 | pm['CHID'] = np.arange(len(pm))
68 |
69 | pm['trigger_patch_id'] = pm['CHID'] // 9
70 | pm['bias_patch_id'] = pm['HV_B'] * 32 + pm['HV_C']
71 |
72 | bias_patch_sizes = pm.bias_patch_id.value_counts().sort_index()
73 | pm['bias_patch_size'] = bias_patch_sizes[pm.bias_patch_id].values
74 |
75 | pm['x'] = -pm.pos_Y.values * PIXEL_SPACING_MM
76 | pm['y'] = pm.pos_X.values * PIXEL_SPACING_MM
77 |
78 | pm['x_angle'] = np.rad2deg(
79 | np.arctan(pm.x / FOCAL_LENGTH_MM) *
80 | (1 + PINCUSHION_DISTORTION_SLOPE)
81 | )
82 | pm['y_angle'] = np.rad2deg(
83 | np.arctan(pm.y / FOCAL_LENGTH_MM) *
84 | (1 + PINCUSHION_DISTORTION_SLOPE)
85 | )
86 |
87 | return pm
88 |
89 |
90 | FOV_RADIUS = np.hypot(
91 | get_pixel_dataframe().x_angle, get_pixel_dataframe().y_angle
92 | ).max()
93 |
94 |
95 | patch_indices = get_pixel_dataframe()[[
96 | 'trigger_patch_id',
97 | 'bias_patch_id',
98 | 'bias_patch_size',
99 | ]].drop_duplicates().reset_index(drop=True)
100 |
101 |
102 | @np.vectorize
103 | def geom2soft(i, j):
104 | return GEOM_2_SOFTID[(i, j)]
105 |
106 |
107 | def softid2chid(softid):
108 | return hardid2chid(pixel_mapping['hardID'])[softid]
109 |
110 |
111 | def softid2hardid(softid):
112 | return pixel_mapping['hardID'][softid]
113 |
114 |
115 | def hardid2chid(hardid):
116 | crate = hardid // 1000
117 | board = (hardid // 100) % 10
118 | patch = (hardid // 10) % 10
119 | pixel = (hardid % 10)
120 | return pixel + 9 * patch + 36 * board + 360 * crate
121 |
122 |
123 | CHID_2_SOFTID = np.empty(1440, dtype=int)
124 | for softid in range(1440):
125 | hardid = pixel_mapping['hardID'][softid]
126 | chid = hardid2chid(hardid)
127 | CHID_2_SOFTID[chid] = softid
128 |
129 |
130 | def chid2softid(chid):
131 | return CHID_2_SOFTID[chid]
132 |
133 |
134 | def hardid2softid(hardid):
135 | return chid2softid(hardid2chid(hardid))
136 |
137 |
138 | def get_pixel_coords():
139 | '''
140 | Calculate the pixel coordinates from the standard pixel-map file
141 | by default it gets rotated by 90 degrees clockwise to show the same
142 | orientation as MARS and fact-tools
143 | '''
144 | df = get_pixel_dataframe()
145 |
146 | return df.x.values, df.y.values
147 |
148 |
149 | @lru_cache(maxsize=1)
150 | def bias_to_trigger_patch_map():
151 |
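# pixel_mapping rows are in softID order; argsort by hardID yields the
# softIDs arranged in CHID order (hardID and CHID share the same ordering)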
by_chid = pixel_mapping['hardID'].argsort()
152 |
153 | bias_channel = pixel_mapping[by_chid]['HV_B'] * 32 + pixel_mapping[by_chid]['HV_C']
154 |
155 | _, idx = np.unique(bias_channel, return_index=True)
156 |
157 | return bias_channel[np.sort(idx)]
158 |
159 |
160 | def combine_bias_patch_current_to_trigger_patch_current(bias_patch_currents):
161 | """
162 | For this to work, you need to know that the calibrated currents in FACT,
163 | which are delivered by the program FEEDBACK for all 320 bias patches,
164 | are given as "uA per pixel per bias patch", not as "uA per bias patch".
165 | So if you want to combine these currents into one value given as
166 | "uA per trigger patch" or "uA per pixel per trigger patch", you need to know
167 | the number of pixels in a bias patch.
168 |
169 | Luckily this is given by our patch_indices DataFrame.
170 | """
171 |
172 | pi = patch_indices
173 | fourers = pi[pi.bias_patch_size == 4].sort_values('trigger_patch_id')
174 | fivers = pi[pi.bias_patch_size == 5].sort_values('trigger_patch_id')
175 |
176 | b_c = bias_patch_currents  # just to shorten the name
177 | t_c = (
178 | b_c[fourers.bias_patch_id.values] * 4 / 9
179 | + b_c[fivers.bias_patch_id.values] * 5 / 9
180 | )
181 | trigger_patch_currents = t_c  # unshorten the name
182 | return trigger_patch_currents
183 |
184 |
185 | def take_apart_trigger_values_for_bias_patches(trigger_rates):
186 | """
187 | The assumption is that you have 160 values from trigger patches,
188 | such as the trigger rates per patch, or the trigger threshold per patch,
189 | or whatever else you can find per patch.
190 |
191 | And for some reason you say: well, this is really valid for the entire
192 | trigger patch, but I believe it also correlates with the bias patch.
193 | """
194 | pi = patch_indices.sort_values('bias_patch_id')
195 |
196 | return trigger_rates[pi.trigger_patch_id.values]
197 |
198 |
199 | @lru_cache(maxsize=1)
200 | def get_neighbor_matrix():
201 | '''
202 | Returns a sparse boolean neighbor matrix with n[chid, other_chid] = is neighbor.
203 | '''
204 | xy = get_pixel_dataframe().loc[:, ['x', 'y']].values
205 | tree = cKDTree(xy)
206 | neighbors = tree.query_ball_tree(tree, r=10)
207 |
208 | n_neighbors = [len(n) for n in neighbors]
209 | col = np.repeat(np.arange(N_PIXEL), n_neighbors)
210 | row = [pix for n in neighbors for pix in n]
211 | data = np.ones(len(row))
212 | m = csr_matrix((data, (row, col)), shape=(N_PIXEL, N_PIXEL), dtype=bool)
213 | m.setdiag(False)
214 | return m
215 |
216 |
217 | @lru_cache(maxsize=1)
218 | def get_num_neighbors():
219 | '''
220 | Return a numpy array with the number of neighbors for each pixel in chid order
221 | '''
222 | return get_neighbor_matrix().sum(axis=0).A1
223 |
224 |
225 | @lru_cache()
226 | def get_border_pixel_mask(width=1):
227 | if width == 1:
228 | return get_num_neighbors() < 6
229 |
230 | n = get_neighbor_matrix().todense().A
231 |
232 | return (n & get_border_pixel_mask(width - 1)).any(axis=1)
233 |
-------------------------------------------------------------------------------- /fact/instrument/constants.py: --------------------------------------------------------------------------------
1 | '''
2 | Constants describing the FACT telescope
3 | '''
4 | import numpy as np
5 | from astropy.coordinates import EarthLocation
6 | import astropy.units as u
7 |
8 | N_PIXEL = 1440
9 | #: The inner diameter of the hexagonal pixels in mm.
10 | #: This is also the grid constant of the hex grid.
11 | PIXEL_SPACING_MM = 9.5
12 |
13 | #:The segmented imaging reflector of FACT is well described using the thin lens
14 | #:equation. However, the most prominent deviation from the thin lens is the
15 | #:imaging reflector's pincushion distortion. The pincushion distortion projects
16 | #:incoming light further away from the optical axis than expected from the
17 | #:thin lens equation. This additional outward pincushion distortion gets
18 | #:stronger with increasing distance to the optical axis.
19 | #:
20 | #:``actual_projection_angle = (1 + PINCUSHION_DISTORTION_SLOPE) * thin_lens_prediction_angle``
21 | #:
22 | #:Example:
23 | #:According to the thin lens model one expects incoming light of 1.5 deg
24 | #:incident angle relative to the optical axis to be projected at:
25 | #:
26 | #:``tan(1.5deg)*focal_length = 128.02mm``
27 | #:
28 | #:distance to the optical axis on the image sensor screen.
29 | #:But the reflector actually projects this incoming light further to the
30 | #:outside, to:
31 | #:
32 | #:``128.02mm * (1 + PINCUSHION_DISTORTION_SLOPE) = 130.67mm``
33 | #:
34 | #:As one can see, the correction is minor. It is only half a pixel at the outer
35 | #:rim of the field of view.
36 | #:
37 | #:.. image:: figures/pincushin_distortion_slope.png
38 | PINCUSHION_DISTORTION_SLOPE = 0.031/1.5
39 | LATITUDE_DEC_DEG = 28.7616 #: FACT's latitude in decimal degrees
40 | LONGITUDE_DEC_DEG = -17.8911 #: FACT's longitude in decimal degrees
41 | ALTITUDE_ASL_M = 2200 #: FACT's altitude above sea level in meters
42 | FOCAL_LENGTH_MM = 4889 #: FACT's reflector focal length in mm.
43 | #: Field of view of a single pixel in decimal degrees
44 | FOV_PER_PIXEL_DEG = np.rad2deg(2 * np.arctan(0.5 * PIXEL_SPACING_MM / FOCAL_LENGTH_MM))
45 |
46 | #: astropy.coordinates.EarthLocation for the fact position
47 | LOCATION = EarthLocation(
48 | lat=LATITUDE_DEC_DEG * u.deg, lon=LONGITUDE_DEC_DEG * u.deg,
49 | height=ALTITUDE_ASL_M * u.m
50 | )
51 |
-------------------------------------------------------------------------------- /fact/instrument/trigger.py: --------------------------------------------------------------------------------
1 | '''
2 | The various trigger types of the FACT camera.
3 |
4 | For a full overview of the FACT trigger types, see the
5 | [PhD thesis of Patrick Vogler, table 4.3.b, page 93]
6 | (http://e-collection.library.ethz.ch/eserv/eth:48381/eth-48381-02.pdf)
7 |
8 | The trigger type IDs also encode the N-out-of-4 trigger pattern logic, but here
9 | only the trigger type IDs for N=1 are listed, as it is the most common case.
10 | '''
11 |
12 | #: Self triggered, these events are likely to contain photon clusters.
13 | PHYSICS = 4
14 | #: These events are likely to contain only night sky background photons.
15 | PEDESTAL = 1024
16 | #: The external light pulser which is located in the center of the reflector
17 | #: dish.
18 | LIGHT_PULSER_EXTERNAL = 260
19 | #: The internal light pulser was deactivated in May 2014.
20 | LIGHT_PULSER_INTERNAL = 512
21 | #: Not sure, seems to be for the DRS4 time calibration.
22 | TIME_CALIBRATION = 33792
23 | #: These events are likely to contain only night sky background photons.
24 | #: Here the GPS module is used as reference clock. The GPS is sometimes
25 | #: connected to EXT1 and sometimes to EXT2.
26 | EXT1 = 1
27 | #: See EXT1.
28 | EXT2 = 2
-------------------------------------------------------------------------------- /fact/path.py: --------------------------------------------------------------------------------
1 | import os.path
2 | import re
3 |
4 | __all__ = [
5 | 'parse',
6 | 'template_to_path',
7 | 'tree_path',
8 | ]
9 |
10 |
11 | def template_to_path(night, run, template, **kwargs):
12 | '''Make a path from the template and (night, run), using kwargs where needed.
13 |
14 | night: int
15 | e.g. night = 20160102
16 | is used to create the Y,M,D,N template values as:
17 | Y = "2016"
18 | M = "01"
19 | D = "02"
20 | N = "20160102"
21 | run: int or None
22 | e.g. run = 1
23 | is used to create the template value R = "001"
24 | template: string
25 | e.g. "/foo/bar/{Y}/baz/{R}_{M}_{D}.gz.{N}"
26 | kwargs:
27 | if the template contains placeholders other than Y,M,D,N,R,
28 | kwargs are used to format these.
29 | '''
30 | night = '{:08d}'.format(night)
31 | if run is not None:
32 | kwargs['R'] = '{:03d}'.format(run)
33 |
34 | kwargs['N'] = night
35 | kwargs['Y'] = night[0:4]
36 | kwargs['M'] = night[4:6]
37 | kwargs['D'] = night[6:8]
38 | return template.format(**kwargs)
39 |
40 |
41 | def tree_path(night, run, prefix, suffix):
42 | '''Make a tree_path from a (night, run) for given prefix, suffix
43 |
44 | night: int
45 | eg. 20160101
46 | run: int or None
47 | eg. 11
48 | prefix: string
49 | eg. '/fact/raw' or '/fact/aux'
50 | suffix: string
51 | eg. '.fits.fz' or '.log' or '.AUX_FOO.fits'
52 | '''
53 | if run is not None:
54 | base_name = '{N}_{R}'
55 | else:
56 | base_name = '{N}'
57 |
58 | template = os.path.join(
59 | prefix,
60 | '{Y}',
61 | '{M}',
62 | '{D}',
63 | base_name + suffix)
64 | return template_to_path(night, run, template)
65 |
66 |
67 | path_regex = re.compile(
68 | r'(?P<prefix>.*?)' +
69 | r'((/\d{4})(/\d{2})(/\d{2}))?/' +
70 | r'(?P<night>\d{8})' +
71 | r'(_?(?P<run>\d{3}))?' +
72 | r'(?P<suffix>.*)'
73 | )
74 |
75 |
76 | def parse(path):
77 | '''Return a dict with {prefix, suffix, night, run} parsed from path.
78 |
79 | path: string
80 | any (absolute) path should be fine.
81 | '''
82 | d = path_regex.match(path).groupdict()
83 | if d['run'] is not None:
84 | d['run'] = int(d['run'])
85 | d['night'] = int(d['night'])
86 | return d
87 |
-------------------------------------------------------------------------------- /fact/plotting/__init__.py: --------------------------------------------------------------------------------
1 | '''
2 | This module contains functions to plot fact data into the camera view.
3 |
4 | The Viewer class starts a GUI with tkinter that lets you click through
5 | events.
You will only have access to the Viewer if you have installed
6 | matplotlib with tcl/tk support.
7 |
8 | Currently these functions only work with shape (num_events, 1440), so
9 | on a per-pixel basis.
10 | '''
11 | from .core import mark_pixel, camera, pixelids
12 | from .analysis import plot_excess_rate
13 |
14 | __all__ = [
15 | 'camera',
16 | 'mark_pixel',
17 | 'pixelids',
18 | 'plot_excess_rate',
19 | ]
20 |
-------------------------------------------------------------------------------- /fact/plotting/analysis.py: --------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 | from matplotlib.dates import DateFormatter
3 | import matplotlib.transforms as mtransforms
4 | from mpl_toolkits.axes_grid1.parasite_axes import SubplotHost
5 | import numpy as np
6 | from ..time import MJD_EPOCH
7 |
8 |
9 | # Matplotlib uses the ordinal for its internal date representation.
10 | # To get from ordinal to MJD, we shift by the ordinal value
11 | # of the MJD_EPOCH.
12 | MJD_AXES_TRANSFORM = (
13 | mtransforms.Affine2D().translate(MJD_EPOCH.toordinal(), 0)
14 | )
15 |
16 |
17 | def create_datetime_mjd_axes(fig=None, *args, **kwargs):
18 | '''
19 | Create a plot with two x-axes, the bottom axis using
20 | dates, the top axis using MJD.
21 |
22 | Parameters
23 | ----------
24 | fig: matplotlib.Figure or None
25 | the figure to use, if None use plt.gcf()
26 |
27 | Returns
28 | -------
29 | ax: mpl_toolkits.axes_grid1.parasite_axes.SubplotHost
30 | The ax for the dates
31 | mjd_ax: mpl_toolkits.axes_grid1.parasite_axes.ParasiteAxis
32 | The parasite axis showing MJD
33 |
34 | '''
35 | if fig is None:
36 | fig = plt.gcf()
37 |
38 | if not args:  # args is a tuple, so comparing it to [] would never match
39 | ax = SubplotHost(fig, 1, 1, 1, **kwargs)
40 | else:
41 | ax = SubplotHost(fig, *args, **kwargs)
42 |
43 | # The second axis shows MJD if the first axis uses dates
44 | mjd_ax = ax.twin(MJD_AXES_TRANSFORM)
45 | mjd_ax.set_viewlim_mode('transform')
46 |
47 | # disable unwanted axes
48 | mjd_ax.axis['right'].toggle(ticklabels=False, ticks=False)
49 | mjd_ax.axis['bottom'].toggle(ticklabels=False, ticks=False)
50 | mjd_ax.axis['bottom'].toggle(label=False)
51 |
52 | # add/remove label
53 | mjd_ax.axis['top'].set_label('MJD')
54 |
55 | # Deactivate offset
56 | mjd_ax.ticklabel_format(useOffset=False)
57 |
58 | fig.add_subplot(ax)
59 |
60 | return ax, mjd_ax
61 |
62 |
63 | def plot_excess_rate(binned_runs, outputfile=None, mjd=True):
64 | '''
65 | Create an excess rate plot from given data
66 |
67 | Parameters
68 | ----------
69 | binned_runs: pd.DataFrame
70 | Binned data as returned by `fact.analysis.bin_runs`
71 | outputfile: path
72 | if not None, the plot is saved to this file
73 |
74 | Returns
75 | -------
76 | ax_excess: matplotlib.axes.Axes
77 | the matplotlib Axes for the excess rate plot
78 | ax_significance: matplotlib.axes.Axes
79 | the matplotlib Axes for the significance plot
80 | '''
81 | fig = plt.figure()
82 |
83 | gridspec = plt.GridSpec(8, 1)
84 | spec_sig = gridspec.new_subplotspec((6, 0), rowspan=2)
85 | spec_rate = gridspec.new_subplotspec((1, 0), rowspan=5)
86 |
87 | ax_sig = plt.subplot(spec_sig)
88 |
89 | if mjd is True:
90 | ax_rate, ax_rate_mjd = create_datetime_mjd_axes(
91 | fig, spec_rate, sharex=ax_sig
92 | )
93 | else:
94 | ax_rate = plt.subplot(spec_rate, sharex=ax_sig)
95 |
96 | colors = [e['color'] for e in plt.rcParams['axes.prop_cycle']]
97 |
98 | labels = []
99 | plots = []
100 |
101 | groups = list(sorted(
102 | binned_runs.groupby('source'),
103 | key=lambda g: g[1].time_mean.min()
104 | ))
105 |
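# one errorbar series per source, each in a stable color; the sources are
# sorted by the time of their first bin, so the legend follows the data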
106 | for (name, group), color in zip(groups, colors):
107 | if len(group.index) == 0:
108 | continue
109 |
110 | labels.append(name)
111 | plots.append(ax_rate.errorbar(
112 | x=group.time_mean.values,
113 | y=group.excess_rate_per_h.values,
114 | xerr=group.time_width.values / 2,
115 | yerr=group.excess_rate_err.values,
116 | label=name,
117 | linestyle='',
118 | mec='none',
119 | color=color,
120 | ))
121 |
122 | ax_sig.errorbar(
123 | x=group.time_mean.values,
124 | y=group.significance.values,
125 | xerr=group.time_width.values / 2,
126 | label=name,
127 | linestyle='',
128 | mec='none',
129 | color=color,
130 | )
131 |
132 | fig.legend(
133 | plots,
134 | labels,
135 | loc='upper center',
136 | ncol=3,
137 | columnspacing=0.5,
138 | numpoints=1,
139 | handletextpad=0.1,
140 | bbox_to_anchor=[0.5, 0.99],
141 | )
142 |
143 | ax_rate.set_ylabel(r'Excess Event Rate / $\mathrm{h}^{-1}$')
144 | ax_sig.set_ylabel(r'$S_{\mathrm{Li/Ma}} \,\, / \,\, \sigma$')
145 |
146 | ymax = max(3.25, np.ceil(ax_sig.get_ylim()[1]))
147 | ax_sig.set_ylim(0, ymax)
148 | ax_sig.set_yticks(np.arange(0, ymax + 0.1, ymax // 4 + 1))
149 |
150 | plt.setp(ax_rate.get_xticklabels(), visible=False)
151 | plt.setp(ax_sig.get_xticklabels(), rotation=30, va='top', ha='right')
152 |
153 | if mjd is True:
154 | # ax_rate_mjd only exists if mjd is True
155 | plt.setp(ax_rate_mjd.get_xticklabels(), visible=False)
156 | ax_rate_mjd.set_xlabel('')
157 |
158 | if binned_runs.night.nunique() <= 7:
159 | ax_sig.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d %H:%M'))
160 | else:
161 | ax_sig.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d'))
162 |
163 | fig.tight_layout()
164 |
165 | if mjd is True:
166 | plt.setp(ax_rate_mjd.get_xticklabels(), visible=True)
167 |
168 | if outputfile is not None:
169 | fig.savefig(outputfile)
170 |
171 | if mjd is True:
172 | return ax_rate, ax_sig, ax_rate_mjd
173 | else:
174 | return ax_rate, ax_sig
175 |
-------------------------------------------------------------------------------- /fact/plotting/core.py: --------------------------------------------------------------------------------
1 | from matplotlib.patches import RegularPolygon
2 | from matplotlib.collections import PatchCollection
3 |
4 | import numpy as np
5 | import matplotlib.pyplot as plt
6 |
7 | from ..instrument import get_pixel_coords
8 | from .utils import calc_linewidth, calc_text_size
9 |
10 | lastpixel = -1
11 |
12 |
13 | def onpick(event):
14 | global lastpixel
15 | hitpixel = event.ind[0]
16 | if hitpixel != lastpixel:
17 | lastpixel = hitpixel
18 | plot = event.artist
19 |
20 | ecols = plot.get_edgecolors()
21 | before = np.array(ecols[hitpixel])
22 | ecols[hitpixel] = [1, 0, 0, 1]
23 | plot.set_edgecolors(ecols)
24 | plt.draw()
25 | ecols[hitpixel] = before
26 |
27 | print('chid:', hitpixel)
28 | print('value', plot.get_array()[hitpixel])
29 |
30 |
31 | def camera(
32 | data,
33 | ax=None,
34 | cmap='gray',
35 | vmin=None,
36 | vmax=None,
37 | pixelcoords=None,
38 | edgecolor='k',
39 | linewidth=None,
40 | picker=False,
41 | ):
42 | '''
43 | Parameters
44 | ----------
45 |
46 | data : array like with shape 1440
47 | the data you want to plot into the pixels
48 | ax : a matplotlib.axes.Axes instance or None
49 | The matplotlib axes in which to plot.
If None, plt.gca() is used
50 | cmap : str or matplotlib colormap instance
51 | the colormap to use for plotting the 'dataset'
52 | [default: gray]
53 | vmin : float
54 | the minimum for the colorbar, if None min(data) is used
55 | [default: None]
56 | vmax : float
57 | the maximum for the colorbar, if None max(data) is used
58 | [default: None]
59 | pixelcoords : the coordinates for the pixels in form [x-values, y-values]
60 | if None, the package resource is used
61 | [default: None]
62 | edgecolor : any matplotlib color
63 | the color around the pixel
64 | picker: bool
65 | if True, the pixels are made clickable to show information
66 | '''
67 |
68 | if ax is None:
69 | ax = plt.gca()
70 |
71 | ax.set_aspect('equal')
72 |
73 | if picker is True:
74 | fig = ax.get_figure()
75 | fig.canvas.mpl_connect('pick_event', onpick)
76 |
77 | # if the axes limit is still (0,1) assume new axes
78 | if ax.get_xlim() == (0, 1) and ax.get_ylim() == (0, 1):
79 | ax.set_xlim(-200, 200)
80 | ax.set_ylim(-200, 200)
81 |
82 | if pixelcoords is None:
83 | pixel_x, pixel_y = get_pixel_coords()
84 | else:
85 | pixel_x, pixel_y = pixelcoords
86 |
87 | if vmin is None:
88 | vmin = np.min(data)
89 | if vmax is None:
90 | vmax = np.max(data)
91 |
92 | edgecolors = np.array(1440 * [edgecolor])
93 | patches = []
94 | for x, y, ec in zip(pixel_x, pixel_y, edgecolors):
95 | patches.append(
96 | RegularPolygon(
97 | xy=(x, y),
98 | numVertices=6,
99 | radius=9.51/np.sqrt(3),
100 | orientation=0.,  # in radians
101 | )
102 | )
103 |
104 | if linewidth is None:
105 | linewidth = calc_linewidth(ax=ax)
106 |
107 | collection = PatchCollection(patches, picker=0)
108 | collection.set_linewidth(linewidth)
109 | collection.set_edgecolors(edgecolors)
110 | collection.set_cmap(cmap)
111 | collection.set_array(data)
112 | collection.set_clim(vmin, vmax)
113 |
114 | ax.add_collection(collection)
115 | plt.draw_if_interactive()
116 | return collection
117 |
118 |
119 | def pixelids(ax=None, size=None, pixelcoords=None, *args, **kwargs):
120 | '''
121 | plot the chids into the pixels
122 | '''
123 | if pixelcoords is None:
124 | pixel_x, pixel_y = get_pixel_coords()
125 | else:
126 | pixel_x, pixel_y = pixelcoords
127 |
128 | if ax is None:
129 | ax = plt.gca()
130 |
131 | if size is None:
132 | size = calc_text_size(ax)
133 |
134 | x1, x2 = ax.get_xlim()
135 | y1, y2 = ax.get_ylim()
136 |
137 | maskx = np.logical_and(pixel_x + 4.5 < x2, pixel_x - 4.5 > x1)
138 | masky = np.logical_and(pixel_y + 4.5 < y2, pixel_y - 4.5 > y1)
139 | mask = np.logical_and(maskx, masky)
140 |
141 | chids = np.arange(1440)
142 | for px, py, chid in zip(pixel_x[mask], pixel_y[mask], chids[mask]):
143 | ax.text(
144 | px, py,
145 | str(chid),
146 | size=size,
147 | va='center',
148 | ha='center',
149 | **kwargs
150 | )
151 |
152 | plt.draw_if_interactive()
153 |
154 |
155 | def mark_pixel(pixels, color='g', ax=None, linewidth=None):
156 | ''' surrounds pixels given by pixels with a border '''
157 | pixel_x, pixel_y = get_pixel_coords()
158 |
159 | if ax is None:
160 | ax = plt.gca()
161 |
162 | patches = []
163 | for xy in zip(pixel_x[pixels], pixel_y[pixels]):
164 | patches.append(
165 | RegularPolygon(
166 | xy=xy,
167 | numVertices=6,
168 | radius=9.5 / np.sqrt(3),
169 | orientation=0.,  # in radians
170 | fill=False,
171 | )
172 | )
173 |
174 | if linewidth is None:
175 | linewidth = calc_linewidth(ax=ax)
176 |
177 | collection = PatchCollection(patches, picker=0)
178 | collection.set_linewidth(linewidth)
179 | collection.set_edgecolors(color)
180 |
collection.set_facecolor('none')
181 |
182 | ax.add_collection(collection)
183 |
184 | plt.draw_if_interactive()
185 | return collection
186 |
-------------------------------------------------------------------------------- /fact/plotting/utils.py: --------------------------------------------------------------------------------
1 | import matplotlib.pyplot as plt
2 |
3 | __all__ = ['calc_linewidth', 'calc_text_size']
4 |
5 |
6 | def calc_linewidth(ax=None):
7 | """
8 | calculate the correct linewidth for the fact pixels,
9 | so that the patches fit nicely together
10 |
11 | Parameters
12 | ----------
13 | ax : matplotlib Axes instance
14 | the axes you want to calculate the size for
15 |
16 | Returns
17 | -------
18 | linewidth : float
19 | """
20 |
21 | if ax is None:
22 | ax = plt.gca()
23 |
24 | fig = ax.get_figure()
25 |
26 | bbox = ax.get_window_extent().transformed(fig.dpi_scale_trans.inverted())
27 | width, height = bbox.width, bbox.height
28 |
29 | x1, x2 = ax.get_xlim()
30 | y1, y2 = ax.get_ylim()
31 |
32 | x_stretch = (x2 - x1) / 400
33 | y_stretch = (y2 - y1) / 400
34 |
35 | linewidth = min(width / x_stretch, height / y_stretch) / 10
36 | return linewidth
37 |
38 |
39 | def calc_text_size(ax=None):
40 | if ax is None:
41 | ax = plt.gca()
42 | linewidth = calc_linewidth(ax)
43 |
44 | textsize = linewidth * 5
45 |
46 | return textsize
47 |
-------------------------------------------------------------------------------- /fact/qla.py: --------------------------------------------------------------------------------
1 | import numpy as np
2 |
3 | from .factdb import (
4 | read_into_dataframe,
5 | AnalysisResultsRunLP as QLA,
6 | RunInfo,
7 | Source
8 | )
9 |
10 |
11 | def get_qla_data(
12 | first_night=None,
13 | last_night=None,
14 | sources=None,
15 | database_engine=None
16 | ):
17 | '''
18 | Request QLA results from our database
19 |
20 | first_night: int or None
21 | If given, first night to query as FACT night integer.
22 | last_night: int or None
23 | If given, last night to query as FACT night integer.
24 | sources: iterable[str]
25 | If given, only these sources will be requested.
26 | Names have to match Source.fSourceName in our db.
27 | database_engine: sqlalchemy.Engine
28 | If given, the connection to use for the query.
29 | Else, `fact.credentials.create_factdb_engine` will be used to create it.
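Example (a sketch; the source name must match `fSourceName` in the
database)::

    qla = get_qla_data(
        first_night=20150101, last_night=20150131, sources=['Mrk 501']
    )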
30 | ''' 31 | 32 | query = QLA.select( 33 | QLA.frunid.alias('run_id'), 34 | QLA.fnight.alias('night'), 35 | QLA.fnumexcevts.alias('n_excess'), 36 | QLA.fnumsigevts.alias('n_on'), 37 | (QLA.fnumbgevts * 5).alias('n_off'), 38 | QLA.fontimeaftercuts.alias('ontime'), 39 | RunInfo.frunstart.alias('run_start'), 40 | RunInfo.frunstop.alias('run_stop'), 41 | Source.fsourcename.alias('source'), 42 | Source.fsourcekey.alias('source_key'), 43 | ) 44 | 45 | on = (RunInfo.fnight == QLA.fnight) & (RunInfo.frunid == QLA.frunid) 46 | query = query.join(RunInfo, on=on) 47 | query = query.join(Source, on=RunInfo.fsourcekey == Source.fsourcekey) 48 | 49 | if first_night is not None: 50 | query = query.where(QLA.fnight >= first_night) 51 | if last_night is not None: 52 | query = query.where(QLA.fnight <= last_night) 53 | 54 | if sources is not None: 55 | query = query.where(Source.fsourcename.in_(sources)) 56 | 57 | runs = read_into_dataframe(query, engine=database_engine) 58 | 59 | # drop rows with NaNs from the table, these are unfinished qla results 60 | runs.dropna(inplace=True) 61 | 62 | runs.sort_values('run_start', inplace=True) 63 | 64 | return runs 65 | -------------------------------------------------------------------------------- /fact/resources/known_problems_from_trac.txt: -------------------------------------------------------------------------------- 1 | = New Bad Pixel = 2 | 3 | For small time periods, two other pixels showed a wrong signal (seems to be the underflow value of the ADC) 4 | 5 | || Soft ID || continuous Hard ID || Bias Ch(B/C) || Time Period || 6 | || || || || || 7 | || || 729 || || only on 8.1.2015 || 8 | || || 750 || || 8.1.2015 - 31.1.2015 || 9 | 10 | Logbook entry of 8.1.2015: 11 | https://www.fact-project.org/logbook/showthread.php?tid=2943 12 | 13 | == Patch with slightly higher rate 14 | 15 | * 2|5|3 aka 103 aka 40 16 | 17 | The current readout of bias channel 262 is identical to the readout of 263. Apart from that, it seems to work fine. 18 | 19 | == Broken Drs Board == 20 | 21 | first occurrence: 2014/11/15[[BR]] 22 | repaired: 2015/05/26[[BR]] 23 | 24 | pixels in affected board: [[BR]] 25 | SOFTIDs: 1193 1194 1195 1391 1392 1393 1304 1305 1306[[BR]] 26 | CHIDs: 720, 721, 722, 723, 724, 725, 726, 727, 728 (not the same order as the SOFTIDs!) 27 | 28 | Important threads: 29 | 30 | https://www.fact-project.org/logbook/showthread.php?tid=3521 31 | 32 | == Dead (and suspicious bias channels) == 33 | 34 | With beginning of February 2015 two bias patches got some problems. 35 | I spot-checked the voltage curves (at interleaved lightpulser events) of the pixels and got two different behaviours: 36 | 37 | - Sometimes the pixels showed no signal at all, except for electronic noise. 38 | - Sometimes the pixels showed lowered signals. 39 | 40 | || Patch CHIDs || no Bias voltage || lowered bias voltage || normal signal || comments || 41 | || || || || || 42 | || 171,172,173,174 || 6.2.-11.2. ; 16.2. - now || 12.2. - 14.2. || beginning - 31.1. || checked one day each month, until 2.11. || 43 | || 184,185,186,187,188 || no run found || 11.2.-13.2. ; 16.2. ; 20.2. ; 15.3. ; 20.5. ; 15.8. ; 2.11. || beginning - 15.2. ; 22.2. ; 23.2. ; 26.2. ; 10.4. ; 15.6. ; 15.7. ; 15.9. ; 14.10. || checked one day each month, until 2.11. 
|| 44 | 45 | Dominik showed the bias voltage of the two suspicious channels in this thread: 46 | 47 | https://www.fact-project.org/logbook/showthread.php?tid=3564&pid=19495 48 | 49 | The following two telcons have had the bias patches on the agenda: 50 | 51 | 09.09.2015: https://www.fact-project.org/logbook/showthread.php?tid=3568 52 | 53 | 23.09.2015: https://www.fact-project.org/logbook/showthread.php?tid=3583 54 | 55 | 56 | Since 13.03.2016: 57 | 58 | * bias channel 272 (i.e. board 8, channel 16) has a shortcut ( approx 1.3 k ohms) 59 | 60 | -------------------------------------------------------------------------------- /fact/time.py: -------------------------------------------------------------------------------- 1 | ''' 2 | This module contains functions to deal with several time formats 3 | in astronomy in general and for the FACT telescope in particular. 4 | 5 | FACT uses a Modified Julian Date with an epoch of 1970-01-01T00:00Z, 6 | same as unix time. We will call this FJD. 7 | Most of the time, e.g. in aux fits files, a column called `Time` will use 8 | this modified julian date and also give the reference in the fits header 9 | keyword `MJDREF = 40587`, unit as `TIMEUNIT = d` and `TIMESYS = UTC`. 10 | ''' 11 | from datetime import datetime, timedelta, timezone 12 | import warnings 13 | 14 | import dateutil 15 | import dateutil.parser 16 | import numpy as np 17 | 18 | import pandas as pd 19 | 20 | OFFSET = (datetime(1970, 1, 1) - datetime(1, 1, 1)).days 21 | 22 | UNIX_EPOCH = datetime(1970, 1, 1, 0, 0, tzinfo=timezone.utc) 23 | MJD_EPOCH = datetime(1858, 11, 17, 0, 0, tzinfo=timezone.utc) 24 | 25 | 26 | def datetime_to_numpy(dt): 27 | ''' 28 | Convert a python datetime object to numpy.datetime64 29 | ''' 30 | return np.array(dt.timestamp()).astype('datetime64[s]') 31 | 32 | 33 | def unixtime_to_mjd(unixtime): 34 | ''' 35 | Convert a unix timestamp to mjd 36 | ''' 37 | return (unixtime - (MJD_EPOCH - UNIX_EPOCH).total_seconds()) / 3600 / 24 38 | 39 | 40 | def mjd_to_unixtime(mjd): 41 | ''' 42 | Convert an mjd timestamp to unix 43 | ''' 44 | return (mjd + (MJD_EPOCH - UNIX_EPOCH).total_seconds()) * 3600 * 24 45 | 46 | 47 | def datetime_to_mjd(dt, epoch=MJD_EPOCH): 48 | ''' 49 | Convert a datetime to julian date float. 50 | This function can handle python dates, numpy arrays and pandas Series. 51 | 52 | Parameters 53 | ---------- 54 | dt: datetime, np.ndarray[datetime64], pd.DateTimeIndex, pd.Series[datetime64] 55 | The datetime object to convert to mjd 56 | epoch: datetime 57 | The epoch, default is classic MJD (1858-11-17T00:00) 58 | ''' 59 | # handle numpy arrays 60 | if isinstance(dt, np.ndarray): 61 | jd_ns = (dt - datetime_to_numpy(epoch)).astype('timedelta64[ns]').astype(float) 62 | return jd_ns / 1e9 / 3600 / 24 63 | 64 | # assume datetimes without timezone are utc 65 | if isinstance(dt, datetime): 66 | if dt.tzinfo is None: 67 | dt = dt.replace(tzinfo=timezone.utc) 68 | elif isinstance(dt, pd.DatetimeIndex): 69 | if dt.tz is None: 70 | dt = dt.tz_localize(timezone.utc) 71 | elif isinstance(dt, pd.Series): 72 | if dt.dt.tz is None: 73 | dt = dt.dt.tz_localize(timezone.utc) 74 | return (dt - epoch).dt.total_seconds() / 24 / 3600 75 | 76 | return (dt - epoch).total_seconds() / 24 / 3600 77 | 78 | 79 | def mjd_to_datetime(jd, epoch=MJD_EPOCH): 80 | ''' 81 | Convert a julian date float to datetime. 82 | This function can handle python int, float, numpy arrays and pandas Series. 
83 | 84 | Parameters 85 | ---------- 86 | dt: int, float, np.ndarray, pd.Series 87 | The datetime object to convert to mjd 88 | epoch: datetime 89 | The epoch, default is classic MJD (1858-11-17T00:00) 90 | ''' 91 | if isinstance(jd, (int, float)): 92 | delta = timedelta(microseconds=jd * 24 * 3600 * 1e6) 93 | return epoch + delta 94 | 95 | if isinstance(jd, pd.Series): 96 | delta = (jd * 24 * 3600 * 1e9).astype('timedelta64[ns]') 97 | return epoch + delta 98 | 99 | # other types will be returned as numpy array if possible 100 | jd = np.asanyarray(jd) 101 | delta = (jd * 24 * 3600 * 1e9).astype('timedelta64[ns]') 102 | return datetime_to_numpy(epoch) + delta 103 | 104 | 105 | def fjd_to_datetime(fjd): 106 | ''' 107 | Convert a FACT julian date float to datetime, epoch is 1970-01-01T00:00Z 108 | This function can handle python int, float, numpy arrays and pandas Series. 109 | ''' 110 | return mjd_to_datetime(fjd, epoch=UNIX_EPOCH) 111 | 112 | 113 | def datetime_to_fjd(dt): 114 | ''' 115 | Convert a datetime to FACT julian date float, epoch is 1970-01-01T00:00Z. 116 | This function can handle python dates, numpy arrays and pandas Series. 117 | ''' 118 | return datetime_to_mjd(dt, epoch=UNIX_EPOCH) 119 | 120 | 121 | def iso_to_datetime(iso): 122 | ''' 123 | parse iso8601 to timezone aware datetime instance, 124 | if timezone specification is missing, UTC is assumed. 125 | ''' 126 | if isinstance(iso, (bytes, bytearray)): 127 | iso = iso.decode('ascii') 128 | 129 | if isinstance(iso, str): 130 | dt = dateutil.parser.parse(iso) 131 | if dt.tzinfo is None: 132 | dt = dt.replace(tzinfo=dateutil.tz.tzutc()) 133 | return dt 134 | 135 | if isinstance(iso, pd.Series): 136 | if isinstance(iso.iloc[0], bytes): 137 | iso = iso.str.decode('ascii') 138 | 139 | return pd.Series(pd.to_datetime(iso)) 140 | 141 | 142 | def to_night(timestamp=None): 143 | ''' 144 | gives the date for a day change at noon instead of midnight 145 | ''' 146 | if timestamp is None: 147 | timestamp = datetime.utcnow() 148 | if timestamp.hour < 12: 149 | timestamp = timestamp - timedelta(days=1) 150 | return timestamp 151 | 152 | 153 | def to_night_int(date): 154 | ''' 155 | return FACT night integer for date 156 | ''' 157 | date -= timedelta(days=0.5) 158 | if isinstance(date, pd.Series): 159 | return date.dt.year * 10000 + date.dt.month * 100 + date.dt.day 160 | return date.year * 10000 + date.month * 100 + date.day 161 | -------------------------------------------------------------------------------- /github_deploy_key.enc: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fact-project/pyfact/2f204dbf4a90bfa84899e2b8d6aaed891d2b09bc/github_deploy_key.enc -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | matplotlib >= 1.4 2 | numpy 3 | scipy 4 | -------------------------------------------------------------------------------- /resources/PatchList.txt: -------------------------------------------------------------------------------- 1 | #PatchSoftID ContPatchHardID 2 | 145 1 3 | 125 2 4 | 115 3 5 | 103 4 6 | 117 5 7 | 149 6 8 | 129 7 9 | 158 8 10 | 105 9 11 | 151 10 12 | 81 11 13 | 58 12 14 | 88 13 15 | 79 14 16 | 67 15 17 | 49 16 18 | 131 17 19 | 141 18 20 | 97 19 21 | 69 20 22 | 51 21 23 | 34 22 24 | 25 23 25 | 39 24 26 | 154 25 27 | 107 26 28 | 71 27 29 | 41 28 30 | 27 29 31 | 16 30 32 | 11 31 33 | 4 32 34 | 119 33 35 | 83 34 36 | 53 
35 37 | 29 36 38 | 124 37 39 | 90 38 40 | 60 39 41 | 36 40 42 | 18 41 43 | 6 42 44 | 5 43 45 | 0 44 46 | 30 45 47 | 12 46 48 | 17 47 49 | 9 48 50 | 155 49 51 | 120 50 52 | 84 51 53 | 54 52 54 | 132 53 55 | 108 54 56 | 72 55 57 | 42 56 58 | 35 57 59 | 28 58 60 | 50 59 61 | 26 60 62 | 142 61 63 | 98 62 64 | 70 63 65 | 52 64 66 | 152 65 67 | 106 66 68 | 82 67 69 | 59 68 70 | 150 69 71 | 118 70 72 | 130 71 73 | 89 72 74 | 80 73 75 | 68 74 76 | 104 75 77 | 93 76 78 | 159 77 79 | 116 78 80 | 146 79 81 | 126 80 82 | 147 81 83 | 127 82 84 | 113 83 85 | 101 84 86 | 111 85 87 | 135 86 88 | 122 87 89 | 156 88 90 | 99 89 91 | 133 90 92 | 75 91 93 | 56 92 94 | 86 93 95 | 77 94 96 | 47 95 97 | 65 96 98 | 143 97 99 | 137 98 100 | 91 99 101 | 63 100 102 | 45 101 103 | 32 102 104 | 23 103 105 | 40 104 106 | 139 105 107 | 95 106 108 | 61 107 109 | 37 108 110 | 21 109 111 | 14 110 112 | 7 111 113 | 2 112 114 | 109 113 115 | 73 114 116 | 43 115 117 | 19 116 118 | 121 117 119 | 153 118 120 | 85 119 121 | 55 120 122 | 31 121 123 | 13 122 124 | 1 123 125 | 3 124 126 | 20 125 127 | 8 126 128 | 15 127 129 | 10 128 130 | 140 129 131 | 110 130 132 | 74 131 133 | 44 132 134 | 144 133 135 | 96 134 136 | 62 135 137 | 38 136 138 | 33 137 139 | 22 138 140 | 48 139 141 | 24 140 142 | 138 141 143 | 92 142 144 | 64 143 145 | 46 144 146 | 134 145 147 | 100 146 148 | 76 147 149 | 57 148 150 | 136 149 151 | 112 150 152 | 123 151 153 | 87 152 154 | 78 153 155 | 66 154 156 | 102 155 157 | 94 156 158 | 157 157 159 | 114 158 160 | 148 159 161 | 128 160 162 | -------------------------------------------------------------------------------- /resources/Trigger-Patches.txt: -------------------------------------------------------------------------------- 1 | #Trigger Patches (SoftID), sorted by PatchHardID [110610, TPK] 2 | 1348 1419 1249 1349 1420 1250 1350 1421 1251 3 | 1345 1416 1246 1346 1417 1247 1347 1418 1248 4 | 1025 917 1138 1024 916 1137 1023 915 1136 5 | 1022 914 1135 1021 913 1134 1020 912 1133 6 | 1145 1031 1144 1030 922 1029 921 819 920 7 | 1356 1355 1260 1146 1354 1259 1258 1257 1143 8 | 1256 1142 1141 1255 1028 1027 919 1026 918 9 | 1140 1254 1139 1353 1253 1352 1252 1438 1351 10 | 1148 1034 1147 1033 925 1032 924 822 923 11 | 1424 1359 1358 1263 1423 1422 1357 1262 1261 12 | 821 725 820 724 634 723 633 549 632 13 | 548 470 547 469 397 546 468 396 545 14 | 818 722 817 721 631 816 720 630 815 15 | 719 629 814 718 628 813 717 627 812 16 | 716 626 811 715 625 810 714 624 809 17 | 467 395 544 466 394 543 465 393 542 18 | 1365 1429 1364 1363 1268 1362 1267 1266 1152 19 | 1428 1427 1426 1361 1425 1360 1265 1264 1149 20 | 1151 1037 1150 1036 928 1035 927 825 926 21 | 824 728 823 727 637 726 636 552 635 22 | 551 473 550 472 400 471 399 333 398 23 | 332 272 331 271 217 330 270 216 329 24 | 269 215 328 268 214 327 267 213 326 25 | 464 392 541 463 391 540 462 390 539 26 | 1273 1367 1272 1158 1366 1271 1270 1269 1155 27 | 1154 1040 1153 1039 931 1038 930 828 929 28 | 827 731 826 730 640 729 639 555 638 29 | 554 476 553 475 403 474 402 336 401 30 | 335 275 334 274 220 273 219 171 218 31 | 170 128 169 127 91 168 126 90 167 32 | 173 131 172 130 94 129 93 63 92 33 | 62 38 61 37 19 60 36 18 59 34 | 1157 1043 1156 1042 934 1041 933 831 932 35 | 830 734 829 733 643 732 642 558 641 36 | 557 479 556 478 406 477 405 339 404 37 | 338 278 337 277 223 276 222 174 221 38 | 1160 1161 1159 1045 1046 1044 936 937 935 39 | 833 834 832 736 737 735 645 646 644 40 | 560 561 559 481 482 480 408 409 407 41 | 341 342 340 280 281 279 225 226 224 42 | 176 
177 175 133 134 132 96 97 95 43 | 65 66 64 40 41 39 21 22 20 44 | 68 69 42 43 44 23 24 45 10 45 | 8 9 7 1 2 6 0 3 5 46 | 344 345 282 283 284 227 228 229 178 47 | 179 180 135 136 137 98 99 100 67 48 | 182 183 138 139 140 101 102 141 70 49 | 103 142 71 104 143 72 105 144 73 50 | 1368 1274 1275 1162 1369 1276 1277 1278 1165 51 | 1163 1164 1047 1048 1049 938 939 940 835 52 | 836 837 738 739 740 647 648 649 562 53 | 563 564 483 484 485 410 411 412 343 54 | 1430 1370 1371 1372 1373 1279 1280 1281 1168 55 | 1166 1167 1050 1051 1052 941 942 943 838 56 | 839 840 741 742 743 650 651 652 565 57 | 566 567 486 487 488 413 414 415 346 58 | 350 351 288 289 290 233 234 291 184 59 | 347 348 285 286 287 230 231 232 181 60 | 421 496 353 422 497 354 423 498 355 61 | 235 292 185 236 293 186 237 294 187 62 | 1431 1432 1433 1374 1434 1375 1282 1283 1171 63 | 1169 1170 1053 1054 1055 944 945 946 841 64 | 842 843 744 745 746 653 654 655 568 65 | 569 570 489 490 491 416 417 418 349 66 | 1435 1376 1377 1284 1436 1437 1378 1285 1286 67 | 1172 1173 1056 1057 1058 947 948 949 844 68 | 845 846 747 748 749 656 657 658 571 69 | 572 573 492 493 494 419 420 495 352 70 | 1379 1380 1287 1174 1381 1288 1289 1290 1177 71 | 1175 1176 1059 1060 1061 950 951 952 847 72 | 1291 1178 1179 1180 1062 1063 953 954 850 73 | 848 849 750 751 752 659 660 753 574 74 | 661 754 575 662 755 576 663 756 577 75 | 664 757 578 665 758 579 666 759 580 76 | 958 1069 854 959 1070 855 960 1071 856 77 | 961 1072 857 962 1073 858 963 1074 859 78 | 1064 1181 1292 1065 1182 1293 1439 1183 1294 79 | 955 1066 851 956 1067 852 957 1068 853 80 | 1297 1186 1384 1296 1185 1383 1295 1184 1382 81 | 1300 1189 1387 1299 1188 1386 1298 1187 1385 82 | 1306 1195 1393 1305 1194 1392 1304 1193 1391 83 | 1303 1192 1390 1302 1191 1389 1301 1190 1388 84 | 967 1078 863 968 1079 864 969 1080 865 85 | 964 1075 860 965 1076 861 966 1077 862 86 | 769 868 770 869 974 870 975 1086 976 87 | 1085 1202 1203 1309 1087 1088 1204 1310 1205 88 | 970 866 971 867 972 973 1084 1200 1201 89 | 1196 1307 1197 1308 1081 1198 1082 1199 1083 90 | 772 871 773 872 977 873 978 1089 979 91 | 1090 1206 1091 1207 1311 1312 1394 1313 1395 92 | 508 589 509 590 677 591 678 771 679 93 | 430 505 362 431 506 363 432 507 433 94 | 673 766 587 674 767 588 675 768 676 95 | 670 763 584 671 764 585 672 765 586 96 | 427 502 359 428 503 360 429 504 361 97 | 667 760 581 668 761 582 669 762 583 98 | 1094 1210 1211 1317 1399 1318 1400 1319 1401 99 | 1208 1209 1093 1316 1315 1396 1397 1314 1398 100 | 775 874 776 875 980 876 981 1092 982 101 | 511 592 512 593 680 594 681 774 682 102 | 301 364 302 365 434 366 435 510 436 103 | 241 298 191 242 299 192 243 300 244 104 | 238 295 188 239 296 189 240 297 190 105 | 424 499 356 425 500 357 426 501 358 106 | 1096 1212 1097 1213 1214 1320 1215 1321 1322 107 | 778 877 779 878 983 879 984 1095 985 108 | 514 595 515 596 683 597 684 777 685 109 | 304 367 305 368 437 369 438 513 439 110 | 148 193 149 194 245 195 246 303 247 111 | 106 145 74 107 146 75 108 147 109 112 | 49 76 50 77 110 78 111 150 112 113 | 25 46 11 26 47 12 27 48 28 114 | 781 880 782 881 986 882 987 1098 988 115 | 517 598 518 599 686 600 687 780 688 116 | 307 370 308 371 440 372 441 516 442 117 | 151 196 152 197 248 198 249 306 250 118 | 784 883 885 884 991 1105 1104 1219 1218 119 | 1099 989 1100 1216 990 1101 1103 1217 1102 120 | 520 601 603 602 689 691 690 783 785 121 | 310 373 375 374 443 445 444 519 521 122 | 154 199 201 200 251 253 252 309 311 123 | 52 79 81 80 113 115 114 153 155 124 | 4 13 15 14 29 31 30 51 53 
125 | 35 17 58 34 16 57 33 32 56 126 | 157 156 204 203 202 256 255 254 314 127 | 55 54 84 83 82 118 117 116 158 128 | 122 86 163 121 85 162 120 119 161 129 | 125 89 166 124 88 165 123 87 164 130 | 1108 1107 1223 1222 1221 1220 1325 1324 1323 131 | 787 786 888 887 886 994 993 992 1106 132 | 523 522 606 605 604 694 693 692 788 133 | 313 312 378 377 376 448 447 446 524 134 | 1110 1225 1224 1328 1327 1404 1326 1403 1402 135 | 790 789 891 890 889 997 996 995 1109 136 | 526 525 609 608 607 697 696 695 791 137 | 316 315 381 380 379 451 450 449 527 138 | 263 209 322 262 208 321 261 260 320 139 | 160 159 207 206 205 259 258 257 317 140 | 461 389 538 460 388 537 459 387 536 141 | 266 212 325 265 211 324 264 210 323 142 | 1111 1227 1226 1329 1331 1330 1407 1406 1405 143 | 793 792 894 893 892 1000 999 998 1112 144 | 529 528 612 611 610 700 699 698 794 145 | 319 318 384 383 382 454 453 452 530 146 | 1114 1113 1229 1228 1334 1333 1332 1409 1408 147 | 796 795 897 896 895 1003 1002 1001 1115 148 | 532 531 615 614 613 703 702 701 797 149 | 458 386 535 457 385 534 456 455 533 150 | 1119 1233 1232 1336 1117 1116 1231 1230 1335 151 | 799 798 900 899 898 1006 1005 1004 1118 152 | 1010 902 1009 901 1008 1007 1120 1235 1234 153 | 707 617 802 706 616 801 705 704 800 154 | 710 620 805 709 619 804 708 618 803 155 | 713 623 808 712 622 807 711 621 806 156 | 1016 908 1129 1015 907 1128 1014 906 1127 157 | 1019 911 1132 1018 910 1131 1017 909 1130 158 | 1239 1338 1238 1337 1123 1237 1122 1236 1121 159 | 1013 905 1126 1012 904 1125 1011 903 1124 160 | 1339 1410 1240 1340 1411 1241 1341 1412 1242 161 | 1342 1413 1243 1343 1414 1244 1344 1415 1245 162 | -------------------------------------------------------------------------------- /resources/bias-positions.txt: -------------------------------------------------------------------------------- 1 | 33.25 172.772 2 | 47.5 172.772 3 | 4.75 172.772 4 | 19 172.772 5 | 66.5 148.09 6 | 52.25 148.09 7 | 38 148.09 8 | 23.75 148.09 9 | 109.25 123.409 10 | 95 123.409 11 | 125.875 123.409 12 | 114 139.863 13 | 97.375 156.318 14 | 85.5 143.154 15 | 79.5625 162.488 16 | 63.65 171.127 17 | 123.5 98.7269 18 | 109.25 98.7269 19 | 144.875 102.841 20 | 137.75 115.181 21 | 95 98.7269 22 | 80.75 98.7269 23 | 66.5 98.7269 24 | 52.25 98.7269 25 | 80.75 123.409 26 | 66.5 123.409 27 | 52.25 123.409 28 | 38 123.409 29 | 23.75 123.409 30 | 9.5 123.409 31 | 38 98.7269 32 | 23.75 98.7269 33 | 172.188 59.6475 34 | 155.8 67.4634 35 | 163.875 82.2724 36 | 146.3 90.4997 37 | 137.75 74.0452 38 | 123.5 74.0452 39 | 109.25 74.0452 40 | 95 74.0452 41 | 80.75 74.0452 42 | 66.5 74.0452 43 | 52.25 74.0452 44 | 38 74.0452 45 | 23.75 74.0452 46 | 9.5 74.0452 47 | 9.5 98.7269 48 | -4.75 98.7269 49 | 181.688 22.6249 50 | 171 41.1362 51 | 152 49.3634 52 | 137.75 49.3634 53 | 123.5 49.3634 54 | 109.25 49.3634 55 | 95 49.3634 56 | 80.75 49.3634 57 | 66.5 49.3634 58 | 52.25 49.3634 59 | 38 49.3634 60 | 23.75 49.3634 61 | 52.25 24.6817 62 | 38 24.6817 63 | 23.75 24.6817 64 | 9.5 24.6817 65 | 166.25 24.6817 66 | 152 24.6817 67 | 137.75 24.6817 68 | 123.5 24.6817 69 | 109.25 24.6817 70 | 95 24.6817 71 | 80.75 24.6817 72 | 66.5 24.6817 73 | 180.5 0 74 | 166.25 0 75 | 152 0 76 | 137.75 0 77 | 123.5 0 78 | 109.25 0 79 | 95 0 80 | 80.75 0 81 | 52.25 -123.409 82 | 38 -123.409 83 | 23.75 -123.409 84 | 9.5 -123.409 85 | 38 -148.09 86 | 23.75 -148.09 87 | 9.5 -148.09 88 | -4.75 -148.09 89 | 79.5625 -162.488 90 | 63.65 -171.127 91 | 66.5 -148.09 92 | 52.25 -148.09 93 | 33.25 -172.772 94 | 47.5 -172.772 95 | 4.75 -172.772 96 | 19 
-172.772 97 | 52.25 -74.0452 98 | 38 -74.0452 99 | 66.5 -49.3634 100 | 52.25 -49.3634 101 | 38 -98.7269 102 | 23.75 -98.7269 103 | 23.75 -74.0452 104 | 9.5 -74.0452 105 | 163.875 -82.2724 106 | 146.3 -90.4997 107 | 137.75 -74.0452 108 | 123.5 -74.0452 109 | 109.25 -74.0452 110 | 95 -74.0452 111 | 80.75 -74.0452 112 | 66.5 -74.0452 113 | 144.875 -102.841 114 | 137.75 -115.181 115 | 123.5 -98.7269 116 | 109.25 -98.7269 117 | 95 -98.7269 118 | 80.75 -98.7269 119 | 66.5 -98.7269 120 | 52.25 -98.7269 121 | 125.875 -123.409 122 | 114 -139.863 123 | 109.25 -123.409 124 | 95 -123.409 125 | 97.375 -156.318 126 | 85.5 -143.154 127 | 80.75 -123.409 128 | 66.5 -123.409 129 | 66.5 0 130 | 52.25 0 131 | 38 0 132 | 23.75 0 133 | 23.75 -24.6817 134 | 9.5 -24.6817 135 | 9.5 0 136 | -4.75 0 137 | 80.75 -24.6817 138 | 66.5 -24.6817 139 | 52.25 -24.6817 140 | 38 -24.6817 141 | 38 -49.3634 142 | 23.75 -49.3634 143 | 9.5 -49.3634 144 | -4.75 -49.3634 145 | 181.688 -22.6249 146 | 171 -41.1362 147 | 166.25 -24.6817 148 | 152 -24.6817 149 | 137.75 -24.6817 150 | 123.5 -24.6817 151 | 109.25 -24.6817 152 | 95 -24.6817 153 | 172.188 -59.6475 154 | 155.8 -67.4634 155 | 152 -49.3634 156 | 137.75 -49.3634 157 | 123.5 -49.3634 158 | 109.25 -49.3634 159 | 95 -49.3634 160 | 80.75 -49.3634 161 | -52.25 -172.772 162 | -38 -172.772 163 | -23.75 -172.772 164 | -9.5 -172.772 165 | -47.5 -148.09 166 | -61.75 -148.09 167 | -19 -148.09 168 | -33.25 -148.09 169 | -90.25 -123.409 170 | -104.5 -123.409 171 | -115.188 -137.806 172 | -122.55 -121.763 173 | -78.375 -143.977 174 | -99.75 -148.09 175 | -68.875 -168.658 176 | -85.5 -159.608 177 | -104.5 -98.7269 178 | -118.75 -98.7269 179 | -133 -98.7269 180 | -140.6 -116.827 181 | -76 -98.7269 182 | -90.25 -98.7269 183 | -47.5 -98.7269 184 | -61.75 -98.7269 185 | -61.75 -123.409 186 | -76 -123.409 187 | -33.25 -123.409 188 | -47.5 -123.409 189 | -19 -98.7269 190 | -33.25 -98.7269 191 | -4.75 -123.409 192 | -19 -123.409 193 | -154.375 -69.9316 194 | -170.05 -69.1088 195 | -147.25 -82.2724 196 | -153.9 -97.0814 197 | -118.75 -74.0452 198 | -133 -74.0452 199 | -90.25 -74.0452 200 | -104.5 -74.0452 201 | -61.75 -74.0452 202 | -76 -74.0452 203 | -33.25 -74.0452 204 | -47.5 -74.0452 205 | -4.75 -74.0452 206 | -19 -74.0452 207 | 9.5 -98.7269 208 | -4.75 -98.7269 209 | -161.5 -49.3634 210 | -177.65 -39.4908 211 | -133 -49.3634 212 | -147.25 -49.3634 213 | -104.5 -49.3634 214 | -118.75 -49.3634 215 | -76 -49.3634 216 | -90.25 -49.3634 217 | -47.5 -49.3634 218 | -61.75 -49.3634 219 | -19 -49.3634 220 | -33.25 -49.3634 221 | -33.25 -24.6817 222 | -47.5 -24.6817 223 | -4.75 -24.6817 224 | -19 -24.6817 225 | -147.25 -24.6817 226 | -161.5 -24.6817 227 | -118.75 -24.6817 228 | -133 -24.6817 229 | -90.25 -24.6817 230 | -104.5 -24.6817 231 | -61.75 -24.6817 232 | -76 -24.6817 233 | -161.5 0 234 | -176.7 18.0999 235 | -174.562 -18.5113 236 | -181.45 -3.2909 237 | -133 0 238 | -147.25 0 239 | -104.5 0 240 | -118.75 0 241 | -33.25 123.409 242 | -47.5 123.409 243 | -4.75 123.409 244 | -19 123.409 245 | -19 148.09 246 | -33.25 148.09 247 | 9.5 148.09 248 | -4.75 148.09 249 | -68.875 168.658 250 | -85.5 159.608 251 | -47.5 148.09 252 | -61.75 148.09 253 | -52.25 172.772 254 | -38 172.772 255 | -23.75 172.772 256 | -9.5 172.772 257 | -33.25 74.0452 258 | -47.5 74.0452 259 | -47.5 49.3634 260 | -61.75 49.3634 261 | -19 98.7269 262 | -33.25 98.7269 263 | -4.75 74.0452 264 | -19 74.0452 265 | -147.25 82.2724 266 | -153.9 97.0814 267 | -118.75 74.0452 268 | -133 74.0452 269 | -90.25 74.0452 270 | -104.5 74.0452 271 
| -61.75 74.0452 272 | -76 74.0452 273 | -133 98.7269 274 | -140.6 116.827 275 | -104.5 98.7269 276 | -118.75 98.7269 277 | -76 98.7269 278 | -90.25 98.7269 279 | -47.5 98.7269 280 | -61.75 98.7269 281 | -115.188 137.806 282 | -122.55 121.763 283 | -90.25 123.409 284 | -104.5 123.409 285 | -78.375 143.977 286 | -99.75 148.09 287 | -61.75 123.409 288 | -76 123.409 289 | -76 0 290 | -90.25 0 291 | -47.5 0 292 | -61.75 0 293 | -19 0 294 | -33.25 0 295 | -4.75 24.6817 296 | -19 24.6817 297 | -61.75 24.6817 298 | -76 24.6817 299 | -33.25 24.6817 300 | -47.5 24.6817 301 | -19 49.3634 302 | -33.25 49.3634 303 | 9.5 49.3634 304 | -4.75 49.3634 305 | -161.5 49.3634 306 | -177.65 39.4908 307 | -147.25 24.6817 308 | -161.5 24.6817 309 | -118.75 24.6817 310 | -133 24.6817 311 | -90.25 24.6817 312 | -104.5 24.6817 313 | -154.375 69.9316 314 | -170.05 69.1088 315 | -133 49.3634 316 | -147.25 49.3634 317 | -104.5 49.3634 318 | -118.75 49.3634 319 | -76 49.3634 320 | -90.25 49.3634 321 | -------------------------------------------------------------------------------- /resources/gapd-offset-20140321.txt: -------------------------------------------------------------------------------- 1 | -0.020431 2 | 0.00832178 3 | 0.0212958 4 | 0.0104781 5 | 0.0430267 6 | 0.0319688 7 | 0.035354 8 | 0.00926146 9 | -0.00976893 10 | 0.00909934 11 | 0.0134534 12 | 0.0277669 13 | 0.00178259 14 | 0.0210581 15 | 0.011653 16 | 0.00651999 17 | 0.00199355 18 | -0.0235251 19 | 0.0167261 20 | 0.0136575 21 | 0.00355753 22 | 0.00970004 23 | 0.0109303 24 | 0.0164663 25 | 0.0319319 26 | 0.0520658 27 | 0.0275055 28 | 0.0286959 29 | -0.00637385 30 | 0.0340139 31 | 0.0525328 32 | 0.0312172 33 | -0.0388423 34 | 0.0124285 35 | 0.00651913 36 | 0.0013613 37 | 0.0148968 38 | -0.0127411 39 | -0.0330295 40 | -0.0136592 41 | -0.00243429 42 | -0.0282888 43 | -0.0543325 44 | -0.0111605 45 | -0.00337782 46 | -0.0247642 47 | -0.029027 48 | -0.0185733 49 | 0.00168492 50 | -0.00422595 51 | -0.0294628 52 | 0.0123533 53 | -0.0747024 54 | -0.0238504 55 | -0.0157757 56 | -0.0132158 57 | -0.0595174 58 | -0.0213799 59 | -0.00206118 60 | -0.0334503 61 | -0.0156386 62 | -0.0141058 63 | -0.021393 64 | -0.0268912 65 | -0.0193007 66 | -0.0163052 67 | -0.0823194 68 | -0.00633968 69 | 1.60147e-05 70 | -0.0139976 71 | 0.00134486 72 | -0.00758914 73 | -0.0368231 74 | -0.0395706 75 | -0.0163913 76 | -0.0179613 77 | 0.0163562 78 | 0.00111914 79 | 0.0102105 80 | -0.00855684 81 | -0.00283223 82 | 0.0245416 83 | 0.0306119 84 | 0.000882214 85 | 0.0209501 86 | 0.0178442 87 | -0.017706 88 | -0.00342062 89 | 0.0215015 90 | -0.0145192 91 | -0.0202725 92 | 0.00918373 93 | -0.00516552 94 | 0.02369 95 | -0.0297331 96 | 0.00068445 97 | -0.021089 98 | -0.00290268 99 | 0.00364881 100 | -0.00443009 101 | 0.000777697 102 | 0.0325424 103 | -0.0247599 104 | -0.016867 105 | 0.0458985 106 | 0.0560158 107 | 0.0291322 108 | -0.00382527 109 | 0.0021502 110 | -0.00608887 111 | 0.0122985 112 | 0.0191414 113 | 0.040414 114 | 0.0417088 115 | 0.0348876 116 | 0.00310331 117 | 0.0102153 118 | 0.012024 119 | 0.00333001 120 | -0.00600177 121 | 0.0433295 122 | 0.0261174 123 | 0.0250391 124 | 0.0225199 125 | -0.0137955 126 | 0.0397019 127 | 0.0104806 128 | 0.0436427 129 | -0.0109636 130 | -0.0221554 131 | 6.70098e-05 132 | 0.00766414 133 | 0.00759124 134 | -0.00621469 135 | 0.00626345 136 | -0.00979076 137 | -0.0135179 138 | -0.0241251 139 | 0.0123371 140 | 0.0169208 141 | -0.0175259 142 | 0.0200299 143 | 0.0383607 144 | 0.0197718 145 | 0.0147154 146 | 0.0510904 147 | 0.0067266 148 | 0.0440807 149 | 
0.0494315 150 | 0.0357181 151 | -0.0187549 152 | -0.0235434 153 | 0.0477957 154 | -0.00119858 155 | 0.0105862 156 | 0.00397173 157 | 0.00439129 158 | -0.0162241 159 | -0.032654 160 | -0.0133396 161 | -0.00931341 162 | 0.0186235 163 | -0.0385081 164 | -0.00514364 165 | -0.00876729 166 | 0.0244995 167 | -0.0386815 168 | 0.0172906 169 | -0.019895 170 | 0.0110898 171 | 0.00827848 172 | 0.014279 173 | -0.0264893 174 | 0.015907 175 | 0.0175138 176 | 0.0110661 177 | -0.0225485 178 | -0.00665218 179 | -0.0083265 180 | -0.015179 181 | -0.0370569 182 | -0.0284921 183 | -0.023819 184 | -0.0557762 185 | -0.00801725 186 | -0.0220271 187 | -0.0265118 188 | -0.00373133 189 | -0.0230875 190 | -0.0284073 191 | -0.0406358 192 | -0.0435639 193 | 0.0633702 194 | -0.0135716 195 | 0.0811934 196 | 0.026761 197 | 0.0123395 198 | -0.00217267 199 | -0.0181588 200 | 0.012032 201 | -0.027645 202 | 0.00697095 203 | 0.057609 204 | 0.00171696 205 | 0.0381898 206 | -0.00430594 207 | 0.0169919 208 | 0.0139389 209 | 0.0311843 210 | 0.0285714 211 | -0.0172519 212 | 0.00767744 213 | -0.00573722 214 | 0.00544325 215 | 0.00636473 216 | -0.0141013 217 | -0.00132153 218 | 0.0034358 219 | -0.0161899 220 | -0.00739848 221 | 0.00373393 222 | 0.031644 223 | 0.0250695 224 | 0.0305736 225 | 0.0365152 226 | 0.0910813 227 | 0.0643829 228 | 0.0654931 229 | 0.0488657 230 | 0.0562799 231 | 0.0511648 232 | 0.0871616 233 | 0.0421892 234 | 0.0396756 235 | 0.0556898 236 | 0.0558264 237 | 0.0426915 238 | 0.0476795 239 | 0.0431677 240 | 0.0765429 241 | 0.0568724 242 | 0.0514678 243 | 0.0299583 244 | -0.0240588 245 | 0.00851646 246 | 0.0503675 247 | 0.0278908 248 | 0.048996 249 | -0.0140513 250 | 0.0446122 251 | 0.0281399 252 | 0.0263761 253 | -0.00753335 254 | 0.0279369 255 | 0.00408955 256 | 0.0311142 257 | -0.0925525 258 | -0.0822275 259 | -0.100653 260 | -0.0687962 261 | -0.0696676 262 | -0.0907778 263 | -0.0816401 264 | -0.0968508 265 | -0.0326367 266 | -0.0191343 267 | -0.0581186 268 | -0.0893147 269 | -0.111563 270 | -0.0614747 271 | -0.0419339 272 | -0.0553427 273 | -0.0354486 274 | -0.0314234 275 | -0.0683172 276 | -0.0607189 277 | -0.0545848 278 | -0.0421635 279 | -0.0464067 280 | -0.0130253 281 | -0.0688494 282 | -0.0232554 283 | -0.0666288 284 | -0.0919203 285 | -0.0538624 286 | -0.0999725 287 | -0.0386876 288 | -0.0523205 289 | -0.0328958 290 | 0.00136952 291 | 0.0169666 292 | 0.0325062 293 | 0.00398554 294 | 0.0345107 295 | 0.0317108 296 | 0.0167895 297 | -0.0228043 298 | 0.000825192 299 | -0.00800472 300 | 0.0168252 301 | -0.0190985 302 | -0.0165942 303 | -0.0273626 304 | -0.0155579 305 | 0.0217321 306 | 0.0347696 307 | 0.0150308 308 | 0.0381533 309 | -0.0456614 310 | -0.0252239 311 | -0.0177059 312 | -0.0249018 313 | 0.00481917 314 | 0.00946615 315 | -0.0200785 316 | -0.0325183 317 | 0.00207208 318 | 0.00306146 319 | -0.00913241 320 | -0.0163615 321 | -------------------------------------------------------------------------------- /resources/sensor-pos.txt: -------------------------------------------------------------------------------- 1 | 42.75 175.514 2 | 99.75 120.666 3 | 57 95.9845 4 | 99.75 71.3028 5 | 42.75 21.9393 6 | 156.75 21.9393 7 | 85.5 -2.74241 8 | 0 -2.74241 9 | 42.75 -27.4241 10 | 156.75 -27.4241 11 | 99.75 -76.7876 12 | 57 -101.469 13 | 99.75 -126.151 14 | 42.75 -170.03 15 | -42.75 -170.03 16 | -99.75 -126.151 17 | -57 -101.469 18 | -14.25 -126.151 19 | -99.75 -76.7876 20 | -14.25 -76.7876 21 | -156.75 -27.4241 22 | -71.25 -27.4241 23 | -114 -2.74241 24 | -42.75 21.9393 25 | -156.75 21.9393 26 | -14.25 71.3028 27 
| -99.75 71.3028 28 | -57 95.9845 29 | -99.75 120.666 30 | -14.25 120.666 31 | -42.75 175.514 32 | -------------------------------------------------------------------------------- /setup.cfg: -------------------------------------------------------------------------------- 1 | [aliases] 2 | test=pytest 3 | 4 | [tool:pytest] 5 | addopts = -v 6 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup, find_packages 2 | 3 | with open('fact/VERSION', 'r') as f: 4 | __version__ = f.read().strip() 5 | 6 | with open('README.rst') as f: 7 | long_description = f.read() 8 | 9 | 10 | setup( 11 | name='pyfact', 12 | version=__version__, 13 | description='A module containing useful methods for working with fact', 14 | long_description=long_description, 15 | url='http://github.com/fact-project/pyfact', 16 | author='Maximilian Noethe, Dominik Neise', 17 | author_email='maximilian.noethe@tu-dortmund.de', 18 | license='MIT', 19 | packages=find_packages(), 20 | package_data={ 21 | '': [ 22 | 'VERSION', 23 | 'resources/*', 24 | 'credentials/credentials.encrypted', 25 | ] 26 | }, 27 | entry_points={ 28 | 'console_scripts': [ 29 | 'fact_calculate_theta = fact.analysis.scripts.theta:main', 30 | 'fact_calculate_radec = fact.analysis.scripts.radec:main', 31 | ] 32 | }, 33 | tests_require=['pytest>=3.0.0'], 34 | setup_requires=['pytest-runner'], 35 | install_requires=[ 36 | 'astropy', 37 | 'click', 38 | 'h5py', 39 | 'matplotlib>=1.4', 40 | 'numpy', 41 | 'pandas', 42 | 'peewee>=3', 43 | 'pymysql', 44 | 'python-dateutil', 45 | 'scipy', 46 | 'setuptools', 47 | 'simple-crypt', 48 | 'sqlalchemy', 49 | 'tables>=3.3', # pytables in anaconda 50 | 'wrapt', 51 | ], 52 | zip_safe=False, 53 | ) 54 | -------------------------------------------------------------------------------- /tests/resources/gammas.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fact-project/pyfact/2f204dbf4a90bfa84899e2b8d6aaed891d2b09bc/tests/resources/gammas.hdf5 -------------------------------------------------------------------------------- /tests/resources/proton_header_test.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/fact-project/pyfact/2f204dbf4a90bfa84899e2b8d6aaed891d2b09bc/tests/resources/proton_header_test.hdf5 -------------------------------------------------------------------------------- /tests/test_all.py: -------------------------------------------------------------------------------- 1 | def test_cameraplot(): 2 | from fact.plotting import camera 3 | from numpy.random import uniform 4 | 5 | data = uniform(0, 20, 1440) 6 | camera(data) 7 | 8 | 9 | 10 | def test_patch_indices(): 11 | from fact.instrument.camera import patch_indices 12 | 13 | pi = patch_indices 14 | assert pi[pi.bias_patch_id==1].trigger_patch_id.iloc[0] == 0 15 | assert pi[pi.bias_patch_id==128].trigger_patch_id.iloc[0] == 40 16 | assert pi[pi.bias_patch_id==129].trigger_patch_id.iloc[0] == 40 17 | 18 | assert pi[pi.trigger_patch_id == 47].bias_patch_id.iloc[0] == 142 19 | assert pi[pi.trigger_patch_id == 47].bias_patch_id.iloc[1] == 143 20 | assert (pi[pi.trigger_patch_id == 47].bias_patch_id == [142, 143]).all() 21 | 22 | 23 | def test_easier_use_of_patch_indices(): 24 | from fact.instrument.camera import patch_indices 25 | pi = patch_indices 26 | 27 | bias_patch_ids = 
pi.bias_patch_id.values 28 | 29 | # find out which are the bias patches for trigger patch 47 30 | trigger_patch = 47 31 | # double the number and add 0 or 1 to find both bias patch ids 32 | assert bias_patch_ids[2 * trigger_patch + 0] == 142 33 | assert bias_patch_ids[2 * trigger_patch + 1] == 143 34 | 35 | # the reverse lookup (trigger patch for a given bias patch) is shown in the next test 36 | 37 | 38 | def test_bias_patch_values_into_trigger_patches(): 39 | # assume you have values sorted by bias_patch_id, e.g. currents like this: 40 | # currents = np.random.normal(loc=40, scale=10, size=320) 41 | # i.e. you have 320 values and want to know which two of them 42 | # should be combined into one trigger patch. 43 | 44 | # start by sorting the mapping table by the index you have (bias) to 45 | # get the index you want (trigger), like this: 46 | from fact.instrument.camera import patch_indices 47 | pi = patch_indices 48 | 49 | t_id_BY_b_id = pi.sort_values('bias_patch_id').trigger_patch_id.values 50 | 51 | # this array yields the trigger patch for any given bias patch: 52 | assert t_id_BY_b_id[40] == 20 53 | assert t_id_BY_b_id[41] == 20 54 | assert t_id_BY_b_id[80] == 72 55 | assert t_id_BY_b_id[81] == 72 56 | # bias patches 40 and 41 belong to trigger patch 20, as one might have 57 | # guessed from the numbering, but bias patches 80 and 81 belong to 58 | # trigger patch 72, which one would *not* have guessed; this is why 59 | # the mapping table is needed. 60 | 61 | # usually, though, you want the other direction: combine the two bias 62 | # currents belonging to each trigger patch into one value for that 63 | # trigger patch. since two consecutive bias patches always share a 64 | # trigger patch (see the asserts above), this works pair-wise: 65 | import numpy as np 66 | currents = np.random.normal(loc=40, scale=10, size=320) 67 | pair_sums = currents.reshape(160, 2).sum(axis=1) 68 | patch_currents = np.empty(160) 69 | patch_currents[t_id_BY_b_id[::2]] = pair_sums 70 | assert patch_currents[20] == currents[40] + currents[41] 71 | assert patch_currents[72] == currents[80] + currents[81] 72 | 73 | 74 | def test_coords_relation_to_pos_from_dataframe(): 75 | 76 | from fact.instrument.camera import get_pixel_dataframe 77 | from fact.instrument.camera import get_pixel_coords 78 | import numpy as np 79 | 80 | pc = get_pixel_coords() 81 | pd = get_pixel_dataframe() 82 | 83 | assert np.allclose(pc[0], -pd.pos_Y.values*9.5) 84 | assert np.allclose(pc[1], pd.pos_X.values*9.5) 85 | -------------------------------------------------------------------------------- /tests/test_analysis.py: -------------------------------------------------------------------------------- 1 | import astropy.units as u 2 | from pytest import approx, raises 3 | 4 | 5 | def test_proton_obstime(): 6 | from fact.analysis.statistics import calc_proton_obstime 7 | n_simulated = 780046520 8 | t = calc_proton_obstime( 9 | n_events=n_simulated, 10 | spectral_index=-2.7, 11 | scatter_radius=400 * u.m, 12 | viewcone=5 * u.deg, 13 | e_min=100 * u.GeV, 14 | e_max=200 * u.TeV, 15 | ) 16 | assert t.to(u.s).value == approx(15397.82) 17 | 18 | 19 | def test_power(): 20 | from fact.analysis.statistics import random_power 21 | 22 | a = random_power(-2.7, e_min=5 * u.GeV, e_max=10 * u.TeV, size=1000) 23 | 24 | assert a.shape == (1000, ) 25 | assert a.unit == u.TeV 26 | 27 | with raises(ValueError): 28 | random_power(2.7, 5 * u.GeV, 10 * u.GeV, e_ref=1 * u.GeV, size=1) 29 | 30 | 31 | def test_power_law_integral(): 32 | from fact.analysis.statistics import power_law_integral, FLUX_UNIT 33 | import astropy.units as u 34 | import numpy as np 35 | 36 | # wolfram alpha result https://www.wolframalpha.com/input/?i=int_100%5E1000+x%5E-2 37 | result = power_law_integral( 38 | flux_normalization=1 * FLUX_UNIT, 39 | spectral_index=-2, 40 | e_min=100 * u.GeV, 41 | e_max=1000 * u.GeV, 42 | e_ref=1 * u.GeV, 43 | ) 44 | assert np.isclose(result.value, 0.009) 45 | assert result.unit == (FLUX_UNIT * u.GeV) 46 |
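The Wolfram Alpha cross-check in ``test_power_law_integral`` above relies on the closed form of the power-law integral with the normalization taken at a reference energy. The sketch below is a minimal standalone illustration under that assumption; ``power_law_value`` is a hypothetical helper, not the actual signature of ``fact.analysis.statistics.power_law_integral``.

.. code:: python

    def power_law_value(index, e_min, e_max, e_ref=1.0, norm=1.0):
        """Integral of norm * (E / e_ref)**index dE from e_min to e_max, index != -1."""
        exponent = index + 1
        return norm * e_ref / exponent * (
            (e_max / e_ref) ** exponent - (e_min / e_ref) ** exponent
        )

    # reproduces int_100^1000 x^-2 dx = 0.009 from the comment in the test
    assert abs(power_law_value(-2, 100, 1000) - 0.009) < 1e-12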
-------------------------------------------------------------------------------- /tests/test_analysis_source.py: -------------------------------------------------------------------------------- 1 | import astropy.units as u 2 | from pytest import approx 3 | 4 | 5 | def test_off_position(): 6 | from fact.analysis.source import calc_off_position 7 | 8 | source_x = 50 9 | source_y = 0 10 | 11 | x_off, y_off = calc_off_position(source_x, source_y, 3, 5) 12 | assert x_off == approx(-50) 13 | assert y_off == approx(0) 14 | 15 | 16 | def test_off_position_units(): 17 | from fact.analysis.source import calc_off_position 18 | 19 | source_x = 50 * u.mm 20 | source_y = 0 * u.mm 21 | 22 | x_off, y_off = calc_off_position(source_x, source_y, 3, 5) 23 | assert x_off.unit == u.mm 24 | assert y_off.unit == u.mm 25 | assert x_off.to(u.mm).value == approx(-50) 26 | assert y_off.to(u.mm).value == approx(0) 27 | 28 | 29 | def test_theta(): 30 | from fact.io import read_data 31 | from fact.analysis import calc_theta_camera 32 | 33 | df = read_data('tests/resources/gammas.hdf5', key='events') 34 | 35 | theta = calc_theta_camera( 36 | df.source_x_prediction, df.source_y_prediction, 37 | df.zd_source_calc, df.az_source_calc, 38 | df.zd_tracking, df.az_tracking, 39 | ) 40 | 41 | assert len(theta) == len(df) 42 | 43 | 44 | def test_theta_offs(): 45 | from fact.io import read_data 46 | from fact.analysis import calc_theta_offs_camera 47 | 48 | df = read_data('tests/resources/gammas.hdf5', key='events') 49 | 50 | theta_offs = calc_theta_offs_camera( 51 | df.source_x_prediction, df.source_y_prediction, 52 | df.zd_source_calc, df.az_source_calc, 53 | df.zd_tracking, df.az_tracking, 54 | n_off=5 55 | ) 56 | 57 | assert len(theta_offs) == 5 58 | assert all(len(theta_off) == len(df) for theta_off in theta_offs) 59 | 60 | 61 | if __name__ == '__main__': 62 | test_theta() 63 | -------------------------------------------------------------------------------- /tests/test_camera.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | 4 | def test_neighbors(): 5 | from fact.instrument.camera import get_neighbor_matrix 6 | 7 | neighbors = get_neighbor_matrix() 8 | 9 | assert neighbors[1144, 259] 10 | assert neighbors[1144, 1143] 11 | assert neighbors[1144, 1146] 12 | assert neighbors[1144, 1147] 13 | assert neighbors[1144, 287] 14 | assert neighbors[1144, 284] 15 | assert not neighbors[1144, 256] 16 | assert not neighbors[1144, 281] 17 | 18 | assert np.all(neighbors.diagonal() == 0) 19 | 20 | 21 | def test_n_neighbors(): 22 | from fact.instrument.camera import get_num_neighbors 23 | 24 | n_neighbors = get_num_neighbors() 25 | 26 | assert n_neighbors[54] == 3 27 | assert n_neighbors[86] == 3 28 | assert n_neighbors[81] == 4 29 | -------------------------------------------------------------------------------- /tests/test_coordinate_trafos.py: -------------------------------------------------------------------------------- 1 | # coding: utf-8 2 | from astropy.coordinates import SkyCoord, AltAz 3 | import astropy.units as u 4 | from astropy.time import Time 5 | from fact.coordinates import CameraFrame 6 | from fact.instrument.constants import PIXEL_SPACING_MM, LOCATION, FOV_PER_PIXEL_DEG 7 | from pytest import approx 8 | 9 | 10 | obstime = Time('2014-01-01 00:00') 11 | altaz_frame = AltAz(location=LOCATION, obstime=obstime) 12 | pointing_direction = SkyCoord( 13 | alt=80 * u.deg, az=-260 * u.deg, 14 | frame=altaz_frame, 15 | ) 16 | cam_frame = 
CameraFrame(pointing_direction=pointing_direction, obstime=obstime) 17 | 18 | 19 | def test_camera_to_altaz1(): 20 | c = SkyCoord( 21 | x=10 * PIXEL_SPACING_MM * u.mm, 22 | y=0 * u.mm, 23 | frame=cam_frame, 24 | ) 25 | 26 | h = c.transform_to(altaz_frame) 27 | print(h.zen.deg, h.az.deg) 28 | print(h.separation(pointing_direction).deg) 29 | print(10 * FOV_PER_PIXEL_DEG) 30 | assert h.separation(pointing_direction).deg == approx(10 * FOV_PER_PIXEL_DEG, 1e-3) 31 | 32 | 33 | def test_camera_to_altaz2(): 34 | c = SkyCoord( 35 | x=-10 * PIXEL_SPACING_MM * u.mm, 36 | y=0 * u.mm, 37 | frame=cam_frame, 38 | ) 39 | 40 | h = c.transform_to(altaz_frame) 41 | print(h.zen.deg, h.az.deg) 42 | print(h.separation(pointing_direction).deg) 43 | assert h.separation(pointing_direction).deg == approx(10 * FOV_PER_PIXEL_DEG, 1e-3) 44 | 45 | 46 | def test_camera_to_altaz3(): 47 | c = SkyCoord( 48 | x=0 * u.mm, 49 | y=10 * PIXEL_SPACING_MM * u.mm, 50 | frame=cam_frame, 51 | ) 52 | 53 | h = c.transform_to(altaz_frame) 54 | print(h.zen.deg, h.az.deg) 55 | print(h.separation(pointing_direction).deg) 56 | assert h.separation(pointing_direction).deg == approx(10 * FOV_PER_PIXEL_DEG, 1e-3) 57 | 58 | 59 | def test_camera_to_altaz4(): 60 | c = SkyCoord( 61 | x=-10 * u.mm, 62 | y=-10 * PIXEL_SPACING_MM * u.mm, 63 | frame=cam_frame, 64 | ) 65 | 66 | h = c.transform_to(altaz_frame) 67 | print(h.zen.deg, h.az.deg) 68 | print(h.separation(pointing_direction).deg) 69 | 70 | 71 | def test_altaz_to_camera(): 72 | pointing_direction = SkyCoord( 73 | alt=60.667 * u.deg, az=96.790 * u.deg, 74 | frame=altaz_frame, 75 | ) 76 | 77 | h = SkyCoord( 78 | alt=60.367 * u.deg, az=95.731 * u.deg, 79 | frame=altaz_frame, 80 | ) 81 | 82 | c = h.transform_to(CameraFrame(pointing_direction=pointing_direction)) 83 | 84 | print(c.x, c.y) 85 | 86 | 87 | if __name__ == '__main__': 88 | test_camera_to_altaz1() 89 | test_camera_to_altaz2() 90 | test_camera_to_altaz3() 91 | test_camera_to_altaz4() 92 | test_altaz_to_camera() 93 | -------------------------------------------------------------------------------- /tests/test_coordinates.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | from astropy.time import Time 4 | from datetime import datetime 5 | 6 | 7 | def test_to_astropy_time(): 8 | from fact.coordinates.utils import to_astropy_time 9 | 10 | t = Time('2013-01-01 00:00') 11 | 12 | assert to_astropy_time(datetime(2013, 1, 1)) == t 13 | assert to_astropy_time(pd.Timestamp(t.unix * 1e9)) == t 14 | assert to_astropy_time(pd.to_datetime('2013-01-01 00:00')) == t 15 | assert to_astropy_time(np.array('2013-01-01 00:00', dtype='datetime64[ns]')) == t 16 | 17 | 18 | def test_transforms(): 19 | from fact.coordinates import camera_to_equatorial 20 | 21 | df = pd.DataFrame({ 22 | 'az_tracking': [0, 90, 270], 23 | 'zd_tracking': [0, 10, 20], 24 | 'timestamp': pd.date_range('2017-10-01 22:00Z', periods=3, freq='10min'), 25 | 'x': [-100, 0, 150], 26 | 'y': [0, 100, 0], 27 | }) 28 | 29 | df['ra'], df['dec'] = camera_to_equatorial( 30 | df.x, 31 | df.y, 32 | df.zd_tracking, 33 | df.az_tracking, 34 | df.timestamp, 35 | ) 36 | 37 | 38 | def test_there_and_back_again(): 39 | from fact.coordinates import camera_to_equatorial, equatorial_to_camera 40 | 41 | df = pd.DataFrame({ 42 | 'az_tracking': [0, 90, 270], 43 | 'zd_tracking': [0, 10, 20], 44 | 'timestamp': pd.date_range('2017-10-01 22:00Z', periods=3, freq='10min'), 45 | 'x': [-100, 0, 150], 46 | 'y': [0, 100, 0], 47 | }) 48 | 49 | 
ra, dec = camera_to_equatorial( 50 | df.x, 51 | df.y, 52 | df.zd_tracking, 53 | df.az_tracking, 54 | df.timestamp, 55 | ) 56 | 57 | x, y = equatorial_to_camera(ra, dec, df.zd_tracking, df.az_tracking, df.timestamp) 58 | 59 | assert np.allclose(x, df.x) 60 | assert np.allclose(y, df.y) 61 | 62 | 63 | def test_there_and_back_again_horizontal(): 64 | from fact.coordinates import camera_to_horizontal, horizontal_to_camera 65 | 66 | df = pd.DataFrame({ 67 | 'az_tracking': [0, 90, 270], 68 | 'zd_tracking': [0, 10, 20], 69 | 'timestamp': pd.date_range('2017-10-01 22:00Z', periods=3, freq='10min'), 70 | 'x': [-100, 0, 150], 71 | 'y': [0, 100, 0], 72 | }) 73 | 74 | zd, az = camera_to_horizontal(df.x, df.y, df.zd_tracking, df.az_tracking) 75 | x, y = horizontal_to_camera(zd, az, df.zd_tracking, df.az_tracking) 76 | 77 | assert np.allclose(x, df.x) 78 | assert np.allclose(y, df.y) 79 | -------------------------------------------------------------------------------- /tests/test_io.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import tempfile 3 | import numpy as np 4 | import h5py 5 | import pytest 6 | 7 | 8 | 9 | def test_to_h5py(): 10 | from fact.io import to_h5py, read_h5py 11 | 12 | df = pd.DataFrame({ 13 | 'x': np.random.normal(size=50), 14 | 'N': np.random.randint(0, 10, dtype='uint8') 15 | }) 16 | 17 | with tempfile.NamedTemporaryFile() as f: 18 | to_h5py(df, f.name, key='test') 19 | 20 | with h5py.File(f.name, 'r') as hf: 21 | 22 | assert 'test' in hf.keys() 23 | 24 | g = hf['test'] 25 | 26 | assert 'x' in g.keys() 27 | assert 'N' in g.keys() 28 | 29 | df2 = read_h5py(f.name, key='test') 30 | df2.sort_index(1, inplace=True) 31 | df.sort_index(1, inplace=True) 32 | 33 | assert all(df.dtypes == df2.dtypes) 34 | assert all(df['x'] == df2['x']) 35 | assert all(df['N'] == df2['N']) 36 | 37 | 38 | def test_to_h5py_string(): 39 | from fact.io import to_h5py, read_h5py 40 | 41 | df = pd.DataFrame({ 42 | 'name': ['Mrk 501', 'Mrk 421', 'Crab'], 43 | }) 44 | 45 | with tempfile.NamedTemporaryFile() as f: 46 | to_h5py(df, f.name, key='test') 47 | df2 = read_h5py(f.name, key='test') 48 | 49 | assert all(df.dtypes == df2.dtypes) 50 | assert all(df['name'] == df2['name']) 51 | 52 | 53 | def test_to_h5py_datetime(): 54 | from fact.io import to_h5py, read_h5py 55 | 56 | df = pd.DataFrame({ 57 | 't_ns': pd.date_range('2017-01-01', freq='1ns', periods=100), 58 | 't_us': pd.date_range('2017-01-01', freq='1us', periods=100), 59 | 't_ms': pd.date_range('2017-01-01', freq='1ms', periods=100), 60 | 't_s': pd.date_range('2017-01-01', freq='1s', periods=100), 61 | 't_d': pd.date_range('2017-01-01', freq='1d', periods=100), 62 | }) 63 | 64 | with tempfile.NamedTemporaryFile() as f: 65 | to_h5py(df, f.name, key='test') 66 | df2 = read_h5py(f.name, key='test') 67 | 68 | for col in df.columns: 69 | assert all(df[col] == df2[col]) 70 | 71 | 72 | def test_to_h5py_append(): 73 | from fact.io import to_h5py, read_h5py 74 | 75 | df1 = pd.DataFrame({ 76 | 'x': np.random.normal(size=50), 77 | 'N': np.random.randint(0, 10, dtype='uint8') 78 | }) 79 | df2 = pd.DataFrame({ 80 | 'x': np.random.normal(size=50), 81 | 'N': np.random.randint(0, 10, dtype='uint8') 82 | }) 83 | 84 | with tempfile.NamedTemporaryFile() as f: 85 | to_h5py(df1, f.name, key='test', index=False) 86 | to_h5py(df2, f.name, key='test', mode='a', index=False) 87 | 88 | df_read = read_h5py(f.name, key='test') 89 | df_written = pd.concat([df1, df2], ignore_index=True) 90 | 91 | for col in 
df_written.columns: 92 | assert all(df_read[col] == df_written[col]) 93 | 94 | 95 | def test_to_h5py_append_second_group(): 96 | from fact.io import to_h5py, read_h5py 97 | 98 | df1 = pd.DataFrame({ 99 | 'x': np.random.normal(size=50), 100 | 'N': np.random.randint(0, 10, dtype='uint8') 101 | }) 102 | df2 = pd.DataFrame({ 103 | 'x': np.random.normal(size=50), 104 | 'N': np.random.randint(0, 10, dtype='uint8') 105 | }) 106 | 107 | with tempfile.NamedTemporaryFile() as f: 108 | to_h5py(df1, f.name, key='g1', index=False) 109 | to_h5py(df2, f.name, key='g2', index=False) 110 | 111 | df_g1 = read_h5py(f.name, key='g1') 112 | df_g2 = read_h5py(f.name, key='g2') 113 | 114 | for col in df_g1.columns: 115 | assert all(df_g1[col] == df1[col]) 116 | 117 | for col in df_g2.columns: 118 | assert all(df_g2[col] == df2[col]) 119 | 120 | 121 | def test_write_data_csv(): 122 | from fact.io import write_data 123 | 124 | df = pd.DataFrame({ 125 | 'x': np.random.normal(size=50), 126 | 'N': np.random.randint(0, 10, dtype='uint8') 127 | }) 128 | 129 | with tempfile.NamedTemporaryFile(suffix='.csv') as f: 130 | write_data(df, f.name) 131 | 132 | 133 | def test_write_data_json(): 134 | from fact.io import write_data 135 | 136 | df = pd.DataFrame({ 137 | 'x': np.random.normal(size=50), 138 | 'N': np.random.randint(0, 10, dtype='uint8') 139 | }) 140 | 141 | with tempfile.NamedTemporaryFile(suffix='.json') as f: 142 | write_data(df, f.name) 143 | 144 | 145 | def test_write_data_jsonlines(): 146 | from fact.io import write_data 147 | 148 | df = pd.DataFrame({ 149 | 'x': np.random.normal(size=50), 150 | 'N': np.random.randint(0, 10, dtype='uint8') 151 | }) 152 | 153 | with tempfile.NamedTemporaryFile(suffix='.jsonl') as f: 154 | write_data(df, f.name) 155 | 156 | 157 | def test_write_data_pandas_hdf(): 158 | from fact.io import write_data 159 | 160 | df = pd.DataFrame({ 161 | 'x': np.random.normal(size=50), 162 | 'N': np.random.randint(0, 10, dtype='uint8') 163 | }) 164 | 165 | with tempfile.NamedTemporaryFile(suffix='.hdf5') as f: 166 | write_data(df, f.name, use_h5py=False) 167 | 168 | 169 | def test_initialize_h5py(): 170 | from fact.io import initialize_h5py 171 | 172 | df = pd.DataFrame({ 173 | 'x': [1, 2, 3], 174 | 'name': ['Crab', 'Mrk 501', 'Test'], 175 | 't': ['2017-10-01 21:22', '2017-10-01 21:23', '2017-10-01 21:24'], 176 | 's': [[0, 1], [0, 2], [0, 3]], 177 | }) 178 | df['t'] = pd.to_datetime(df['t']) 179 | 180 | with tempfile.NamedTemporaryFile(suffix='.hdf5') as f: 181 | with h5py.File(f.name, 'w') as h5file: 182 | initialize_h5py(h5file, df.to_records(index=False), key='events') 183 | 184 | with h5py.File(f.name, 'r') as h5file: 185 | assert h5file['events']['name'].dtype == np.dtype('O') 186 | assert h5file['events']['x'].dtype == np.dtype('int64') 187 | assert h5file['events']['t'].dtype == np.dtype('S48') 188 | assert h5file['events']['s'].shape == (0, 2) 189 | 190 | 191 | def test_append_h5py(): 192 | from fact.io import initialize_h5py, append_to_h5py 193 | 194 | df = pd.DataFrame({ 195 | 'x': [1, 2, 3], 196 | 'name': ['Crab', 'Mrk 501', 'Test'], 197 | 't': ['2017-10-01 21:22', '2017-10-01 21:23', '2017-10-01 21:24'], 198 | 's': [[0, 1], [0, 2], [0, 3]], 199 | }) 200 | df['t'] = pd.to_datetime(df['t']) 201 | 202 | with tempfile.NamedTemporaryFile(suffix='.hdf5') as f: 203 | with h5py.File(f.name, 'w') as h5file: 204 | array = df.to_records(index=False) 205 | initialize_h5py(h5file, array, key='events') 206 | append_to_h5py(h5file, array, key='events') 207 | 208 | with h5py.File(f.name, 'r') as 
h5file: 209 | assert h5file['events']['name'].dtype == np.dtype('O') 210 | assert h5file['events']['x'].dtype == np.dtype('int64') 211 | assert h5file['events']['t'].dtype == np.dtype('S48') 212 | assert h5file['events']['s'].shape == (3, 2) 213 | assert h5file['events']['s'][0, 1] == 1 214 | assert h5file['events']['s'][2, 1] == 3 215 | 216 | 217 | def test_append_3d(): 218 | from fact.io import initialize_h5py, append_to_h5py 219 | 220 | array = np.zeros(100, dtype=[('x', 'float64', (3, 3))]) 221 | 222 | with tempfile.NamedTemporaryFile(suffix='.hdf5') as f: 223 | with h5py.File(f.name, 'w') as h5file: 224 | initialize_h5py(h5file, array, key='events') 225 | 226 | append_to_h5py(h5file, array, key='events') 227 | append_to_h5py(h5file, array, key='events') 228 | 229 | 230 | def test_write_data_h5py(): 231 | from fact.io import write_data 232 | 233 | df = pd.DataFrame({ 234 | 'x': np.random.normal(size=50), 235 | 'N': np.random.randint(0, 10, dtype='uint8') 236 | }) 237 | 238 | with tempfile.NamedTemporaryFile(suffix='.hdf5') as f: 239 | write_data(df, f.name, use_h5py=True) 240 | 241 | 242 | def test_write_lists_h5py(): 243 | from fact.io import to_h5py, read_h5py 244 | 245 | df = pd.DataFrame({ 246 | 'x': [[1.0, 2.0], [3.0, 4.0]] 247 | }) 248 | 249 | with tempfile.NamedTemporaryFile(suffix='.hdf5') as f: 250 | to_h5py(df, f.name) 251 | 252 | df = read_h5py(f.name, columns=['x']) 253 | 254 | assert df['x_0'].iloc[0] == 1.0 255 | 256 | 257 | def test_write_data_root(): 258 | from fact.io import write_data 259 | 260 | df = pd.DataFrame({ 261 | 'x': np.random.normal(size=50), 262 | 'N': np.random.randint(0, 10, dtype='uint8') 263 | }) 264 | 265 | with pytest.raises(IOError): 266 | with tempfile.NamedTemporaryFile(suffix='.root') as f: 267 | write_data(df, f.name) 268 | 269 | 270 | def test_read_data_csv(): 271 | ''' 272 | Write a csv file from a dataframe and then read it back again. 273 | ''' 274 | from fact.io import write_data, read_data 275 | 276 | df = pd.DataFrame({ 277 | 'x': np.random.normal(size=50).astype('float32'), 278 | 'N': np.random.randint(0, 10, dtype='uint8', size=50) 279 | }) 280 | 281 | with tempfile.NamedTemporaryFile(suffix='.csv') as f: 282 | write_data(df, f.name) 283 | 284 | dtypes = {'x': 'float32', 'N': 'uint8'} 285 | df_from_file = read_data(f.name, dtype=dtypes) 286 | 287 | assert df.equals(df_from_file) 288 | 289 | 290 | def test_read_data_h5py(): 291 | ''' 292 | Create a h5py hdf5 file from a dataframe and read it back. 
293 | ''' 294 | from fact.io import write_data, read_data 295 | 296 | df = pd.DataFrame({ 297 | 'x': np.random.normal(size=50).astype('float32'), 298 | 'N': np.random.randint(0, 10, dtype='uint8', size=50), 299 | 'name': [f"s{i}" for i in range(50)], 300 | }).sort_index(1) 301 | 302 | with tempfile.NamedTemporaryFile(suffix='.hdf5') as f: 303 | write_data(df, f.name, use_h5py=True, key='lecker_daten') 304 | 305 | df_from_file = read_data(f.name, key='lecker_daten').sort_index(1) 306 | assert set(df.columns) == set(df_from_file.columns) 307 | assert df.equals(df_from_file) 308 | 309 | 310 | def test_compression(): 311 | from fact.io import to_h5py, read_h5py 312 | 313 | df = pd.DataFrame({ 314 | 'x': np.random.normal(size=50), 315 | 'N': np.random.randint(0, 10, dtype='uint8'), 316 | 'idx': np.arange(50), 317 | }) 318 | 319 | with tempfile.NamedTemporaryFile() as f: 320 | to_h5py(df, f.name, key='test', compression=None) 321 | 322 | with h5py.File(f.name, 'r') as hf: 323 | 324 | assert 'test' in hf.keys() 325 | 326 | g = hf['test'] 327 | 328 | assert 'x' in g.keys() 329 | assert 'N' in g.keys() 330 | 331 | df2 = read_h5py(f.name, key='test') 332 | df2.sort_index(1, inplace=True) 333 | df.sort_index(1, inplace=True) 334 | 335 | assert all(df.dtypes == df2.dtypes) 336 | assert all(df['x'] == df2['x']) 337 | assert all(df['N'] == df2['N']) 338 | 339 | 340 | def test_read_simulated_spectrum(): 341 | from fact.io import read_simulated_spectrum 342 | import astropy.units as u 343 | 344 | s = read_simulated_spectrum('tests/resources/proton_header_test.hdf5') 345 | 346 | assert s['n_showers'] == 20000 347 | assert s['n_reuse'] == 20 348 | assert s['energy_min'] == 100 * u.GeV 349 | assert s['energy_max'] == 200 * u.TeV 350 | assert s['energy_spectrum_slope'] == -2.0 351 | -------------------------------------------------------------------------------- /tests/test_path.py: -------------------------------------------------------------------------------- 1 | from fact.path import parse, template_to_path, tree_path 2 | from functools import partial 3 | 4 | 5 | def test_parse(): 6 | 7 | input_paths = [ 8 | '/fact/raw/2016/01/01/20160101_011.fits.fz', 9 | '/fact/aux/2016/01/01/20160101.FSC_CONTROL_TEMPERATURE.fits', 10 | '/fact/aux/2016/01/01/20160101.log', 11 | '/hackypateng/20140115_079_079.root' 12 | ] 13 | 14 | result_dicts = [ 15 | {'prefix': '/fact/raw', 16 | 'night': 20160101, 17 | 'run': 11, 18 | 'suffix': '.fits.fz'}, 19 | {'prefix': '/fact/aux', 20 | 'night': 20160101, 21 | 'run': None, 22 | 'suffix': '.FSC_CONTROL_TEMPERATURE.fits'}, 23 | {'prefix': '/fact/aux', 24 | 'night': 20160101, 25 | 'run': None, 26 | 'suffix': '.log'}, 27 | {'prefix': 28 | '/hackypateng', 29 | 'night': 20140115, 30 | 'run': 79, 31 | 'suffix': '_079.root'}, 32 | ] 33 | 34 | for path, expected in zip(input_paths, result_dicts): 35 | parsed = parse(path) 36 | assert parsed == expected 37 | 38 | 39 | def test_tree_path(): 40 | 41 | night_run_tuples = [ 42 | (20160101, 1), 43 | (20160101, 2), 44 | (20130506, 3), 45 | ] 46 | 47 | result_paths = [ 48 | '/bar/2016/01/01/20160101_001.phs.jsonl.gz', 49 | '/bar/2016/01/01/20160101_002.phs.jsonl.gz', 50 | '/bar/2013/05/06/20130506_003.phs.jsonl.gz', 51 | ] 52 | 53 | photon_stream_path = partial( 54 | tree_path, 55 | prefix='/bar', 56 | suffix='.phs.jsonl.gz' 57 | ) 58 | for night_run, result in zip(night_run_tuples, result_paths): 59 | assert result == photon_stream_path(*night_run) 60 | 61 | 62 | def test_template_to_path(): 63 | night_run_tuples = [ 64 | (20160101, 1), 65 | 
(20160101, 2), 66 | (20130506, 3), 67 | ] 68 | 69 | single_pe_path_2runs = partial( 70 | template_to_path, 71 | template='/foo/{N}_{R}_{run2:03d}.root' 72 | ) 73 | 74 | result_paths = [ 75 | '/foo/20160101_001_003.root', 76 | '/foo/20160101_002_004.root', 77 | '/foo/20130506_003_005.root', 78 | ] 79 | 80 | for night_run, result in zip(night_run_tuples, result_paths): 81 | assert result == single_pe_path_2runs( 82 | *night_run, 83 | run2=night_run[1]+2) 84 | -------------------------------------------------------------------------------- /tests/test_time.py: -------------------------------------------------------------------------------- 1 | from datetime import datetime, timezone 2 | import pandas as pd 3 | import numpy as np 4 | 5 | 6 | def test_to_night_int_datetime(): 7 | """ In FACT a day goes from noon to noon and is sometimes referred to by a 8 | single integer: 20151231 for example refers to the night beginning 9 | on the evening of 31.12.2015 and lasting until the morning of 01.01.2016. 10 | """ 11 | from fact.time import to_night_int 12 | from datetime import datetime 13 | 14 | assert to_night_int(datetime(2015, 12, 31, 18, 0)) == 20151231 15 | assert to_night_int(datetime(2016, 1, 1, 4, 0)) == 20151231 16 | 17 | 18 | def test_night_integer_pandas(): 19 | """ for the convention, see the docstring of the test above 20 | """ 21 | from fact.time import to_night_int 22 | from pandas import to_datetime 23 | 24 | dates = to_datetime([ 25 | "2015-12-31 14:40:15", # afternoon --> 20151231 26 | "2016-01-01 04:40:15", # early morning --> 20151231 27 | "2016-01-01 12:40:15" # next day after noon --> 20160101 28 | ]) 29 | df = pd.DataFrame({'dates': dates}) 30 | 31 | assert (to_night_int(dates) == [20151231, 20151231, 20160101]).all() 32 | assert (to_night_int(df['dates']) == [20151231, 20151231, 20160101]).all() 33 | 34 | 35 | def test_datetime_to_mjd(): 36 | from fact.time import datetime_to_mjd, MJD_EPOCH 37 | 38 | assert datetime_to_mjd(MJD_EPOCH) == 0.0 39 | 40 | dt = datetime(2017, 1, 1, 12, 0) 41 | assert datetime_to_mjd(dt) == 57754.5 42 | 43 | 44 | def test_datetime_to_mjd_pandas(): 45 | from fact.time import datetime_to_mjd 46 | 47 | dates = pd.date_range('2017-05-17 12:00', freq='1d', periods=5) 48 | mjd = pd.Series([57890.5, 57891.5, 57892.5, 57893.5, 57894.5]) 49 | 50 | df = pd.DataFrame({'dates': dates}) 51 | 52 | assert all(datetime_to_mjd(dates) == mjd) 53 | assert all(datetime_to_mjd(df['dates']) == mjd) 54 | 55 | 56 | def test_datetime_to_mjd_numpy(): 57 | from fact.time import datetime_to_mjd 58 | 59 | dates = np.array(['2017-05-17 12:00', '2017-05-18 12:00'], dtype='datetime64') 60 | mjd = np.array([57890.5, 57891.5]) 61 | 62 | assert all(datetime_to_mjd(dates) == mjd) 63 | 64 | 65 | def test_mjd_to_datetime_float(): 66 | from fact.time import mjd_to_datetime, MJD_EPOCH 67 | 68 | assert mjd_to_datetime(0.0) == MJD_EPOCH 69 | 70 | dt = datetime(2017, 5, 17, 18, 0, tzinfo=timezone.utc) 71 | assert mjd_to_datetime(57890.75) == dt 72 | 73 | 74 | def test_mjd_to_datetime_numpy(): 75 | from fact.time import mjd_to_datetime 76 | 77 | mjd = np.arange(57890.0, 57891, 0.25) 78 | 79 | dt = np.array( 80 | ['2017-05-17 00:00', '2017-05-17 06:00', 81 | '2017-05-17 12:00', '2017-05-17 18:00'], 82 | dtype='datetime64[us]' 83 | ) 84 | 85 | assert all(mjd_to_datetime(mjd) == dt) 86 | 87 | 88 | def test_mjd_to_datetime_pandas(): 89 | from fact.time import mjd_to_datetime 90 | 91 | mjd = pd.Series(np.arange(57890.0, 57891, 0.25)) 92 | 93 | dt = pd.Series(pd.date_range('2017-05-17 00:00Z', freq='6h', periods=4)) 94 | 95 | assert
all(mjd_to_datetime(mjd) == dt) 96 | 97 | 98 | def test_fjd_to_datetime(): 99 | from fact.time import fjd_to_datetime 100 | 101 | assert fjd_to_datetime(16000.0) == datetime(2013, 10, 22, 0, 0, tzinfo=timezone.utc) 102 | fjds = pd.Series([0, 365, 18000.5]) 103 | dates = pd.to_datetime(['1970-01-01T00:00Z', '1971-01-01T00:00Z', '2019-04-14T12:00Z']) 104 | df = pd.DataFrame({'fjds': fjds}) 105 | assert (fjd_to_datetime(fjds) == dates).all() 106 | assert (fjd_to_datetime(df['fjds']) == pd.Series(dates)).all() 107 | 108 | 109 | def test_iso_to_datetime(): 110 | from fact.time import iso_to_datetime 111 | 112 | assert iso_to_datetime('2017-01-01T00:00') == datetime(2017, 1, 1, 0, 0, tzinfo=timezone.utc) 113 | assert iso_to_datetime('2017-01-01T00:00Z') == datetime(2017, 1, 1, 0, 0, tzinfo=timezone.utc) 114 | assert iso_to_datetime('2017-01-01T00:00+2') == datetime(2016, 12, 31, 22, 0, tzinfo=timezone.utc) 115 | 116 | timestamps = ['2017-01-01T20:00', '2017-01-01T22:00'] 117 | dates = pd.date_range(start='2017-01-01T20:00', freq='2h', periods=2) 118 | assert (iso_to_datetime(timestamps) == dates).all() 119 | --------------------------------------------------------------------------------
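The noon-to-noon night convention exercised in ``test_to_night_int_datetime`` boils down to shifting timestamps back by twelve hours before reading off the calendar date. A minimal sketch of the convention, assuming this shift-based definition; ``night_int`` is illustrative and not necessarily how ``fact.time.to_night_int`` is implemented:

.. code:: python

    from datetime import datetime, timedelta

    def night_int(dt):
        # shift back by 12 h: every instant of a night maps onto the
        # calendar date on which that night began
        night = dt - timedelta(hours=12)
        return night.year * 10000 + night.month * 100 + night.day

    assert night_int(datetime(2015, 12, 31, 18, 0)) == 20151231
    assert night_int(datetime(2016, 1, 1, 4, 0)) == 20151231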