├── km3pipe ├── tests │ ├── __init__.py │ ├── test_decorators.py │ ├── test_dataclasses.py │ ├── test_core.py │ ├── test_tools.py │ └── test_hardware.py ├── pumps │ ├── tests │ │ ├── __init__.py │ │ ├── test_aanet.py │ │ ├── test_daq.py │ │ ├── test_clb.py │ │ └── test_evt.py │ ├── __init__.py │ ├── aanet.py │ ├── jpp.py │ ├── ch.py │ ├── clb.py │ ├── daq.py │ └── evt.py ├── units.py ├── testing │ └── __init__.py ├── constants.py ├── __init__.py ├── decorators.py ├── __version__.py ├── logger.py ├── dataclasses.py ├── hardware.py ├── core.py └── tools.py ├── pipeinspector ├── __init__.py ├── tests │ ├── __init__.py │ └── test_widgets.py ├── playground │ ├── __init__.py │ ├── list.py │ ├── foo.py │ └── prototyping.py ├── settings.py ├── gui.py ├── app.py └── widgets.py ├── sphinx_requirements.txt ├── requirements.txt ├── dev-requirements.txt ├── docs ├── api │ ├── modules.rst │ ├── km3pipe.testing.rst │ ├── km3pipe.pumps.rst │ └── km3pipe.rst ├── index.rst ├── pipeinspector.rst ├── examples.rst ├── help_pumps.rst ├── help_modules.rst ├── data_structures.rst ├── Makefile └── conf.py ├── MANIFEST.in ├── .travis.yml ├── scripts ├── testrunner.py └── logging.conf ├── setenv.sh ├── README.rst ├── examples ├── evt_pump.py ├── aanet_pump.py ├── ch_pump.py ├── clb_pump.py ├── ch_event_dump.py ├── module_workflow.py └── daq_pump.py ├── .gitignore ├── setup.py ├── LICENSE ├── km3modules └── __init__.py └── pylint.rc /km3pipe/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pipeinspector/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /km3pipe/pumps/tests/__init__.py: -------------------------------------------------------------------------------- 1 | 
-------------------------------------------------------------------------------- /pipeinspector/tests/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /pipeinspector/playground/__init__.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /sphinx_requirements.txt: -------------------------------------------------------------------------------- 1 | sphinxcontrib-napoleon 2 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | docopt 2 | controlhost 3 | urwid 4 | numpy 5 | ipython 6 | 7 | -------------------------------------------------------------------------------- /dev-requirements.txt: -------------------------------------------------------------------------------- 1 | docopt 2 | controlhost 3 | sphinxcontrib-napoleon 4 | mock 5 | numpy 6 | py 7 | pytest 8 | urwid 9 | -------------------------------------------------------------------------------- /docs/api/modules.rst: -------------------------------------------------------------------------------- 1 | API Documentation 2 | ================= 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | km3pipe 8 | -------------------------------------------------------------------------------- /km3pipe/units.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: units.py 3 | """ 4 | ... 
5 | 6 | """ 7 | from __future__ import division, absolute_import, print_function 8 | -------------------------------------------------------------------------------- /docs/api/km3pipe.testing.rst: -------------------------------------------------------------------------------- 1 | km3pipe.testing package 2 | ======================= 3 | 4 | Module contents 5 | --------------- 6 | 7 | .. automodule:: km3pipe.testing 8 | :members: 9 | :undoc-members: 10 | :show-inheritance: 11 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include LICENSE 3 | recursive-include docs * 4 | recursive-include km3pipe/tests *.py 5 | recursive-include km3pipe/pumps/tests *.py 6 | 7 | recursive-exclude docs *.pyc 8 | recursive-exclude docs *.pyo 9 | prune docs/_build 10 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "2.7" 4 | - "3.2" 5 | - "3.3" 6 | - "3.4" 7 | # - "nightly" 8 | # 9 | # command to install dependencies 10 | install: "pip install -r requirements.txt" 11 | # 12 | # command to run tests 13 | script: py.test 14 | -------------------------------------------------------------------------------- /scripts/testrunner.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | 3 | import sys 4 | #sys.path.append('..') 5 | import unittest 6 | 7 | 8 | loader = unittest.TestLoader() 9 | tests = loader.discover('..') 10 | testRunner = unittest.runner.TextTestRunner() 11 | testRunner.run(tests) 12 | -------------------------------------------------------------------------------- /setenv.sh: -------------------------------------------------------------------------------- 1 | #!/bin/bash 2 | echo "Setting environment 
for KM3Pipe" 3 | 4 | ENV_DIR=$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd ) 5 | 6 | if ! ( echo ${ENV_DIR} | egrep ${PYTHONPATH} > /dev/null ); then 7 | export PYTHONPATH=${ENV_DIR}:${PYTHONPATH} 8 | fi 9 | 10 | export KM3PIPEDIR=${ENV_DIR} 11 | 12 | alias pipeinspector='python ${ENV_DIR}/pipeinspector/app.py' 13 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | km3pipe 2 | ======= 3 | 4 | Extemporary analysis framework for KM3NeT 5 | 6 | Read the docs at http://km3pipe.readthedocs.org 7 | 8 | .. image:: https://travis-ci.org/tamasgal/km3pipe.svg?branch=develop 9 | :target: https://travis-ci.org/tamasgal/km3pipe 10 | 11 | .. image:: https://img.shields.io/badge/docs-latest-brightgreen.svg?style=flat 12 | :target: http://km3pipe.readthedocs.org/en/latest/ 13 | -------------------------------------------------------------------------------- /examples/evt_pump.py: -------------------------------------------------------------------------------- 1 | __author__ = 'tamasgal' 2 | 3 | from km3pipe import Module, Pipeline 4 | from km3pipe.pumps import EvtPump 5 | 6 | class PrintBlob(Module): 7 | def process(self, blob): 8 | print(blob.keys()) 9 | return blob 10 | 11 | pipeline = Pipeline() 12 | pipeline.attach(EvtPump, 'evtpump', filename='files/example_numuNC.evt') 13 | pipeline.attach(PrintBlob, 'printer') 14 | pipeline.drain(1) 15 | 16 | -------------------------------------------------------------------------------- /km3pipe/pumps/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: __init__.py 3 | """ 4 | A collection of pumps for different kinds of data formats. 
5 | 6 | """ 7 | from __future__ import division, absolute_import, print_function 8 | 9 | from km3pipe.pumps.evt import EvtPump 10 | from km3pipe.pumps.daq import DAQPump 11 | from km3pipe.pumps.clb import CLBPump 12 | from km3pipe.pumps.aanet import AanetPump 13 | from km3pipe.pumps.jpp import JPPPump 14 | from km3pipe.pumps.ch import CHPump 15 | -------------------------------------------------------------------------------- /km3pipe/testing/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: __init__.py 3 | """ 4 | Common unit testing support for km3pipe. 5 | 6 | """ 7 | from __future__ import division, absolute_import, print_function 8 | 9 | try: 10 | from unittest2 import TestCase, skip, skipIf 11 | except ImportError: 12 | from unittest import TestCase, skip, skipIf 13 | 14 | try: 15 | from cStringIO import StringIO 16 | except ImportError: 17 | try: 18 | from StringIO import StringIO 19 | except ImportError: 20 | from io import StringIO 21 | 22 | from mock import MagicMock 23 | -------------------------------------------------------------------------------- /examples/aanet_pump.py: -------------------------------------------------------------------------------- 1 | from __future__ import division, absolute_import, print_function 2 | 3 | __author__ = 'tamasgal' 4 | 5 | from km3pipe import Pipeline, Module 6 | from km3pipe.pumps import AanetPump 7 | 8 | 9 | class PrintBlob(Module): 10 | def process(self, blob): 11 | print(blob) 12 | hit = blob['a_hit'] 13 | print(hit) 14 | print(hit.t) 15 | return blob 16 | 17 | pipeline = Pipeline() 18 | pipeline.attach(AanetPump, 'aanet_pump', filename='foo.aa.root') 19 | pipeline.attach(PrintBlob, 'print_blob') 20 | pipeline.drain(1) 21 | 22 | 23 | -------------------------------------------------------------------------------- /examples/ch_pump.py: -------------------------------------------------------------------------------- 1 | import time 2 | 
3 | from km3pipe import Pipeline, Module 4 | from km3pipe.pumps import CHPump 5 | 6 | 7 | class CHPrinter(Module): 8 | def process(self, blob): 9 | print("New blob:") 10 | print blob['CHPrefix'] 11 | print blob['CHData'] 12 | return blob 13 | 14 | 15 | pipe = Pipeline() 16 | pipe.attach(CHPump, host='127.0.0.1', 17 | port=5553, 18 | tags="foo, narf", 19 | timeout=1000, 20 | max_queue=42) 21 | pipe.attach(CHPrinter) 22 | pipe.drain() 23 | 24 | -------------------------------------------------------------------------------- /scripts/logging.conf: -------------------------------------------------------------------------------- 1 | [formatters] 2 | keys: detailed,simple 3 | 4 | [handlers] 5 | keys: console 6 | 7 | [loggers] 8 | keys: root,core,pumps 9 | 10 | [formatter_simple] 11 | format: %(name)s:%(levelname)s: %(message)s 12 | 13 | [formatter_detailed] 14 | format: %(name)s:%(levelname)s %(module)s:$(lineno)d: %(message)s 15 | 16 | [handler_console] 17 | class: StreamHandler 18 | args: [] 19 | formatter: simple 20 | 21 | [logger_root] 22 | level: CRITICAL 23 | handlers: console 24 | 25 | [logger_core] 26 | level: CRITICAL 27 | qualname: km3pipe.core 28 | handlers: console 29 | 30 | [logger_pumps] 31 | level: CRITICAL 32 | qualname: km3pipe.pumps 33 | handlers: console 34 | -------------------------------------------------------------------------------- /km3pipe/pumps/tests/test_aanet.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: test_aanet.py 3 | # pylint: disable=C0111,R0904,R0201 4 | """ 5 | ... 
6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | from km3pipe.testing import * 11 | 12 | from km3pipe.pumps.aanet import AanetPump 13 | 14 | #try: 15 | # # pylint: disable=F0401,W0611 16 | # import aa 17 | #except ImportError: 18 | # NO_AA = True 19 | #else: 20 | # NO_AA = False 21 | 22 | #import aa 23 | #import ROOT 24 | 25 | #@skipIf(NO_AA, "Skipping tests for aanet") 26 | #class TestAanetPump(TestCase): 27 | 28 | # def test_aanetpump_init(self): 29 | # pump = AanetPump() 30 | 31 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. KM3Pipe documentation master file, created by 2 | sphinx-quickstart on Sat Oct 4 19:16:43 2014. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to KM3Pipe's documentation! 7 | =================================== 8 | 9 | Contents: 10 | 11 | .. toctree:: 12 | :maxdepth: 3 13 | 14 | data_structures 15 | help_modules 16 | help_pumps 17 | examples 18 | pipeinspector 19 | api/modules 20 | 21 | .. include:: ../README.rst 22 | 23 | Indices and tables 24 | ================== 25 | 26 | * :ref:`genindex` 27 | * :ref:`modindex` 28 | * :ref:`search` 29 | 30 | -------------------------------------------------------------------------------- /km3pipe/constants.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: constants.py 3 | # pylint: disable=C0103 4 | """ 5 | The constants used in KM3Pipe. 6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | #TODO: this module should be refactored soon! 
11 | 12 | import math 13 | 14 | 15 | c = 2.99792458e8 # m/s 16 | 17 | n_water_antares_phase = 1.3499 18 | n_water_antares_group = 1.3797 19 | n_water_antares = n_water_antares_group 20 | theta_cherenkov_water_antares = math.acos(1 / n_water_antares_phase) 21 | c_water_antares = c / n_water_antares_group 22 | 23 | # Math 24 | pi = math.pi 25 | e = math.e 26 | 27 | # Default values for time residuals 28 | dt_window_l = -15 # ns 29 | dt_window_h = +25 # ns 30 | -------------------------------------------------------------------------------- /km3pipe/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: __init__.py 3 | """ 4 | The extemporary KM3NeT analysis framework. 5 | 6 | """ 7 | from __future__ import division, absolute_import, print_function 8 | 9 | from km3pipe.__version__ import version, version_info 10 | 11 | try: 12 | from km3pipe.core import Pipeline, Module, Pump, Blob, Geometry 13 | except ImportError: 14 | print("Numpy is needed for KM3Pipe") 15 | 16 | __author__ = "Tamas Gal" 17 | __copyright__ = ("Copyright 2015, Tamas Gal and the KM3NeT collaboration " 18 | "(http://km3net.org)") 19 | __credits__ = [] 20 | __license__ = "MIT" 21 | __version__ = version 22 | __maintainer__ = "Tamas Gal" 23 | __email__ = "tgal@km3net.de" 24 | __status__ = "Development" 25 | -------------------------------------------------------------------------------- /docs/pipeinspector.rst: -------------------------------------------------------------------------------- 1 | .. _pipeinspector: 2 | 3 | PipeInspector 4 | ============= 5 | 6 | PipeInspector is a tool to inspect different kinds of data formats used 7 | within the KM3NeT collaboration. It utilises the KM3Pipe framework to 8 | deal with data I/O and allows easy access to the stored information. 9 | 10 | .. 
image:: http://tamasgal.com/km3net/PipeInspector_Screenshot.png 11 | :alt: PipeInspector 12 | :width: 700 13 | :align: center 14 | 15 | It is currently in an early alpha status, but already able to handle the 16 | DAQ binary data, ROOT and Aanet-ROOT format. 17 | 18 | If you installed KM3Pipe via `pip`, you'll be able to launch `pipeinspector` 19 | directly from the terminal:: 20 | 21 | pipeinspector /path/to/data/file.ext 22 | 23 | -------------------------------------------------------------------------------- /docs/api/km3pipe.pumps.rst: -------------------------------------------------------------------------------- 1 | km3pipe.pumps package 2 | ===================== 3 | 4 | Submodules 5 | ---------- 6 | 7 | km3pipe.pumps.daq module 8 | ------------------------ 9 | 10 | .. automodule:: km3pipe.pumps.daq 11 | :members: 12 | :undoc-members: 13 | :show-inheritance: 14 | 15 | km3pipe.pumps.evt module 16 | ------------------------ 17 | 18 | .. automodule:: km3pipe.pumps.evt 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | km3pipe.pumps.clb module 24 | ------------------------ 25 | 26 | .. automodule:: km3pipe.pumps.clb 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | Module contents 32 | --------------- 33 | 34 | .. automodule:: km3pipe.pumps 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | -------------------------------------------------------------------------------- /km3pipe/decorators.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: decorators.py 3 | # pylint: disable=locally-disabled 4 | """ 5 | Function decorators. 6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | __author__ = 'tamasgal' 11 | 12 | 13 | def remain_file_pointer(function): 14 | """Remain the file pointer position after calling the decorated function 15 | 16 | This decorator assumes that the last argument is the file handler. 
17 | 18 | """ 19 | def wrapper(*args, **kwargs): 20 | """Wrap the function and remain its parameters and return values""" 21 | file_obj = args[-1] 22 | old_position = file_obj.tell() 23 | return_value = function(*args, **kwargs) 24 | file_obj.seek(old_position, 0) 25 | return return_value 26 | return wrapper 27 | 28 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | 5 | # C extensions 6 | *.so 7 | 8 | # Distribution / packaging 9 | .Python 10 | env/ 11 | bin/ 12 | build/ 13 | develop-eggs/ 14 | dist/ 15 | eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | *.egg-info/ 22 | .installed.cfg 23 | *.egg 24 | 25 | # Installer logs 26 | pip-log.txt 27 | pip-delete-this-directory.txt 28 | 29 | # Unit test / coverage reports 30 | htmlcov/ 31 | .tox/ 32 | .coverage 33 | .cache 34 | nosetests.xml 35 | coverage.xml 36 | 37 | # Translations 38 | *.mo 39 | 40 | # Mr Developer 41 | .mr.developer.cfg 42 | .project 43 | .pydevproject 44 | 45 | # Rope 46 | .ropeproject 47 | 48 | # Django stuff: 49 | *.log 50 | *.pot 51 | 52 | # Sphinx documentation 53 | docs/_build/ 54 | 55 | # VI swap files 56 | *.swp 57 | 58 | # PyCharm files 59 | .idea 60 | 61 | -------------------------------------------------------------------------------- /docs/examples.rst: -------------------------------------------------------------------------------- 1 | .. _examples: 2 | 3 | Examples 4 | ======== 5 | 6 | KM3Pipe Workflow 7 | ---------------- 8 | 9 | KM3Pipe is a basic framework which tries to give you a lose structure and 10 | workflow for data analysis. It has a simple, yet powerful module system 11 | which allows you to organise and reuse code. 12 | 13 | The main structure is a ``Pipeline`` which is meant to hold everything 14 | together. 
The building blocks are simply called ``Modules``. 15 | 16 | To setup a workflow, you first create a pipeline, attach the modules to it 17 | and to fire up the analysis chain, you call ``.drain()`` on your pipeline 18 | and let the flow go. 19 | 20 | 21 | The following script shows the module system of km3pipe. 22 | There is a ``Pump`` which is in this case a dummy data generator. The other 23 | Modules do some modifications on the data and pass them through to the next 24 | module in the pipeline. 25 | 26 | .. literalinclude:: ../examples/module_workflow.py 27 | :language: python 28 | :linenos: 29 | -------------------------------------------------------------------------------- /km3pipe/__version__.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | # Filename: __version__.py 4 | # pylint: disable=C0103 5 | """ 6 | Pep 386 compliant version info. 7 | 8 | (major, minor, micro, alpha/beta/rc/final, #) 9 | (1, 1, 2, 'alpha', 0) => "1.1.2.dev" 10 | (1, 2, 0, 'beta', 2) => "1.2b2" 11 | 12 | """ 13 | version_info = (0, 9, 14, 'final', 0) 14 | 15 | def _get_version(version_info): 16 | """Return a PEP 386-compliant version number.""" 17 | assert len(version_info) == 5 18 | assert version_info[3] in ('alpha', 'beta', 'rc', 'final') 19 | 20 | parts = 2 if version_info[2] == 0 else 3 21 | main = '.'.join(map(str, version_info[:parts])) 22 | 23 | sub = '' 24 | if version_info[3] == 'alpha' and version_info[4] == 0: 25 | sub = '.dev' 26 | elif version_info[3] != 'final': 27 | mapping = {'alpha': 'a', 'beta': 'b', 'rc': 'c'} 28 | sub = mapping[version_info[3]] + str(version_info[4]) 29 | 30 | return str(main + sub) 31 | 32 | version = _get_version(version_info) 33 | -------------------------------------------------------------------------------- /km3pipe/logger.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: logger.py 3 | # 
pylint: disable=locally-disabled,C0103 4 | """ 5 | The logging facility. 6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | __author__ = 'tamasgal' 11 | 12 | import logging 13 | import logging.config 14 | try: 15 | logging.config.fileConfig('logging.conf') 16 | except Exception: 17 | logging.basicConfig() 18 | 19 | logging.addLevelName(logging.INFO, "\033[1;32m%s\033[1;0m" % 20 | logging.getLevelName(logging.INFO)) 21 | logging.addLevelName(logging.DEBUG, "\033[1;34m%s\033[1;0m" % 22 | logging.getLevelName(logging.DEBUG)) 23 | logging.addLevelName(logging.WARNING, "\033[1;33m%s\033[1;0m" % 24 | logging.getLevelName(logging.WARNING)) 25 | logging.addLevelName(logging.ERROR, "\033[1;31m%s\033[1;0m" % 26 | logging.getLevelName(logging.ERROR)) 27 | 28 | # pylint: disable=C0103 29 | formatter = logging.Formatter('[%(levelname)s] %(name)s: %(message)s') 30 | 31 | 32 | 33 | -------------------------------------------------------------------------------- /docs/help_pumps.rst: -------------------------------------------------------------------------------- 1 | .. _help_pumps: 2 | 3 | Pumps 4 | ===== 5 | 6 | The pump is a special type of ``Module`` and is usually the first one to be 7 | attached to a pipeline. It is responsible for data generation by reading data 8 | files or streams from socket connections. 9 | 10 | ``Pump`` inherits from the ``Module`` class. The ``__init__()`` method should 11 | be used to set up the file or socket handler and the ``finish()`` has to 12 | close them. The actual data is passed via the ``process()`` method. A 13 | data chunk is internally called ``Blob`` and usually represents an event. 14 | 15 | To end the data pumping, the pump has to raise a ``StopIteration`` exception. 16 | One elegant way to implement this in Python is using a generator. 
17 | 18 | The following example shows a very basic pump, which simply initialises a 19 | list of dictionaries and "pumps" one blob after another on 20 | each ``process()`` call to the next module in the pipeline. 21 | 22 | 23 | .. literalinclude:: ../examples/module_workflow.py 24 | :pyobject: DummyPump 25 | :linenos: 26 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | from setuptools import setup 2 | 3 | from km3pipe import version 4 | 5 | setup(name='km3pipe', 6 | version=version, 7 | url='http://github.com/tamasgal/km3pipe/', 8 | description='An analysis framework for KM3NeT', 9 | author='Tamas Gal', 10 | author_email='tgal@km3net.de', 11 | packages=['km3pipe', 'km3pipe.testing', 'km3pipe.pumps', 12 | 'km3modules', 'pipeinspector'], 13 | include_package_data=True, 14 | platforms='any', 15 | install_requires=[ 16 | 'numpy', 17 | 'controlhost', 18 | 'urwid', 19 | 'docopt', 20 | ], 21 | entry_points={ 22 | 'console_scripts': [ 23 | 'pipeinspector=pipeinspector.app:main', 24 | ], 25 | }, 26 | classifiers=[ 27 | 'Development Status :: 3 - Alpha', 28 | 'Intended Audience :: Developers', 29 | 'Intended Audience :: Science/Research', 30 | 'Programming Language :: Python', 31 | ], 32 | ) 33 | 34 | __author__ = 'Tamas Gal' 35 | -------------------------------------------------------------------------------- /pipeinspector/settings.py: -------------------------------------------------------------------------------- 1 | __author__ = 'tamasgal' 2 | 3 | 4 | class UI(object): 5 | """Represents the settings for the UI.""" 6 | fg = 'light gray' 7 | bg = 'black' 8 | 9 | palette = [ 10 | ('default', fg, bg), 11 | ('highlight', fg+',standout', bg), 12 | ('header', 'white', 'dark cyan'), 13 | ('footer', 'light gray', 'dark blue'), 14 | ('body', 'dark cyan', '', 'standout'), 15 | ('focus', 'dark red', '', 'standout'), 16 | ('head', 'light red', 'black'), 
17 | ('blob', 'yellow', 'dark cyan'), 18 | ('blob_selected', 'dark cyan', 'yellow'), 19 | ('blob_scale', 'dark cyan', 'black'), 20 | ] 21 | 22 | keys = { 23 | 'select': ('return', 'enter'), 24 | 'inspect': ('x', 'X'), 25 | 'escape': ('esc', 'q', 'Q'), 26 | 'left': ('left', 'h'), 27 | 'right': ('right', 'l'), 28 | 'up': ('up', 'k'), 29 | 'down': ('down', 'j'), 30 | 'home': ('0', '^'), 31 | 'end': ('$',), 32 | 'goto': ('g', 'G'), 33 | 'help': ('?',), 34 | } 35 | -------------------------------------------------------------------------------- /examples/clb_pump.py: -------------------------------------------------------------------------------- 1 | from __future__ import division, absolute_import, print_function 2 | 3 | __author__ = 'tamasgal' 4 | 5 | import matplotlib.pyplot as plt 6 | 7 | from km3pipe import Pipeline, Module 8 | from km3pipe.pumps import CLBPump 9 | from km3modules import StatusBar 10 | 11 | class TOTHisto(Module): 12 | def __init__(self, **context): 13 | super(self.__class__, self).__init__(**context) 14 | self.tots = [] 15 | 16 | def process(self, blob): 17 | for pmt_data in blob['PMTData']: 18 | self.tots.append(pmt_data.tot) 19 | return blob 20 | 21 | def finish(self): 22 | plt.hist(self.tots, 80) 23 | plt.xlabel("ToT [ns]") 24 | plt.ylabel('count') 25 | plt.show() 26 | 27 | class PrintCLBHeader(Module): 28 | def process(self, blob): 29 | print(blob['CLBHeader']) 30 | return blob 31 | 32 | pipeline = Pipeline() 33 | pipeline.attach(CLBPump, 34 | filename='/Users/tamasgal/Data/KM3NeT/du1-clb/DOM2_run23.dat') 35 | pipeline.attach(StatusBar) 36 | pipeline.attach(PrintCLBHeader) 37 | pipeline.attach(TOTHisto) 38 | pipeline.drain(30) 39 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | The MIT License (MIT) 2 | 3 | Copyright (c) 2014 Tamas Gal 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a 
copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | 23 | -------------------------------------------------------------------------------- /docs/help_modules.rst: -------------------------------------------------------------------------------- 1 | .. _help_modules: 2 | 3 | Modules 4 | ======= 5 | 6 | A module is a configurable building block which can be attached to a pipeline. 7 | It has a ``process()`` method, which is called every time with the current 8 | data ("blob") in the pipeline cycle. This piece of data can be analysed, 9 | manipulated and finally returned to allow the handover to the next module 10 | in the pipeline system. 11 | 12 | Instance variables can be initialised within the ``__init__()`` method. 13 | User defined parameters are accessible via the ``get()`` method, which either 14 | returns the actual value or ``None`` if not defined. 15 | This allows an easy way to define default values as seen in the example below. 16 | 17 | .. 
literalinclude:: ../examples/module_workflow.py 18 | :pyobject: Foo 19 | :emphasize-lines: 5-6 20 | :linenos: 21 | 22 | To override the default parameters, the desired values can be set when 23 | attaching the module to the pipeline. Always use the class itself, since 24 | the ``attach()`` method of the pipeline will care about the initialisation:: 25 | 26 | pipe.attach(Foo, 'foo_module', foo='dummyfoo', bar='dummybar') 27 | -------------------------------------------------------------------------------- /examples/ch_event_dump.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | import time 3 | 4 | from km3pipe import Pipeline, Module 5 | from km3pipe.pumps import CHPump 6 | 7 | 8 | class CHPrinter(Module): 9 | def process(self, blob): 10 | print("New blob:") 11 | print blob['CHPrefix'] 12 | return blob 13 | 14 | 15 | class Dumper(Module): 16 | def __init__(self, **context): 17 | super(self.__class__, self).__init__(**context) 18 | self.counter = 0 19 | 20 | def process(self, blob): 21 | if 'CHData' in blob: 22 | tag = str(blob['CHPrefix'].tag) 23 | data = blob['CHData'] 24 | self.dump(data, tag) 25 | return blob 26 | 27 | def dump(self, data, tag): 28 | with open('{0}-{1:06}.dat'.format(tag, self.counter), 'w') as f: 29 | self.counter += 1 30 | f.write(data) 31 | 32 | 33 | 34 | pipe = Pipeline() 35 | pipe.attach(CHPump, host='127.0.0.1', 36 | port=5553, 37 | tags='IO_EVT, IO_TSL, IO_SUM, TRG_PARS', 38 | timeout=60*60*24, 39 | max_queue=42) 40 | pipe.attach(CHPrinter) 41 | pipe.attach(Dumper) 42 | pipe.drain() 43 | -------------------------------------------------------------------------------- /examples/module_workflow.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | from __future__ import division, absolute_import, print_function 3 | 4 | __author__ = 'tamasgal' 5 | 6 | from km3pipe.core import Pipeline, Module, Pump 7 | 8 | 9 | class 
Pump(Pump): 10 | """A pump demonstration with a dummy list as data.""" 11 | def __init__(self, **context): 12 | super(self.__class__, self).__init__(**context) 13 | self.data = [{'nr': 1}, {'nr': 2}] 14 | self.blobs = self.blob_generator() 15 | 16 | def process(self, blob): 17 | return next(self.blobs) 18 | 19 | def blob_generator(self): 20 | """Create a blob generator.""" 21 | for blob in self.data: 22 | yield blob 23 | 24 | 25 | class Foo(Module): 26 | 27 | def __init__(self, **context): 28 | super(self.__class__, self).__init__(**context) 29 | self.foo = self.get('foo') or 'default_foo' 30 | self.bar = self.get('bar') or 23 31 | 32 | def process(self, blob): 33 | print("This is the current blob: " + str(blob)) 34 | blob['foo_entry'] = self.foo 35 | return blob 36 | 37 | 38 | class Moo(Module): 39 | def process(self, blob): 40 | blob['moo_entry'] = 42 41 | return blob 42 | 43 | 44 | class PrintBlob(Module): 45 | def process(self, blob): 46 | print(blob) 47 | return blob 48 | 49 | 50 | pipe = Pipeline() 51 | pipe.attach(Pump, 'the_pump') 52 | pipe.attach(Foo, 'foo_module', foo='dummyfoo', bar='dummybar') 53 | pipe.attach(Moo, 'moo_module') 54 | pipe.attach(PrintBlob, 'print_blob') 55 | pipe.drain() 56 | -------------------------------------------------------------------------------- /docs/api/km3pipe.rst: -------------------------------------------------------------------------------- 1 | km3pipe package 2 | =============== 3 | 4 | Subpackages 5 | ----------- 6 | 7 | .. toctree:: 8 | 9 | km3pipe.pumps 10 | km3pipe.testing 11 | 12 | Submodules 13 | ---------- 14 | 15 | km3pipe.constants module 16 | ------------------------ 17 | 18 | .. automodule:: km3pipe.constants 19 | :members: 20 | :undoc-members: 21 | :show-inheritance: 22 | 23 | km3pipe.core module 24 | ------------------- 25 | 26 | .. automodule:: km3pipe.core 27 | :members: 28 | :undoc-members: 29 | :show-inheritance: 30 | 31 | km3pipe.dataclasses module 32 | -------------------------- 33 | 34 | .. 
automodule:: km3pipe.dataclasses 35 | :members: 36 | :undoc-members: 37 | :show-inheritance: 38 | 39 | km3pipe.decorators module 40 | ------------------------- 41 | 42 | .. automodule:: km3pipe.decorators 43 | :members: 44 | :undoc-members: 45 | :show-inheritance: 46 | 47 | km3pipe.hardware module 48 | ----------------------- 49 | 50 | .. automodule:: km3pipe.hardware 51 | :members: 52 | :undoc-members: 53 | :show-inheritance: 54 | 55 | km3pipe.logger module 56 | --------------------- 57 | 58 | .. automodule:: km3pipe.logger 59 | :members: 60 | :undoc-members: 61 | :show-inheritance: 62 | 63 | km3pipe.tools module 64 | -------------------- 65 | 66 | .. automodule:: km3pipe.tools 67 | :members: 68 | :undoc-members: 69 | :show-inheritance: 70 | 71 | km3pipe.units module 72 | -------------------- 73 | 74 | .. automodule:: km3pipe.units 75 | :members: 76 | :undoc-members: 77 | :show-inheritance: 78 | 79 | 80 | Module contents 81 | --------------- 82 | 83 | .. automodule:: km3pipe 84 | :members: 85 | :undoc-members: 86 | :show-inheritance: 87 | -------------------------------------------------------------------------------- /pipeinspector/playground/list.py: -------------------------------------------------------------------------------- 1 | import urwid 2 | import random 3 | 4 | class ItemWidget (urwid.WidgetWrap): 5 | 6 | def __init__ (self, id, description): 7 | self.id = id 8 | self.content = 'item %s: %s...' 
% (str(id), description[:25]) 9 | self.item = [ 10 | ('fixed', 15, urwid.Padding(urwid.AttrWrap( 11 | urwid.Text('item %s' % str(id)), 'body', 'focus'), left=2)), 12 | urwid.AttrWrap(urwid.Text('%s' % description), 'body', 'focus'), 13 | ] 14 | w = urwid.Columns(self.item) 15 | self.__super.__init__(w) 16 | 17 | def selectable (self): 18 | return True 19 | 20 | def keypress(self, size, key): 21 | return key 22 | 23 | def main (): 24 | 25 | palette = [ 26 | ('body','dark cyan', '', 'standout'), 27 | ('focus','dark red', '', 'standout'), 28 | ('head','light red', 'black'), 29 | ] 30 | 31 | lorem = [ 32 | 'Lorem ipsum dolor sit amet, consectetur adipiscing elit.', 33 | 'Sed sollicitudin, nulla id viverra pulvinar.', 34 | 'Cras a magna sit amet felis fringilla lobortis.', 35 | ] 36 | 37 | def keystroke (input): 38 | if input in ('q', 'Q'): 39 | raise urwid.ExitMainLoop() 40 | 41 | if input is 'enter': 42 | focus = listbox.get_focus()[0].content 43 | view.set_header(urwid.AttrWrap(urwid.Text( 44 | 'selected: %s' % str(focus)), 'head')) 45 | 46 | items = [] 47 | for i in range(100): 48 | items.append(ItemWidget(i, random.choice(lorem))) 49 | 50 | header = urwid.AttrMap(urwid.Text('selected:'), 'head') 51 | listbox = urwid.ListBox(urwid.SimpleListWalker(items)) 52 | view = urwid.Frame(urwid.AttrWrap(listbox, 'body'), header=header) 53 | loop = urwid.MainLoop(view, palette, unhandled_input=keystroke) 54 | loop.run() 55 | 56 | if __name__ == '__main__': 57 | main() 58 | -------------------------------------------------------------------------------- /examples/daq_pump.py: -------------------------------------------------------------------------------- 1 | from __future__ import division, absolute_import, print_function 2 | 3 | __author__ = 'tamasgal' 4 | 5 | from km3pipe import Pipeline, Module 6 | from km3pipe.pumps import DAQPump 7 | 8 | 9 | class DAQEventPrinter(Module): 10 | def process(self, blob): 11 | try: 12 | print(blob['DAQEvent']) 13 | except KeyError: 14 | pass 
15 | return blob 16 | 17 | 18 | class DAQSummaryslicePrinter(Module): 19 | def process(self, blob): 20 | try: 21 | print(blob['DAQSummaryslice']) 22 | except KeyError: 23 | pass 24 | return blob 25 | 26 | 27 | class MeanHits(Module): 28 | def __init__(self, **context): 29 | super(self.__class__, self).__init__(**context) 30 | self.hits = [] 31 | 32 | def process(self, blob): 33 | try: 34 | event = blob['DAQEvent'] 35 | self.hits.append(event.n_snapshot_hits) 36 | except KeyError: 37 | pass 38 | return blob 39 | 40 | def finish(self): 41 | mean_hits = sum(self.hits) / len(self.hits) 42 | print("Number of entries: {0}\nMean hits: {1}" 43 | .format(len(self.hits), mean_hits)) 44 | 45 | 46 | class MeanRates(Module): 47 | def __init__(self, **context): 48 | super(self.__class__, self).__init__(**context) 49 | self.rates = {} 50 | 51 | def process(self, blob): 52 | try: 53 | summaryslice = blob['DAQSummaryslice'] 54 | print(summaryslice.summary_frames) 55 | except KeyError: 56 | pass 57 | return blob 58 | 59 | def finish(self): 60 | pass 61 | 62 | 63 | pipeline = Pipeline() 64 | pipeline.attach(DAQPump, 'daq_pump', 65 | filename='/Users/tamasgal/Desktop/RUN-PPM_DU-00430-20140730-121124_detx.dat') 66 | #pipeline.attach(DAQEventPrinter, 'moo') 67 | #pipeline.attach(DAQSummaryslicePrinter, 'summaryslice_printer') 68 | #pipeline.attach(MeanRates, 'mean_rates') 69 | pipeline.attach(MeanHits, 'mean_hits') 70 | pipeline.drain() 71 | 72 | 73 | -------------------------------------------------------------------------------- /pipeinspector/gui.py: -------------------------------------------------------------------------------- 1 | __author__ = 'tamasgal' 2 | 3 | import urwid 4 | 5 | from pipeinspector.widgets import BlobWidget, BlobBrowser 6 | from pipeinspector.settings import UI 7 | 8 | 9 | 10 | 11 | 12 | 13 | class MainFrame(urwid.Frame): 14 | """ 15 | Represents the main GUI 16 | 17 | """ 18 | def __init__(self, pump): 19 | self.header = 
urwid.AttrWrap(urwid.Text("PipeInspector", align='center'), 20 | 'header') 21 | 22 | self.blob_browser = BlobBrowser() 23 | 24 | self.info_area = urwid.Text('') 25 | self.blobs = BlobWidget() 26 | self.footer = urwid.Columns([self.info_area, self.blobs]) 27 | 28 | self.frame = urwid.AttrWrap(urwid.Frame(self.blob_browser, 29 | header=self.header, 30 | footer=self.footer), 'default') 31 | urwid.Frame.__init__(self, self.frame) 32 | self.overlay = None 33 | 34 | self.pump = pump 35 | 36 | urwid.connect_signal(self.blobs, 'blob_selected', self.blob_selected) 37 | self.blobs.goto_blob(0) 38 | 39 | def blob_selected(self, index): 40 | self.info_area.set_text("Blob: {0}".format(index)) 41 | 42 | #blob = self.pump.process(None) 43 | blob = self.pump.get_blob(index) 44 | self.blob_browser.load(blob) 45 | 46 | def keypress(self, size, key): 47 | input = urwid.Frame.keypress(self, size, key) 48 | if input is None: 49 | return 50 | if input in UI.keys['left']: 51 | self.blobs.previous_blob() 52 | elif input in UI.keys['right']: 53 | self.blobs.next_blob() 54 | elif input in [key.upper() for key in UI.keys['left']]: 55 | self.blobs.previous_blob(step=10) 56 | elif input in [key.upper() for key in UI.keys['right']]: 57 | self.blobs.next_blob(step=10) 58 | elif input in UI.keys['home']: 59 | self.blobs.goto_blob(0) 60 | else: 61 | return self.body.keypress(size, input) 62 | -------------------------------------------------------------------------------- /pipeinspector/tests/test_widgets.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: test_widgets.py 3 | """ 4 | ... 
5 | 6 | """ 7 | from __future__ import division, absolute_import, print_function 8 | 9 | from km3pipe.testing import * 10 | 11 | from pipeinspector.widgets import BlobWidget 12 | 13 | __author__ = 'tamasgal' 14 | 15 | 16 | class TestBlobWidget(TestCase): 17 | 18 | def test_make_scale_labels(self): 19 | blobs = BlobWidget() 20 | blobs.width = 25 21 | scale_labels = blobs._make_scale_labels(0) 22 | self.assertEqual("0 10 20 ", scale_labels) 23 | scale_labels = blobs._make_scale_labels(2) 24 | self.assertEqual("0 10 20 ", scale_labels) 25 | scale_labels = blobs._make_scale_labels(10) 26 | self.assertEqual("0 10 20 ", scale_labels) 27 | scale_labels = blobs._make_scale_labels(11) 28 | self.assertEqual(" 10 20 ", scale_labels) 29 | scale_labels = blobs._make_scale_labels(4589) 30 | self.assertEqual(" 4580 4590 ", scale_labels) 31 | 32 | def test_make_ruler(self): 33 | blobs = BlobWidget() 34 | blobs.width = 25 35 | ruler = blobs._make_ruler(0) 36 | self.assertEqual("| ' | ' | ", ruler) 37 | ruler = blobs._make_ruler(2) 38 | self.assertEqual("| ' | ' | ", ruler) 39 | ruler = blobs._make_ruler(10) 40 | self.assertEqual("| ' | ' | ", ruler) 41 | ruler = blobs._make_ruler(11) 42 | self.assertEqual(" ' | ' | '", ruler) 43 | ruler = blobs._make_ruler(12) 44 | self.assertEqual(" ' | ' | ' ", ruler) 45 | ruler = blobs._make_ruler(19) 46 | self.assertEqual(" | ' | ' | ", ruler) 47 | ruler = blobs._make_ruler(20) 48 | self.assertEqual("| ' | ' | ", ruler) 49 | ruler = blobs._make_ruler(23) 50 | self.assertEqual(" ' | ' | ' ", ruler) 51 | ruler = blobs._make_ruler(109) 52 | self.assertEqual(" | ' | ' | ", ruler) -------------------------------------------------------------------------------- /km3pipe/pumps/aanet.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: aanet.py 3 | # pylint: disable=locally-disabled 4 | """ 5 | Pump for the Aanet data format. 
6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | from km3pipe import Pump 11 | from km3pipe.logger import logging 12 | 13 | log = logging.getLogger(__name__) # pylint: disable=C0103 14 | 15 | 16 | class AanetPump(Pump): 17 | """A pump for binary Aanet files.""" 18 | 19 | def __init__(self, **context): 20 | super(self.__class__, self).__init__(**context) 21 | 22 | self.filename = self.get('filename') 23 | self.filenames = self.get('filenames') or [] 24 | self.indices = self.get('indices') 25 | 26 | if not self.filename and not self.filenames: 27 | raise ValueError("No filename(s) defined") 28 | 29 | if self.filename: 30 | if "[index]" in self.filename and self.indices: 31 | self._parse_filenames() 32 | else: 33 | self.filenames.append(self.filename) 34 | 35 | self.blobs = self.blob_generator() 36 | 37 | def _parse_filenames(self): 38 | prefix, suffix = self.filename.split('[index]') 39 | self.filenames += [prefix + str(i) + suffix for i in self.indices] 40 | 41 | def get_blob(self, index): 42 | NotImpelementedYet("Aanet currently does not support indexing.") 43 | 44 | def blob_generator(self): 45 | """Create a blob generator.""" 46 | # pylint: disable:F0401,W0612 47 | import aa 48 | from ROOT import EventFile 49 | 50 | for filename in self.filenames: 51 | print("Reading from file: {0}".format(filename)) 52 | event_file = EventFile(filename) 53 | for event in event_file: 54 | blob = {'Evt': event, 55 | 'Hits': event.hits, 56 | 'MCHits': event.mc_hits, 57 | 'Tracks': event.trks, 58 | 'MCTracks': event.mc_trks} 59 | yield blob 60 | del event_file 61 | 62 | def process(self, blob): 63 | return next(self.blobs) 64 | 65 | -------------------------------------------------------------------------------- /km3modules/__init__.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: __init__.py 3 | # pylint: disable=locally-disabled 4 | """ 5 | A collection of commonly used 
modules. 6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | import timeit 11 | 12 | from km3pipe import Module 13 | 14 | 15 | class HitCounter(Module): 16 | """Prints the number of hits and raw hits in an Evt file""" 17 | def process(self, blob): 18 | try: 19 | print("Number of hits: {0}".format(len(blob['hit']))) 20 | except KeyError: 21 | pass 22 | try: 23 | print("Number of raw hits: {0}".format(len(blob['hit_raw']))) 24 | except KeyError: 25 | pass 26 | return blob 27 | 28 | 29 | class BlobIndexer(Module): 30 | """Puts an incremented index in each blob for the key 'blob_index'""" 31 | def __init__(self, **context): 32 | super(self.__class__, self).__init__(**context) 33 | self.blob_index = 0 34 | 35 | def process(self, blob): 36 | blob['blob_index'] = self.blob_index 37 | self.blob_index += 1 38 | return blob 39 | 40 | 41 | class StatusBar(Module): 42 | """Displays the current blob number""" 43 | def __init__(self, **context): 44 | super(self.__class__, self).__init__(**context) 45 | self.blob_index = 0 46 | self.start = timeit.default_timer() 47 | 48 | def process(self, blob): 49 | print("------------[Blob {0:>7}]-------------".format(self.blob_index)) 50 | self.blob_index += 1 51 | return blob 52 | 53 | def finish(self): 54 | """Display some basic statistics like elapsed time""" 55 | elapsed_time = timeit.default_timer() - self.start 56 | print("\n" + '='*42) 57 | print("Processed {0} blobs in {1} s." 
58 | .format(self.blob_index, elapsed_time)) 59 | 60 | 61 | class MemoryObserver(Module): 62 | """Shows the maximum memory usage""" 63 | def __init__(self, **context): 64 | super(self.__class__, self).__init__(**context) 65 | import resource 66 | 67 | def process(self, blob): 68 | memory = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss 69 | print("Memory peak usage: {0} kb".format(memory)) 70 | -------------------------------------------------------------------------------- /pipeinspector/app.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: app.py 3 | """ 4 | PipeInspector 5 | 6 | Usage: 7 | pipeinspector FILE 8 | pipeinspector (-h | --help) 9 | pipeinspector --version 10 | 11 | Options: 12 | -h --help Show this screen. 13 | 14 | """ 15 | from __future__ import division, absolute_import, print_function 16 | 17 | import os 18 | 19 | import urwid 20 | 21 | from pipeinspector.gui import MainFrame 22 | from pipeinspector.settings import UI 23 | from km3pipe.pumps import EvtPump, DAQPump, AanetPump, CLBPump 24 | 25 | __version__ = "1.0.0" 26 | 27 | 28 | def handle_input(input): 29 | """Handle any unhandled input.""" 30 | if input in UI.keys['escape']: 31 | raise urwid.ExitMainLoop 32 | 33 | def filter_input(keys, raw): 34 | """Adds fancy mouse wheel functionality and VI navigation to ListBox""" 35 | if len(keys) == 1: 36 | if keys[0] in UI.keys['up']: 37 | keys[0] = 'up' 38 | elif keys[0] in UI.keys['down']: 39 | keys[0] = 'down' 40 | elif len(keys[0]) == 4 and keys[0][0] == 'mouse press': 41 | if keys[0][1] == 4: 42 | keys[0] = 'up' 43 | elif keys[0][1] == 5: 44 | keys[0] = 'down' 45 | return keys 46 | 47 | 48 | def get_pump(input_file): 49 | extension = os.path.splitext(input_file)[1][1:] 50 | if extension == 'evt': 51 | pump = EvtPump(filename=input_file, cache_enabled=True) 52 | elif extension == 'dat': 53 | pump = DAQPump(filename=input_file) 54 | elif extension == 'dqd': 55 | pump = 
CLBPump(filename=input_file, cache_enabled=True) 56 | elif extension == 'root': 57 | pump = AanetPump(filename=input_file) 58 | else: 59 | raise SystemExit("No pump found for '{0}' files.".format(extension)) 60 | return pump 61 | 62 | 63 | def main(): 64 | from docopt import docopt 65 | arguments = docopt(__doc__, version=__version__) 66 | input_file = arguments['FILE'] 67 | pump = get_pump(input_file) 68 | main_frame = MainFrame(pump) 69 | #main_frame.header.set_text("Inspecting {0}".format(input_file)) 70 | loop = urwid.MainLoop(main_frame, UI.palette, 71 | input_filter=filter_input, 72 | unhandled_input=handle_input) 73 | loop.run() 74 | 75 | 76 | if __name__ == '__main__': 77 | main() 78 | -------------------------------------------------------------------------------- /km3pipe/pumps/jpp.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | # Filename: jpp.py 4 | # pylint: disable= 5 | """ 6 | Pump for the jpp file read through aanet interface. 
7 | 8 | """ 9 | from __future__ import division, absolute_import, print_function 10 | 11 | from km3pipe import Pump 12 | from km3pipe.logger import logging 13 | 14 | log = logging.getLogger(__name__) # pylint: disable=C0103 15 | 16 | 17 | class JPPPump(Pump): 18 | """A pump for JPP ROOT files.""" 19 | 20 | def __init__(self, **context): 21 | super(self.__class__, self).__init__(**context) 22 | 23 | self.index = self.get('index') or 0 24 | 25 | import aa 26 | import ROOT 27 | if self.get('index'): 28 | self.index = self.get('index') 29 | else: 30 | self.index = 0 31 | 32 | self.index_start = self.get('index_start') or 1 33 | self.index_stop = self.get('index_stop') or 1 34 | 35 | self.filename = self.get('filename') 36 | self.basename = self.get('basename') 37 | if not self.filename and not self.basename: 38 | raise ValueError("No filename defined") 39 | 40 | self.file_index = self.index_start 41 | 42 | if self.basename: 43 | self.rootfile = ROOT.EventFile(self.basename + str(self.file_index) + ".JTE.root") 44 | 45 | else: 46 | self.rootfile = ROOT.EventFile(self.filename) 47 | 48 | self.evt = ROOT.Evt() 49 | 50 | def get_blob(self, index): 51 | """Return the blob""" 52 | self.rootfile.set_index(index) 53 | self.evt = self.rootfile.evt 54 | return {'Evt': self.evt} 55 | 56 | def process(self, blob): 57 | if self.rootfile.set_index(self.index): 58 | self.evt = self.rootfile.evt 59 | self.index += 1 60 | return {'Evt': self.evt} 61 | else: 62 | self.file_index += 1 63 | if self.basename and self.file_index <= self.index_stop: 64 | import aa 65 | import ROOT 66 | print("open next file") 67 | self.rootfile = ROOT.EventFile(self.basename + str(self.file_index) + ".JTE.root") 68 | self.index = 0 69 | self.process(blob) 70 | else: 71 | raise StopIteration 72 | 73 | 74 | def finish(self): 75 | self.rootfile.Close() 76 | -------------------------------------------------------------------------------- /docs/data_structures.rst: 
--------------------------------------------------------------------------------
Data Structures
===============

This section describes the basic data structures which a **pump**
provides via the **blob** dictionary. The pump is responsible for parsing
the data and creating a **blob** (a simple Python dictionary) for each
event in the file. When processing a data file with KM3Pipe, a module
chain is being utilised to cycle through the events. Each module within
the chain receives the original, unaltered data from the pump and
furthermore any additional information created by the preceding modules.

Hits
----

There are two kinds of basic hit types: a **raw hit** representing either an
actual hit measured by the detector hardware or a calibrated MC hit which
does not contain MC information anymore, and a **MC hit**, which
was created by a Monte Carlo simulation. The dictionary key naming
conventions for raw hits and MC hits are the following:

+---------------+------------+------------------------+
| information   | dict key   | container type         |
+===============+============+========================+
| Raw Hits      | Hits       | list (Python stdlib)   |
+---------------+------------+------------------------+
| MC Hits       | MCHits     | list (Python stdlib)   |
+---------------+------------+------------------------+

Both hit types have attributes which can be accessed through the
following getters:

+---------------------+----------+-----------+-----------+----------+
| information         | getter   | type      | raw hit   | MC hit   |
+=====================+==========+===========+===========+==========+
| hit id              | .id      | numeric   | X         | X        |
+---------------------+----------+-----------+-----------+----------+
| hit time            | .time    | numeric   | X         | X        |
+---------------------+----------+-----------+-----------+----------+
| time over threshold |
.tot | numeric | X | | 40 | +---------------------+----------+-----------+-----------+----------+ 41 | | PMT id | .pmt_id | numeric | X | X | 42 | +---------------------+----------+-----------+-----------+----------+ 43 | | trigger information | ... | ... | X | | 44 | +---------------------+----------+-----------+-----------+----------+ 45 | 46 | to be continued... 47 | 48 | 49 | Tracks 50 | ------ 51 | 52 | MC Tracks 53 | ~~~~~~~~~ 54 | 55 | Track Fits 56 | ~~~~~~~~~~ 57 | -------------------------------------------------------------------------------- /km3pipe/tests/test_decorators.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: test_decorators.py 3 | # pylint: disable=C0111,R0904,C0103,R0903,R0201 4 | from __future__ import division, absolute_import, print_function 5 | 6 | from km3pipe.testing import TestCase, StringIO 7 | from km3pipe.decorators import remain_file_pointer 8 | 9 | 10 | class TestRemainFilePointer(TestCase): 11 | 12 | def test_remains_file_pointer_in_function(self): 13 | dummy_file = StringIO('abcdefg') 14 | 15 | @remain_file_pointer 16 | def seek_into_file(file_obj): 17 | file_obj.seek(1, 0) 18 | 19 | dummy_file.seek(2, 0) 20 | self.assertEqual(2, dummy_file.tell()) 21 | seek_into_file(dummy_file) 22 | self.assertEqual(2, dummy_file.tell()) 23 | 24 | def test_remains_file_pointer_and_return_value_in_function(self): 25 | dummy_file = StringIO('abcdefg') 26 | 27 | @remain_file_pointer 28 | def seek_into_file(file_obj): 29 | file_obj.seek(1, 0) 30 | return 1 31 | 32 | dummy_file.seek(2, 0) 33 | self.assertEqual(2, dummy_file.tell()) 34 | return_value = seek_into_file(dummy_file) 35 | self.assertEqual(2, dummy_file.tell()) 36 | self.assertEqual(1, return_value) 37 | 38 | def test_remains_file_pointer_in_class_method(self): 39 | 40 | class FileSeekerClass(object): 41 | def __init__(self): 42 | self.dummy_file = StringIO('abcdefg') 43 | 44 | @remain_file_pointer 45 | def 
seek_into_file(self, file_obj): 46 | file_obj.seek(1, 0) 47 | 48 | fileseeker = FileSeekerClass() 49 | fileseeker.dummy_file.seek(2, 0) 50 | self.assertEqual(2, fileseeker.dummy_file.tell()) 51 | fileseeker.seek_into_file(fileseeker.dummy_file) 52 | self.assertEqual(2, fileseeker.dummy_file.tell()) 53 | 54 | def test_remains_file_pointer_and_return_value_in_class_method(self): 55 | 56 | class FileSeekerClass(object): 57 | def __init__(self): 58 | self.dummy_file = StringIO('abcdefg') 59 | 60 | @remain_file_pointer 61 | def seek_into_file(self, file_obj): 62 | file_obj.seek(1, 0) 63 | return 1 64 | 65 | fileseeker = FileSeekerClass() 66 | fileseeker.dummy_file.seek(2, 0) 67 | self.assertEqual(2, fileseeker.dummy_file.tell()) 68 | return_value = fileseeker.seek_into_file(fileseeker.dummy_file) 69 | self.assertEqual(2, fileseeker.dummy_file.tell()) 70 | self.assertEqual(1, return_value) 71 | -------------------------------------------------------------------------------- /km3pipe/dataclasses.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: dataclasses.py 3 | # pylint: disable=W0232,C0103,C0111 4 | """ 5 | ... 
6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | __all__ = ('Point', 'Position', 'Direction') 11 | 12 | 13 | import numpy as np 14 | 15 | from km3pipe.tools import angle_between 16 | 17 | class Point(np.ndarray): 18 | """Represents a point in a 3D space""" 19 | def __new__(cls, input_array=(np.nan, np.nan, np.nan)): 20 | """Add x, y and z to the ndarray""" 21 | obj = np.asarray(input_array).view(cls) 22 | return obj 23 | 24 | @property 25 | def x(self): 26 | return self[0] 27 | 28 | @x.setter 29 | def x(self, value): 30 | self[0] = value 31 | 32 | @property 33 | def y(self): 34 | return self[1] 35 | 36 | @y.setter 37 | def y(self, value): 38 | self[1] = value 39 | 40 | @property 41 | def z(self): 42 | return self[2] 43 | 44 | @z.setter 45 | def z(self, value): 46 | self[2] = value 47 | 48 | 49 | class Position(Point): 50 | """Represents a point in a 3D space""" 51 | pass 52 | 53 | 54 | class Direction(Point): 55 | """Represents a direction in a 3D space 56 | 57 | The direction vector always normalises itself when an attribute is changed. 
58 | 59 | """ 60 | def __new__(cls, input_array=(1, 0, 0)): 61 | """Add x, y and z to the ndarray""" 62 | normed_array = np.array(input_array) / np.linalg.norm(input_array) 63 | obj = np.asarray(normed_array).view(cls) 64 | return obj 65 | 66 | def _normalise(self): 67 | normed_array = self / np.linalg.norm(self) 68 | self[0] = normed_array[0] 69 | self[1] = normed_array[1] 70 | self[2] = normed_array[2] 71 | 72 | @property 73 | def x(self): 74 | return self[0] 75 | 76 | @x.setter 77 | def x(self, value): 78 | self[0] = value 79 | self._normalise() 80 | 81 | @property 82 | def y(self): 83 | return self[1] 84 | 85 | @y.setter 86 | def y(self, value): 87 | self[1] = value 88 | self._normalise() 89 | 90 | @property 91 | def z(self): 92 | return self[2] 93 | 94 | @z.setter 95 | def z(self, value): 96 | self[2] = value 97 | self._normalise() 98 | 99 | @property 100 | def zenith(self): 101 | return angle_between(self, (0, 0, -1)) 102 | 103 | def __str__(self): 104 | return "({0:.4}, {1:.4}, {2:.4})".format(self.x, self.y, self.z) 105 | 106 | 107 | -------------------------------------------------------------------------------- /pipeinspector/playground/foo.py: -------------------------------------------------------------------------------- 1 | import curses 2 | 3 | class PipeInspector(object): 4 | def __init__(self, stdscr): 5 | print("Initialising PipeInspector") 6 | self.stdscr = stdscr 7 | self.max_row = None 8 | self.max_col = None 9 | self.origin = (0, 0) 10 | 11 | self._update_dimensions() 12 | 13 | self._windows = [] 14 | self._pads = [] 15 | 16 | self._setup_curses() 17 | self._create_windows() 18 | #self._create_pads() 19 | self.refresh() 20 | 21 | def test(self): 22 | self.stdscr.addstr("Pretty text", curses.color_pair(1)) 23 | 24 | def run(self): 25 | self.refresh() 26 | while True: 27 | try: 28 | c = self.stdscr.getkey() 29 | if c == 'q': 30 | raise SystemExit 31 | except curses.error: 32 | self.refresh() 33 | 34 | def refresh(self): 35 | #resize = 
curses.is_term_resized(self.max_row, self.max_col) 36 | #if resize: 37 | # self._update_dimensions() 38 | # self.stdscr.clear() 39 | # curses.resizeterm(self.max_row, self.max_col) 40 | for pad in self._pads: 41 | pad.refresh(0, 0, 1, 1, self.max_row - 1, self.max_col - 1) 42 | for window in self._windows: 43 | window.box() 44 | window.refresh() 45 | self.stdscr.refresh() 46 | 47 | 48 | def _setup_curses(self): 49 | curses.noecho() 50 | curses.cbreak() 51 | self.stdscr.keypad(True) 52 | curses.init_pair(1, curses.COLOR_RED, curses.COLOR_WHITE) 53 | self.stdscr.border(0) 54 | self.stdscr.immedok(True) 55 | 56 | def _update_dimensions(self): 57 | self.max_row, self.max_col = self.stdscr.getmaxyx() 58 | self.max_row -= 1 59 | self.max_col -= 1 60 | 61 | def _create_windows(self): 62 | x = 20 63 | y = 7 64 | height = 5 65 | width = 40 66 | win = curses.newwin(height, width, y, x) 67 | win.box() 68 | win.immedok(True) 69 | self._windows.append(win) 70 | 71 | def _create_pads(self): 72 | pad = curses.newpad(100, 100) 73 | pad.box() 74 | pad.immedok(True) 75 | for y in range(0, 100): 76 | for x in range(0, 100): 77 | try: 78 | pad.addch(y,x, ord('a') + (x*x+y*y) % 26) 79 | except curses.error: 80 | pass 81 | self._pads.append(pad) 82 | 83 | 84 | def main(stdscr): 85 | stdscr.clear() 86 | pipe_inspector = PipeInspector(stdscr) 87 | pipe_inspector.run() 88 | 89 | if __name__ == '__main__': 90 | curses.wrapper(main) 91 | -------------------------------------------------------------------------------- /km3pipe/tests/test_dataclasses.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: test_dataclasses.py 3 | # pylint: disable=C0111,R0904,C0103 4 | """ 5 | ... 
6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | import numpy as np 11 | 12 | from km3pipe.testing import TestCase 13 | from km3pipe.dataclasses import Position, Direction 14 | 15 | 16 | class TestPosition(TestCase): 17 | 18 | def test_position(self): 19 | position = Position((1, 2, 3)) 20 | self.assertEqual(1, position.x) 21 | self.assertEqual(2, position.y) 22 | self.assertEqual(3, position.z) 23 | 24 | def test_attributes_can_be_changed(self): 25 | position = Position() 26 | position.x = 1 27 | position.y = 2 28 | position.z = 3 29 | self.assertEqual(1, position.x) 30 | self.assertEqual(1, position[0]) 31 | self.assertEqual(2, position.y) 32 | self.assertEqual(2, position[1]) 33 | self.assertEqual(3, position.z) 34 | self.assertEqual(3, position[2]) 35 | 36 | def test_position_is_ndarray_like(self): 37 | pos = Position((1, 2, 3)) 38 | pos *= 2 39 | self.assertEqual(4, pos[1]) 40 | self.assertEqual(3, pos.size) 41 | self.assertTupleEqual((3,), pos.shape) 42 | 43 | 44 | class TestDirection(TestCase): 45 | 46 | def test_direction(self): 47 | direction = Direction((1, 0, 0)) 48 | self.assertAlmostEqual(1, np.linalg.norm(direction)) 49 | self.assertEqual(1, direction.x) 50 | self.assertEqual(0, direction.y) 51 | self.assertEqual(0, direction.z) 52 | 53 | def test_direction_normalises_on_init(self): 54 | direction = Direction((1, 2, 3)) 55 | self.assertAlmostEqual(0.26726124, direction.x) 56 | self.assertAlmostEqual(0.53452248, direction.y) 57 | self.assertAlmostEqual(0.80178372, direction.z) 58 | 59 | def test_direction_normalises_on_change_attribute(self): 60 | direction = Direction((1, 2, 3)) 61 | self.assertAlmostEqual(1, np.linalg.norm(direction)) 62 | direction.x = 10 63 | self.assertAlmostEqual(1, np.linalg.norm(direction)) 64 | direction.y = 20 65 | self.assertAlmostEqual(1, np.linalg.norm(direction)) 66 | direction.z = 30 67 | self.assertAlmostEqual(1, np.linalg.norm(direction)) 68 | 69 | def 
test_direction_zenith(self): 70 | direction = Direction((0, 0, -1)) 71 | self.assertAlmostEqual(0, direction.zenith) 72 | direction = Direction((0, 0, 1)) 73 | self.assertAlmostEqual(np.pi, direction.zenith) 74 | direction = Direction((0, 1, 0)) 75 | self.assertAlmostEqual(np.pi/2, direction.zenith) 76 | 77 | def test_direction_str(self): 78 | direction = Direction((1, 2, 3)) 79 | self.assertEqual("(0.2673, 0.5345, 0.8018)", str(direction)) 80 | 81 | 82 | -------------------------------------------------------------------------------- /km3pipe/pumps/ch.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | # coding=utf-8 3 | # Filename: jpp.py 4 | # pylint: disable= 5 | """ 6 | Pump for the jpp file read through aanet interface. 7 | 8 | """ 9 | from __future__ import division, absolute_import, print_function 10 | 11 | import socket 12 | 13 | from km3pipe import Pump 14 | from km3pipe.logger import logging 15 | import threading 16 | try: 17 | from Queue import Queue, Empty 18 | except ImportError: 19 | from queue import Queue, Empty 20 | 21 | log = logging.getLogger(__name__) # pylint: disable=C0103 22 | 23 | 24 | class CHPump(Pump): 25 | """A pump for ControlHost data.""" 26 | def __init__(self, **context): 27 | super(self.__class__, self).__init__(**context) 28 | 29 | self.host = self.get('host') or '127.0.0.1' 30 | self.port = self.get('port') or 5553 31 | self.tags = self.get('tags') or "MSG" 32 | self.timeout = self.get('timeout') or 60*60*24 33 | self.max_queue = self.get('max_queue') or 50 34 | self.key_for_data = self.get('key_for_data') or 'CHData' 35 | self.key_for_prefix = self.get('key_for_prefix') or 'CHPrefix' 36 | 37 | self.queue = Queue() 38 | self.client = None 39 | self.thread = None 40 | 41 | print("Connecting to {0} on port {1}\n" 42 | "Subscribed tags: {2}\n" 43 | "Connection timout: {3}s\n" 44 | "Maximum queue size for incoming data: {4}" 45 | .format(self.host, self.port, self.tags, 
self.timeout, 46 | self.max_queue)) 47 | 48 | self._init_controlhost() 49 | self._start_thread() 50 | 51 | def _start_thread(self): 52 | self.thread = threading.Thread(target=self._run, args=()) 53 | self.thread.daemon = True 54 | self.thread.start() 55 | 56 | def _init_controlhost(self): 57 | """Set up the controlhost connection""" 58 | from controlhost import Client 59 | self.client = Client(self.host, self.port) 60 | self.client._connect() 61 | for tag in self.tags.split(','): 62 | self.client.subscribe(tag.strip()) 63 | 64 | def _run(self): 65 | while True: 66 | prefix, data = self.client.get_message() 67 | if self.queue.qsize() > self.max_queue: 68 | log.warn("Maximum queue size ({0}) reached, dropping data." 69 | .format(self.max_queue)) 70 | else: 71 | self.queue.put((prefix, data)) 72 | 73 | def process(self, blob): 74 | """Wait for the next packet and put it in the blob""" 75 | try: 76 | prefix, data = self.queue.get(timeout=self.timeout) 77 | except Empty: 78 | log.warn("ControlHost timeout ({0}s) exceeded".format(self.timeout)) 79 | raise StopIteration("ControlHost timeout exceeded.") 80 | blob[self.key_for_prefix] = prefix 81 | blob[self.key_for_data] = data 82 | return blob 83 | 84 | def finish(self): 85 | """Clean up the JLigier controlhost connection""" 86 | self.client._disconnect() 87 | -------------------------------------------------------------------------------- /km3pipe/tests/test_core.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: test_core.py 3 | # pylint: disable=C0111,E1003,R0904,C0103,R0201,C0102 4 | from __future__ import division, absolute_import, print_function 5 | 6 | __author__ = 'tamasgal' 7 | 8 | from km3pipe.testing import TestCase, StringIO, MagicMock 9 | from km3pipe.core import Pipeline, Module, Pump, Blob 10 | 11 | 12 | class TestPipeline(TestCase): 13 | """Tests for the main pipeline""" 14 | 15 | def setUp(self): 16 | self.pl = Pipeline() 17 | 18 | def 
class TestModule(TestCase):
    """Tests for the pipeline module"""

    def test_name_can_be_set_on_init(self):
        expected_name = 'foo'
        module = Module(name=expected_name)
        self.assertEqual(expected_name, module.name)

    def test_name_is_read_only(self):
        module = Module(name='foo')
        with self.assertRaises(AttributeError):
            module.name = 'narf'

    def test_process(self):
        module = Module(name='foo')
        incoming_blob = Blob()
        # ``process`` on the base class must hand back the same blob.
        self.assertIs(incoming_blob, module.process(incoming_blob))

    def test_add_parameter(self):
        module = Module()
        module.add('foo', 'default')
        self.assertDictEqual({'foo': 'default'}, module.parameters)

    def test_get_parameter(self):
        module = Module()
        module.add('foo', 'default')
        self.assertEqual('default', module.get('foo'))

    def test_default_parameter_value_can_be_overwritten(self):
        class Foo(Module):
            def __init__(self, **context):
                super(self.__class__, self).__init__(**context)
                self.foo = self.get('foo') or 'default_foo'

        self.assertEqual('default_foo', Foo().foo)
        self.assertEqual('overwritten', Foo(foo='overwritten').foo)

    def test_finish(self):
        Module().finish()
37 | keys = { 38 | 'select': ('return','enter'), 39 | 'inspect': ('x','X'), 40 | 'escape': ('esc','q','Q'), 41 | 'left': ('left','h'), 42 | 'right': ('right','l'), 43 | 'up': ('up','k'), 44 | 'down': ('down','j'), 45 | 'goto':('g','G'), 46 | 'help':('?',), 47 | } 48 | 49 | 50 | class ItemWidget (urwid.WidgetWrap): 51 | 52 | def __init__ (self, id, description): 53 | self.id = id 54 | self.content = 'item %s: %s...' % (str(id), description[:25]) 55 | self.item = [ 56 | ('fixed', 15, urwid.Padding(urwid.AttrWrap( 57 | urwid.Text('item %s' % str(id)), 'body', 'focus'), left=2)), 58 | urwid.AttrWrap(urwid.Text('%s' % description), 'body', 'focus'), 59 | ] 60 | w = urwid.Columns(self.item) 61 | self.__super.__init__(w) 62 | 63 | def selectable (self): 64 | return True 65 | 66 | def keypress(self, size, key): 67 | return key 68 | 69 | 70 | class BlobSelector(urwid.WidgetWrap): 71 | def __init__ (self, description): 72 | self.content = description 73 | self.item = [ 74 | urwid.AttrWrap(urwid.Text('%s' % description), 'blob', 'focus'), 75 | ] 76 | w = urwid.Columns(self.item) 77 | self.__super.__init__(w) 78 | 79 | def selectable (self): 80 | return True 81 | 82 | def keypress(self, size, key): 83 | return key 84 | 85 | 86 | class BlobWidget(urwid.Pile): 87 | def __init__(self): 88 | self.width = 20 89 | self.size = (0,) 90 | urwid.Pile.__init__(self, [urwid.Text('', wrap='clip'), 91 | urwid.Text('', wrap='clip'), 92 | urwid.Text('', wrap='clip')]) 93 | 94 | def draw(self): 95 | self.widget_list[0].set_text(".OOOOOOOOOOOOOOOOOOOO") 96 | self.widget_list[1].set_text("| ' | ' |") 97 | self.widget_list[2].set_text("0 10 20") 98 | 99 | def render(self, size, focus): 100 | self.size = size 101 | self.draw() 102 | return urwid.Pile.render(self, size, focus) 103 | 104 | 105 | def next_blob(): 106 | pass 107 | 108 | header = urwid.AttrWrap(urwid.Text("The header!", align='center'), 'header') 109 | footer = urwid.AttrWrap(urwid.Text("The footer"), 'footer') 110 | 111 | 112 | items 
class TestTools(TestCase):
    """Tests for the helpers in km3pipe.tools."""

    def test_unpack_nfirst(self):
        values = (1, 2, 3, 4, 5)
        first, second, third, remainder = unpack_nfirst(values, 3)
        self.assertEqual(1, first)
        self.assertEqual(2, second)
        self.assertEqual(3, third)
        self.assertTupleEqual((4, 5), remainder)

    def test_split_splits_strings(self):
        self.assertListEqual(['1', '2', '3', '4'], split("1 2 3 4"))

    def test_split_callback_converts_correctly(self):
        self.assertListEqual([1, 2, 3, 4], split("1 2 3 4", int))
        self.assertListEqual([1.0, 2.1, 3.2, 4.3],
                             split("1.0 2.1 3.2 4.3", float))

    def test_namedtuple_with_defaults_initialises_with_none(self):
        Node = namedtuple_with_defaults('Node', 'val left right')
        node = Node()
        for attribute in (node.val, node.left, node.right):
            self.assertIsNone(attribute)

    def test_namedtuple_with_defaults_initialises_with_given_values(self):
        Node = namedtuple_with_defaults('Node', 'val left right', [1, 2, 3])
        node = Node()
        self.assertEqual(1, node.val)
        self.assertEqual(2, node.left)
        self.assertEqual(3, node.right)

    def test_angle_between(self):
        x_axis = (1, 0, 0)
        y_axis = (0, 1, 0)
        neg_x_axis = (-1, 0, 0)
        self.assertAlmostEqual(0, angle_between(x_axis, x_axis))
        self.assertAlmostEqual(np.pi / 2, angle_between(x_axis, y_axis))
        self.assertAlmostEqual(np.pi, angle_between(x_axis, neg_x_axis))

    def test_angle_between_returns_nan_for_zero_length_vectors(self):
        self.assertTrue(np.isnan(angle_between((0, 0, 0), (1, 0, 0))))

    def test_geant2pdg(self):
        self.assertEqual(22, geant2pdg(1))
        self.assertEqual(-13, geant2pdg(5))

    def test_geant2pdg_returns_0_for_unknown_particle_id(self):
        self.assertEqual(0, geant2pdg(-999))

    def test_pdg2name(self):
        self.assertEqual('mu-', pdg2name(13))
        self.assertEqual('anu_tau', pdg2name(-16))

    def test_pdg2name_returns_NA_for_unknown_particle(self):
        self.assertEqual('N/A', pdg2name(0))
self.replugger.switch([0, 1], [1, 0]) 96 | self.assertEqual(self.replugger._new_combs, 97 | [(0, 1), (1, 2), (1, 3), (0, 2), (0, 3), (2, 3)]) 98 | 99 | def test_switch_three_indicies(self): 100 | self.replugger.switch([0, 1, 2], [1, 2, 0]) 101 | self.assertEqual(self.replugger._new_combs, 102 | [(1, 2), (0, 1), (1, 3), (0, 2), (2, 3), (0, 3)]) 103 | 104 | def test_angle_is_correct_if_two_pmts_are_switched(self): 105 | self.replugger.switch([0, 1], [1, 0]) 106 | self.assertEqual(0, self.replugger.angle_for((0, 1))) 107 | self.assertEqual(3, self.replugger.angle_for((0, 2))) 108 | self.assertEqual(4, self.replugger.angle_for((0, 3))) 109 | 110 | def test_angles_are_ordered_correctly_after_switch(self): 111 | self.replugger.switch([0, 1, 2], [1, 2, 0]) 112 | self.assertListEqual([1, 3, 5, 0, 2, 4], self.replugger.angles) 113 | -------------------------------------------------------------------------------- /km3pipe/pumps/clb.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: clb.py 3 | """ 4 | Pumps for the CLB data formats. 5 | 6 | """ 7 | from __future__ import division, absolute_import, print_function 8 | 9 | import struct 10 | from struct import unpack 11 | from binascii import hexlify 12 | from collections import namedtuple 13 | import datetime 14 | 15 | from km3pipe import Pump 16 | from km3pipe.logger import logging 17 | 18 | log = logging.getLogger(__name__) # pylint: disable=C0103 19 | 20 | 21 | class CLBPump(Pump): 22 | """A pump for binary CLB files.""" 23 | 24 | def __init__(self, **context): 25 | super(self.__class__, self).__init__(**context) 26 | self.filename = self.get('filename') 27 | self.cache_enabled = self.get('cache_enabled') or False 28 | self.packet_positions = [] 29 | self.index = 0 30 | 31 | if self.filename: 32 | self.open_file(self.filename) 33 | if self.cache_enabled: 34 | self.determine_packet_positions() 35 | else: 36 | log.warn("No filename specified. 
Take care of the file handling!") 37 | 38 | def determine_packet_positions(self): 39 | """Record the file pointer position of each frame""" 40 | print("Analysing file...") 41 | self.rewind_file() 42 | try: 43 | while True: 44 | pointer_position = self.blob_file.tell() 45 | length = struct.unpack('cic', 70 | self.blob_file.read(6)) 71 | pmt_data.append(PMTData(ord(channel_id), timestamp, ord(tot))) 72 | blob['PMTData'] = pmt_data 73 | return blob 74 | 75 | def get_blob(self, index): 76 | """Return blob at given index.""" 77 | self.seek_to_packet(index) 78 | return self.next_blob() 79 | 80 | def process(self, blob): 81 | blob = self.next_blob() 82 | return blob 83 | 84 | def __iter__(self): 85 | return self 86 | 87 | def next(self): 88 | """Python 2/3 compatibility for iterators""" 89 | return self.__next__() 90 | 91 | def __next__(self): 92 | return self.next_blob() 93 | 94 | def finish(self): 95 | """Clean everything up""" 96 | self.blob_file.close() 97 | 98 | class CLBHeader(object): 99 | """Wrapper for the CLB Common Header binary format. 100 | 101 | Args: 102 | file_obj (file): The binary file, where the file pointer is at the 103 | beginning of the header. 104 | 105 | Attributes: 106 | size (int): The size of the original DAQ byte representation. 
107 | 108 | """ 109 | size = 28 110 | 111 | def __init__(self, byte_data=None, file_obj=None): 112 | self.data_type = None 113 | self.run_number = None 114 | self.udp_sequence = None 115 | self.timestamp = None 116 | self.ns_ticks = None 117 | self.human_readable_timestamp = None 118 | self.dom_id = None 119 | self.dom_status = None 120 | self.time_valid = None 121 | if byte_data: 122 | self._parse_byte_data(byte_data) 123 | if file_obj: 124 | self._parse_file(file_obj) 125 | 126 | def __str__(self): 127 | # pylint: disable=E1124 128 | description = ("CLBHeader\n" 129 | " Data type: {self.data_type}\n" 130 | " Run number: {self.run_number}\n" 131 | " UDP sequence: {self.udp_sequence}\n" 132 | " Time stamp: {self.timestamp}\n" 133 | " {self.human_readable_timestamp}\n" 134 | " Ticks [16ns]: {self.ns_ticks}\n" 135 | " DOM ID: {self.dom_id}\n" 136 | " DOM status: {self.dom_status}\n" 137 | "".format(self=self)) 138 | return description 139 | 140 | def __insp__(self): 141 | return self.__str__() 142 | 143 | def _parse_byte_data(self, byte_data): 144 | """Extract the values from byte string.""" 145 | self.data_type = b''.join(unpack('cccc', byte_data[:4])).decode() 146 | self.run_number = unpack('>i', byte_data[4:8])[0] 147 | self.udp_sequence = unpack('>i', byte_data[8:12])[0] 148 | self.timestamp, self.ns_ticks = unpack('>II', byte_data[12:20]) 149 | self.dom_id = hexlify(b''.join(unpack('cccc', 150 | byte_data[20:24]))).decode() 151 | 152 | dom_status_bits = unpack('>I', byte_data[24:28])[0] 153 | self.dom_status = "{0:032b}".format(dom_status_bits) 154 | 155 | self.human_readable_timestamp = datetime.datetime.fromtimestamp( 156 | int(self.timestamp)).strftime('%Y-%m-%d %H:%M:%S') 157 | 158 | 159 | def _parse_file(self, file_obj): 160 | """Directly read from file handler. 161 | 162 | Note: 163 | This will move the file pointer. 
164 | 165 | """ 166 | byte_data = file_obj.read(self.size) 167 | self._parse_byte_data(byte_data) 168 | 169 | PMTData = namedtuple('PMTData', 'channel_id timestamp tot') 170 | -------------------------------------------------------------------------------- /pipeinspector/widgets.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: widgets.py 3 | """ 4 | GUI elements for the pipeinspector. 5 | 6 | """ 7 | from __future__ import division, absolute_import, print_function 8 | 9 | import urwid 10 | import pprint 11 | import math 12 | import sys 13 | 14 | from pipeinspector.settings import UI 15 | 16 | class BlobBrowser(urwid.Frame): 17 | def __init__(self): 18 | self.items = [] 19 | self.cursor_position = 0 20 | 21 | self.header = urwid.AttrMap(urwid.Text('Keys:'), 'head') 22 | 23 | self.listbox = urwid.ListBox(urwid.SimpleListWalker(self.items)) 24 | self.frame = urwid.Frame(self.listbox, header=self.header) 25 | line_box = urwid.AttrMap(urwid.LineBox(self.frame), 'body') 26 | urwid.Frame.__init__(self, line_box) 27 | self.overlay = None 28 | self.popup = None 29 | 30 | def load(self, blob): 31 | del self.listbox.body[:] 32 | new_items = [] 33 | for key in sorted(blob.keys()): 34 | item_widget = ItemWidget(key, blob[key]) 35 | new_items.append(item_widget) 36 | urwid.connect_signal(item_widget, 'key_selected', self.key_selected) 37 | self.listbox.body.extend(new_items) 38 | self.listbox.set_focus(self.cursor_position) 39 | 40 | def key_selected(self, data): 41 | def formatter(obj): 42 | if hasattr(obj, '__insp__'): 43 | return obj.__insp__() 44 | if hasattr(obj, 'size'): 45 | output = "" 46 | for obj in data: 47 | output += str(obj) + "\n" 48 | return output 49 | return pprint.pformat(obj) 50 | 51 | content = [urwid.Text(line) for line in formatter(data).split('\n')] 52 | self.popup = urwid.ListBox(content) 53 | popup_box = urwid.LineBox(self.popup) 54 | self.overlay = urwid.Overlay(popup_box, 
self.body, 55 | 'center', 56 | ('relative', 80), 57 | 'middle', 58 | ('relative', 80),) 59 | self.body = self.overlay 60 | 61 | def keypress(self, size, key): 62 | input = urwid.Frame.keypress(self, size, key) 63 | self.cursor_position = self.listbox.focus_position 64 | if self.overlay: 65 | if input in UI.keys['escape']: 66 | self.body = self.overlay.bottom_w 67 | self.overlay = None 68 | else: 69 | return input 70 | 71 | 72 | class ItemWidget (urwid.WidgetWrap): 73 | signals = ['key_selected'] 74 | def __init__ (self, key, data): 75 | self.key = key 76 | self.data = data 77 | self.item = [ 78 | ('fixed', 35, urwid.Padding( 79 | urwid.AttrWrap(urwid.Text(key), 'body', 'focus'), left=2)), 80 | urwid.AttrWrap(urwid.Text(str(type(data))), 'body', 'focus'), 81 | #urwid.AttrWrap(urwid.Text(str(len(data))), 'body', 'focus'), 82 | urwid.AttrWrap(urwid.Text(str(sys.getsizeof(data))), 'body', 'focus'), 83 | ] 84 | w = urwid.Columns(self.item) 85 | self.__super.__init__(w) 86 | 87 | def selectable(self): 88 | return True 89 | 90 | def keypress(self, size, key): 91 | if key == 'x': 92 | urwid.emit_signal(self, 'key_selected', self.data) 93 | return key 94 | 95 | 96 | class BlobWidget(urwid.Pile): 97 | signals = ['blob_selected'] 98 | def __init__(self): 99 | self.width = 50 100 | self.size = (0,) 101 | self.index = 0 102 | urwid.Pile.__init__(self, [urwid.Text('', wrap='clip'), 103 | urwid.Text('', wrap='clip'), 104 | urwid.Text('', wrap='clip')]) 105 | 106 | def goto_blob(self, position): 107 | self.index = position 108 | self._emit_blob_selected() 109 | self.draw() 110 | 111 | def previous_blob(self, step=1): 112 | self.index -= step 113 | if self.index <= 0: 114 | self.index = 0 115 | self._emit_blob_selected() 116 | self.draw() 117 | 118 | def next_blob(self, step=1): 119 | self.index += step 120 | self._emit_blob_selected() 121 | self.draw() 122 | 123 | def draw(self): 124 | self.widget_list[0].set_text(self._make_blob_icons(self.index)) 125 | 
self.widget_list[1].set_text(self._make_ruler(self.index)) 126 | self.widget_list[2].set_text(self._make_scale_labels(self.index)) 127 | 128 | def render(self, size, focus): 129 | self.size = size 130 | self.draw() 131 | return urwid.Pile.render(self, size, focus) 132 | 133 | def _emit_blob_selected(self): 134 | urwid.emit_signal(self, 'blob_selected', self.index) 135 | 136 | def _make_ruler(self, start): 137 | if start <= 10: 138 | start = 0 139 | else: 140 | start -= 10 141 | segment = "| ' " 142 | repeat = int(math.ceil(self.width / len(segment)) + 1) 143 | ruler = segment * repeat 144 | slice_start = (start % len(segment)) 145 | slice_end = (start % len(segment)) + self.width 146 | return ruler[slice_start:slice_end] 147 | 148 | def _make_scale_labels(self, start): 149 | if start <= 10: 150 | start = 0 151 | else: 152 | start -= 10 153 | lowest_tick = int(math.floor(start / 10) * 10) 154 | highest_tick = lowest_tick + self.width 155 | ticks_labels = ['{0:<10}'.format(i) 156 | for i in range(lowest_tick, highest_tick, 10)] 157 | slice_start = (start % 10) 158 | slice_end = (start % 10) + self.width 159 | ticks = ''.join(ticks_labels)[slice_start:slice_end] 160 | return ticks 161 | 162 | def _make_blob_icons(self, start): 163 | icon = 'B' 164 | if start < 10: 165 | icons = '.' + icon * (self.width - 1) 166 | else: 167 | icons = icon * self.width 168 | if start > 10: 169 | start = 10 170 | return [('blob', icons[:start]), 171 | ('blob_selected', icons[start]), 172 | ('blob', icons[start + 1:])] 173 | 174 | 175 | -------------------------------------------------------------------------------- /km3pipe/hardware.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: hardware.py 3 | # pylint: disable=locally-disabled 4 | """ 5 | Classes representing KM3NeT hardware. 
6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | __author__ = 'tamasgal' 11 | 12 | import os 13 | 14 | from km3pipe.tools import unpack_nfirst, split 15 | from km3pipe.dataclasses import Point, Direction 16 | from km3pipe.logger import logging 17 | 18 | log = logging.getLogger(__name__) # pylint: disable=C0103 19 | 20 | 21 | class Detector(object): 22 | """The KM3NeT detector""" 23 | def __init__(self, filename=None): 24 | self.det_file = None 25 | self.det_id = None 26 | self.n_doms = None 27 | self.n_pmts_per_dom = None 28 | self.doms = {} 29 | self.pmts = [] 30 | self._pmts_by_omkey = {} 31 | self._pmts_by_id = {} 32 | self._pmt_angles = [] 33 | 34 | if filename: 35 | self.init_from_file(filename) 36 | 37 | def init_from_file(self, filename): 38 | """Create detector from detx file.""" 39 | file_ext = os.path.splitext(filename)[1][1:] 40 | if not file_ext == 'detx': 41 | raise NotImplementedError('Only the detx format is supported.') 42 | self.open_file(filename) 43 | self.parse_header() 44 | self.parse_doms() 45 | self.det_file.close() 46 | 47 | def open_file(self, filename): 48 | """Create the file handler""" 49 | self.det_file = open(filename, 'r') 50 | 51 | def parse_header(self): 52 | """Extract information from the header of the detector file""" 53 | self.det_file.seek(0, 0) 54 | first_line = self.det_file.readline() 55 | self.det_id, self.n_doms = split(first_line, int) 56 | 57 | # pylint: disable=C0103 58 | def parse_doms(self): 59 | """Extract dom information from detector file""" 60 | self.det_file.seek(0, 0) 61 | self.det_file.readline() 62 | lines = self.det_file.readlines() 63 | try: 64 | while True: 65 | line = lines.pop(0) 66 | if not line: 67 | continue 68 | try: 69 | dom_id, line_id, floor_id, n_pmts = split(line, int) 70 | except ValueError: 71 | continue 72 | self.n_pmts_per_dom = n_pmts 73 | for i in range(n_pmts): 74 | raw_pmt_info = lines.pop(0) 75 | pmt_info = raw_pmt_info.split() 76 | pmt_id, x, y, 
z, rest = unpack_nfirst(pmt_info, 4) 77 | dx, dy, dz, t0, rest = unpack_nfirst(rest, 4) 78 | if rest: 79 | log.warn("Unexpected PMT values: '{0}'".format(rest)) 80 | pmt_id = int(pmt_id) 81 | pmt_pos = [float(n) for n in (x, y, z)] 82 | pmt_dir = [float(n) for n in (dx, dy, dz)] 83 | t0 = float(t0) 84 | if floor_id < 0: 85 | _, new_floor_id, _ = self.pmtid2omkey_old(pmt_id) 86 | log.error("Floor ID is negative for PMT {0}.\n" 87 | "Guessing correct id: {1}" 88 | .format(pmt_id, new_floor_id)) 89 | floor_id = new_floor_id 90 | #TODO: following line is here bc of the bad MC floor IDs 91 | # put it outside the for loop in future 92 | self.doms[dom_id] = (line_id, floor_id, n_pmts) 93 | omkey = (line_id, floor_id, i) 94 | pmt = PMT(pmt_id, pmt_pos, pmt_dir, t0, i, omkey) 95 | self.pmts.append(pmt) 96 | self._pmts_by_omkey[(line_id, floor_id, i)] = pmt 97 | self._pmts_by_id[pmt_id] = pmt 98 | except IndexError: 99 | pass 100 | 101 | @property 102 | def dom_positions(self): 103 | """The positions of the DOMs, taken from the PMT with the lowest ID.""" 104 | return [pmt.pos for pmt in self._pmts_by_id.values() 105 | if pmt.daq_channel == 0] 106 | 107 | @property 108 | def pmt_angles(self): 109 | """A list of PMT directions sorted by PMT channel""" 110 | if not self._pmt_angles: 111 | pmts = self.pmts[:self.n_pmts_per_dom] 112 | self._pmt_angles = [pmt.dir for pmt in pmts] 113 | return self._pmt_angles 114 | 115 | @property 116 | def ascii(self): 117 | """The ascii representation of the detector""" 118 | header = "{det.det_id} {det.n_doms}".format(det=self) 119 | doms = "" 120 | for dom_id, (line_id, floor_id, n_pmts) in self.doms.iteritems(): 121 | doms += "{0} {1} {2} {3}\n".format(dom_id, line_id, floor_id, n_pmts) 122 | for i in xrange(n_pmts): 123 | pmt = self._pmts_by_omkey[(line_id, floor_id, i)] 124 | doms += " {0} {1} {2} {3} {4} {5} {6} {7}\n".format( 125 | pmt.id, pmt.pos.x, pmt.pos.y, pmt.pos.z, 126 | pmt.dir.x, pmt.dir.y, pmt.dir.z, 127 | pmt.t0 128 | ) 129 | 
return header + "\n" + doms 130 | 131 | def write(self, filename): 132 | with open(filename, 'w') as f: 133 | f.write(self.ascii) 134 | print("Detector file saved as '{0}'".format(filename)) 135 | 136 | def pmt_with_id(self, pmt_id): 137 | """Get PMT with pmt_id""" 138 | try: 139 | return self._pmts_by_id[pmt_id] 140 | except KeyError: 141 | raise KeyError("No PMT found for ID: {0}".format(pmt_id)) 142 | 143 | def pmtid2omkey(self, pmt_id): 144 | return self._pmts_by_id[int(pmt_id)].omkey 145 | 146 | def pmtid2omkey_old(self, pmt_id, 147 | first_pmt_id=1, oms_per_line=18, pmts_per_om=31): 148 | """Convert (consecutive) raw PMT IDs to Multi-OMKeys.""" 149 | pmts_per_line = oms_per_line * pmts_per_om 150 | line = ((pmt_id - first_pmt_id) // pmts_per_line) + 1 151 | om = oms_per_line - (pmt_id - first_pmt_id) % pmts_per_line // pmts_per_om 152 | pmt = (pmt_id - first_pmt_id) % pmts_per_om 153 | return int(line), int(om), int(pmt) 154 | 155 | 156 | 157 | class PMT(object): 158 | """Represents a photomultiplier""" 159 | def __init__(self, id, pos, dir, t0, daq_channel, omkey): 160 | self.id = id 161 | self.pos = Point(pos) 162 | self.dir = Direction(dir) 163 | self.t0 = t0 164 | self.daq_channel = daq_channel 165 | self.omkey = omkey 166 | 167 | def __str__(self): 168 | return "PMT id:{0} pos: {1} dir: dir{2} t0: {3} DAQ channel: {4}"\ 169 | .format(self.id, self.pos, self.dir, self.t0, self.daq_channel) 170 | 171 | -------------------------------------------------------------------------------- /km3pipe/core.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: core.py 3 | # pylint: disable=locally-disabled 4 | """ 5 | The core of the KM3Pipe framework. 
6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | __author__ = 'tamasgal' 11 | 12 | import signal 13 | 14 | from km3pipe.hardware import Detector 15 | 16 | import logging 17 | from km3pipe.logger import logging 18 | 19 | log = logging.getLogger(__name__) # pylint: disable=C0103 20 | 21 | 22 | class Pipeline(object): 23 | """The holy pipeline which holds everything together""" 24 | 25 | def __init__(self, blob=None): 26 | self.modules = [] 27 | self.geometry = None 28 | self.blob = blob or Blob() 29 | self._cycle_count = 0 30 | self._stop = False 31 | self._finished = False 32 | 33 | def attach(self, module_class, name=None, **kwargs): 34 | """Attach a module to the pipeline system""" 35 | if not name: 36 | name = module_class.__name__ 37 | module = module_class(name=name, **kwargs) 38 | log.info("Attaching module '{0}'".format(name)) 39 | try: 40 | module.get_detector() 41 | self.geometry = module 42 | except AttributeError: 43 | if len(self.modules) < 1 and not isinstance(module, Pump): 44 | log.error("The first module to attach to the pipeline should " 45 | "be a Pump!") 46 | self.modules.append(module) 47 | 48 | def _drain(self, cycles=None): 49 | """Activate the pump and let the flow go. 50 | 51 | This will call the process() method on each attached module until 52 | a StopIteration is raised, usually by a pump when it reached the EOF. 53 | 54 | A StopIteration is also raised when self.cycles was set and the 55 | number of cycles has reached that limit. 
56 | 57 | """ 58 | if not cycles: 59 | log.info("No cycle count set, the pipeline may be drained forever.") 60 | 61 | if self.geometry: 62 | log.info("Setting up the detector geometry.") 63 | for module in self.modules: 64 | module.detector = self.geometry.get_detector() 65 | 66 | try: 67 | while not self._stop: 68 | self._cycle_count += 1 69 | log.debug("Pumping blob #{0}".format(self._cycle_count)) 70 | self.blob = self.modules[0].process(self.blob) 71 | for module in self.modules[1:]: 72 | if self.blob is None: 73 | log.debug("Skipping {0}, due to empty blob." 74 | .format(module.name)) 75 | continue 76 | log.debug("Processing {0} ".format(module.name)) 77 | self.blob = module.process(self.blob) 78 | if cycles and self._cycle_count >= cycles: 79 | raise StopIteration 80 | except StopIteration: 81 | log.info("Nothing left to pump through.") 82 | self.finish() 83 | 84 | def drain(self, cycles=None): 85 | """Execute _drain while trapping KeyboardInterrupt""" 86 | log.info("Now draining...") 87 | signal.signal(signal.SIGINT, self._handle_ctrl_c) 88 | try: 89 | self._drain(cycles) 90 | except KeyboardInterrupt: 91 | pass 92 | 93 | def finish(self): 94 | """Call finish() on each attached module""" 95 | for module in self.modules: 96 | log.info("Finishing {0}".format(module.name)) 97 | module.pre_finish() 98 | self._finished = True 99 | 100 | def _handle_ctrl_c(self, *args): 101 | """Handle the keyboard interrupts.""" 102 | if self._stop: 103 | print("Forced shutdown...") 104 | raise SystemExit 105 | if not self._stop: 106 | print(42*'=' + "\nGot CTRL+C, waiting for the current cycle...\n" 107 | "Press CTRL+C again if you're in hurry!\n" + 42*'=') 108 | self._stop = True 109 | 110 | 111 | class Module(object): 112 | """The module which can be attached to the pipeline""" 113 | 114 | def __init__(self, name=None, **parameters): 115 | log.debug("Initialising {0}".format(name)) 116 | self._name = name 117 | self.parameters = parameters 118 | self.detector = None 119 | 120 
| @property 121 | def name(self): 122 | """The name of the module""" 123 | return self._name 124 | 125 | def add(self, name, value): 126 | """Add the parameter with the desired value to the dict""" 127 | self.parameters[name] = value 128 | 129 | def get(self, name): 130 | """Return the value of the requested parameter""" 131 | return self.parameters.get(name) 132 | 133 | def process(self, blob): # pylint: disable=R0201 134 | """Knead the blob and return it""" 135 | return blob 136 | 137 | def finish(self): 138 | """Clean everything up.""" 139 | pass 140 | 141 | def pre_finish(self): 142 | """Do the last few things before calling finish()""" 143 | self.finish() 144 | 145 | 146 | class Pump(Module): 147 | """The pump with basic file or socket handling.""" 148 | 149 | def __init__(self, **context): 150 | Module.__init__(self, **context) 151 | self.blob_file = None 152 | 153 | def open_file(self, filename): 154 | """Open the file with filename""" 155 | try: 156 | self.blob_file = open(filename, 'rb') 157 | except TypeError: 158 | log.error("Please specify a valid filename.") 159 | raise SystemExit 160 | except IOError as error_message: 161 | log.error(error_message) 162 | raise SystemExit 163 | 164 | def process(self, blob): 165 | """Create a blob""" 166 | raise NotImplementedError("The pump has no process() method!") 167 | 168 | def rewind_file(self): 169 | """Put the file pointer to position 0""" 170 | self.blob_file.seek(0, 0) 171 | 172 | def close_file(self): 173 | """Close file.""" 174 | if self.blob_file: 175 | self.blob_file.close() 176 | 177 | def pre_finish(self): 178 | """Clean up open file or socket-handlers.""" 179 | Module.finish(self) 180 | self.close_file() 181 | 182 | 183 | class Blob(dict): 184 | """A simple dict with a fancy name. 
This should hold the data.""" 185 | pass 186 | 187 | 188 | class Geometry(Module): 189 | """A very simple, preliminary Module which gives access to the geometry""" 190 | def __init__(self, **context): 191 | super(self.__class__, self).__init__(**context) 192 | filename = self.get('filename') 193 | self.detector = Detector(filename) 194 | 195 | def get_detector(self): 196 | """Return the detector""" 197 | return self.detector 198 | 199 | -------------------------------------------------------------------------------- /km3pipe/pumps/tests/test_daq.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: test_daq.py 3 | # pylint: disable=C0111,R0904,C0103 4 | """ 5 | ... 6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | from km3pipe.testing import TestCase, StringIO 11 | from km3pipe.pumps.daq import (DAQPump, DAQPreamble, DAQHeader, 12 | DAQSummaryslice, DAQEvent) 13 | 14 | from binascii import unhexlify 15 | 16 | 17 | HEX_DATA = ''.join("85000000d1070000ae01000001000000000000000000000003000000" + 18 | "65000000000000000000000000000000000000000000000000000000" + 19 | "00000000000000660000000000000000000000000000000000000000" + 20 | "00000000000000000000000000006700000000000000000000000000" + 21 | "00000000000000000000000000000000000000000070000000112700" + 22 | "00ae0100000200000000000000000080000000000000000000020000" + 23 | "000000000000000000020000006700000007f5f40500306700000012" + 24 | "f8f405002704000000660000000a6df50500186700000007f5f40500" + 25 | "306700000012f8f4050027670000000c1bf605002166000000112700" + 26 | "00ae0100000200000000000000000080000100000000000000020000" + 27 | "0000000000000000000200000067000000108b1e0b00266700000011" + 28 | "901e0b0012030000006500000016f41f0b002467000000108b1e0b00" + 29 | "266700000011901e0b00125c00000011270000ae0100000200000000" + 30 | "00000000008000020000000000000002000000000000000000000002" + 31 | 
"00000066000000088b950f0023660000001884950f001a0200000066" + 32 | "0000001884950f001a66000000088b950f00235c00000011270000ae" + 33 | "01000002000000000000000000800003000000000000000200000000" + 34 | "000000000000000200000065000000040a1a130022650000000a0e1a" + 35 | "1300280200000065000000040a1a130022650000000a0e1a130028") 36 | BINARY_DATA = unhexlify(HEX_DATA.encode()) 37 | try: 38 | TEST_FILE = StringIO(BINARY_DATA) 39 | except TypeError: 40 | from io import BytesIO 41 | TEST_FILE = BytesIO(BINARY_DATA) 42 | 43 | 44 | class TestDAQPump(TestCase): 45 | 46 | def setUp(self): 47 | TEST_FILE.seek(0, 0) 48 | self.pump = DAQPump() 49 | self.pump.blob_file = TEST_FILE 50 | 51 | def test_determine_frame_positions(self): 52 | pump = self.pump 53 | pump.determine_frame_positions() 54 | self.assertListEqual([0, 133, 245, 347, 439], pump.frame_positions) 55 | 56 | def test_next_blob_finds_correct_frame_types(self): 57 | pump = self.pump 58 | blob = pump.next_blob() 59 | self.assertTrue('DAQSummaryslice' in blob) 60 | blob = pump.next_blob() 61 | self.assertTrue('DAQEvent' in blob) 62 | blob = pump.next_blob() 63 | self.assertTrue('DAQEvent' in blob) 64 | blob = pump.next_blob() 65 | self.assertTrue('DAQEvent' in blob) 66 | blob = pump.next_blob() 67 | self.assertTrue('DAQEvent' in blob) 68 | 69 | def test_next_blob_raises_stop_iteration_when_eof_reached(self): 70 | pump = self.pump 71 | with self.assertRaises(StopIteration): 72 | for i in range(6): 73 | pump.next_blob() 74 | 75 | def test_seek_to_frame(self): 76 | pump = self.pump 77 | pump.determine_frame_positions() 78 | pump.seek_to_frame(2) 79 | self.assertEqual(245, pump.blob_file.tell()) 80 | 81 | def test_next_blob_returns_correct_frame_after_seek_to_frame(self): 82 | pump = self.pump 83 | pump.determine_frame_positions() 84 | pump.seek_to_frame(1) 85 | blob = pump.next_blob() 86 | self.assertEqual(4, blob['DAQEvent'].n_snapshot_hits) 87 | 88 | def test_get_blob(self): 89 | pump = self.pump 90 | 
pump.determine_frame_positions() 91 | blob = pump.get_blob(2) 92 | self.assertEqual(3, blob['DAQEvent'].n_snapshot_hits) 93 | 94 | class TestDAQPreamble(TestCase): 95 | 96 | def setUp(self): 97 | TEST_FILE.seek(0, 0) 98 | 99 | def test_init_with_byte_data(self): 100 | byte_data = unhexlify("85000000D1070000".encode()) 101 | preamble = DAQPreamble(byte_data=byte_data) 102 | self.assertEqual(133, preamble.length) 103 | self.assertEqual(2001, preamble.data_type) 104 | 105 | def test_parse_from_file(self): 106 | self.setUp() 107 | preamble = DAQPreamble(file_obj=TEST_FILE) 108 | self.assertEqual(133, preamble.length) 109 | self.assertEqual(2001, preamble.data_type) 110 | 111 | 112 | 113 | class TestDAQHeader(TestCase): 114 | 115 | def test_init_with_byte_data(self): 116 | byte_data = unhexlify("AE010000010000000000000000000000".encode()) 117 | header = DAQHeader(byte_data=byte_data) 118 | self.assertEqual(430, header.run) 119 | self.assertEqual(1, header.time_slice) 120 | 121 | def test_parse_from_file(self): 122 | TEST_FILE.seek(8, 0) # skip preamble 123 | self.setUp() 124 | header = DAQHeader(file_obj=TEST_FILE) 125 | self.assertEqual(430, header.run) 126 | self.assertEqual(1, header.time_slice) 127 | 128 | 129 | class TestDAQSummaryslice(TestCase): 130 | 131 | def test_init_with_a_slice(self): 132 | TEST_FILE.seek(0, 0) 133 | DAQPreamble(file_obj=TEST_FILE) 134 | sum_slice = DAQSummaryslice(TEST_FILE) 135 | self.assertEqual(3, sum_slice.n_summary_frames) 136 | print(sum_slice.summary_frames.keys()) 137 | self.assertListEqual([101, 102, 103], 138 | list(sum_slice.summary_frames.keys())) 139 | self.assertEqual(31, len(sum_slice.summary_frames[101])) 140 | self.assertEqual(31, len(sum_slice.summary_frames[102])) 141 | self.assertEqual(31, len(sum_slice.summary_frames[103])) 142 | 143 | 144 | class TestDAQEvent(TestCase): 145 | 146 | def setUp(self): 147 | TEST_FILE.seek(245, 0) 148 | DAQPreamble(file_obj=TEST_FILE) 149 | self.event = DAQEvent(TEST_FILE) 150 | 151 | def 
def unpack_nfirst(seq, nfirst):
    """Unpack the nfrist items from the list and return the rest.

    Missing positions are padded with ``None``; the remainder is yielded
    last as a single tuple.

    >>> a, b, c, rest = unpack_nfirst((1, 2, 3, 4, 5), 3)
    >>> a, b, c
    (1, 2, 3)
    >>> rest
    (4, 5)

    """
    it = iter(seq)
    yielded = 0
    while yielded < nfirst:
        yield next(it, None)
        yielded += 1
    yield tuple(it)
def namedtuple_with_defaults(typename, field_names, default_values=()):
    """Create a namedtuple with default values.

    `default_values` may be a sequence (applied right-aligned, like
    ``__defaults__``) or a mapping of field name to default.  Fields
    without an explicit default fall back to ``None``.

    >>> Node = namedtuple_with_defaults('Node', 'val left right')
    >>> Node()
    Node(val=None, left=None, right=None)
    >>> Node = namedtuple_with_defaults('Node', 'val left right', [1, 2, 3])
    >>> Node()
    Node(val=1, left=2, right=3)
    >>> Node = namedtuple_with_defaults('Node', 'val left right', {'right':7})
    >>> Node()
    Node(val=None, left=None, right=7)
    >>> Node(4)
    Node(val=4, left=None, right=7)
    """
    # Bug fix: `collections.Mapping` was a deprecated alias and was removed
    # in Python 3.10 -- the ABC lives in `collections.abc`.
    try:
        from collections.abc import Mapping  # Python 3.3+
    except ImportError:  # pragma: no cover - legacy Python 2 fallback
        from collections import Mapping
    # Note: the default argument is now an (immutable) empty tuple instead
    # of a shared mutable list.
    the_tuple = namedtuple(typename, field_names)
    the_tuple.__new__.__defaults__ = (None,) * len(the_tuple._fields)
    if isinstance(default_values, Mapping):
        prototype = the_tuple(**default_values)
    else:
        prototype = the_tuple(*default_values)
    the_tuple.__new__.__defaults__ = tuple(prototype)
    return the_tuple
""" 94 | return vector / np.linalg.norm(vector) 95 | 96 | 97 | def circ_permutation(items): 98 | """Calculate the circular permutation for a given list of items.""" 99 | permutations = [] 100 | for i in range(len(items)): 101 | permutations.append(items[i:] + items[:i]) 102 | return permutations 103 | 104 | 105 | def geant2pdg(geant_code): 106 | """Convert GEANT particle ID to PDG""" 107 | conversion_table = { 108 | 1: 22, # photon 109 | 2: -11, # positron 110 | 3: 11, # electron 111 | 5: -13, # muplus 112 | 6: 13, # muminus 113 | 7: 111, # pi0 114 | 8: 211, # piplus 115 | 9: -211, # piminus 116 | 10: 130, # k0long 117 | 11: 321, # kplus 118 | 12: -321, # kminus 119 | 13: 2112, # neutron 120 | 14: 2212, # proton 121 | 16: 310, # kaon0short 122 | 17: 221, # eta 123 | } 124 | try: 125 | return conversion_table[geant_code] 126 | except KeyError: 127 | return 0 128 | 129 | def pdg2name(pdg_id): 130 | """Convert PDG ID to human readable names""" 131 | # pylint: disable=C0330 132 | conversion_table = { 133 | 11: 'e-', 134 | -11: 'e+', 135 | 12: 'nu_e', 136 | -12: 'anu_e', 137 | 13: 'mu-', 138 | -13: 'mu+', 139 | 14: 'nu_mu', 140 | -14: 'anu_mu', 141 | 15: 'tau-', 142 | -15: 'tau+', 143 | 16: 'nu_tau', 144 | -16: 'anu_tau', 145 | 22: 'photon', 146 | 111: 'pi0', 147 | 130: 'K0L', 148 | 211: 'pi-', 149 | -211: 'pi+', 150 | 310: 'K0S', 151 | 311: 'K0', 152 | 321: 'K+', 153 | -321: 'K-', 154 | 2112: 'n', 155 | 2212: 'p', 156 | -2212: 'p-', 157 | } 158 | try: 159 | return conversion_table[pdg_id] 160 | except KeyError: 161 | return "N/A" 162 | 163 | 164 | class PMTReplugger(object): 165 | """Replugs PMTs and modifies the data according to the new setup.""" 166 | 167 | def __init__(self, pmt_combs, angles, rates): 168 | self._pmt_combs = pmt_combs 169 | self._new_combs = [] 170 | self._angles = angles 171 | self._rates = rates 172 | self._switch = None 173 | 174 | def angle_for(self, pmt_comb): 175 | """Return angle for given PMT combination""" 176 | combs = 
class Timer(object):
    """A very simple, accurate and easy to use timer context.

    Usage::

        with Timer() as t:
            do_stuff()
        t.get_seconds()
    """
    def __enter__(self):
        self.__start = time.time()
        # Bug fix: the original returned None, so `with Timer() as t:`
        # bound `t` to None instead of the timer instance.
        return self

    def __exit__(self, exc_type, exc_value, exc_traceback):
        self.__finish = time.time()

    def get_seconds(self):
        """Return the elapsed wall-clock time in seconds.

        Only valid after the context has been exited.
        """
        return self.__finish - self.__start

    def log(self):
        """Print the measured duration in seconds."""
        print("{0}s".format(self.get_seconds()))
Filename: test_hardware.py 3 | # pylint: disable=C0111,C0103,R0904 4 | """ 5 | Detector description (detx format v5) 6 | 7 | global_det_id ndoms 8 | dom_id line_id floor_id npmts 9 | pmt_id_global x y z dx dy dz t0 10 | pmt_id_global x y z dx dy dz t0 11 | ... 12 | pmt_id_global x y z dx dy dz t0 13 | dom_id line_id floor_id npmts 14 | ... 15 | 16 | """ 17 | from __future__ import division, absolute_import, print_function 18 | 19 | from km3pipe.testing import TestCase, StringIO, skipIf 20 | from km3pipe.hardware import Detector, PMT 21 | 22 | 23 | EXAMPLE_DETX = StringIO("\n".join(( 24 | "1 3", 25 | "1 1 1 3", 26 | " 1 1.1 1.2 1.3 -1.1 0.2 0.3 10", 27 | " 2 1.4 1.5 1.6 0.1 -1.2 0.3 20", 28 | " 3 1.7 1.8 1.9 0.1 0.2 -1.3 30", 29 | "2 1 2 3", 30 | " 4 2.1 2.2 2.3 -1.1 0.2 0.3 40", 31 | " 5 2.4 2.5 2.6 0.1 -1.2 0.3 50", 32 | " 6 2.7 2.8 2.9 0.1 0.2 -1.3 60", 33 | "3 1 3 3", 34 | " 7 3.1 3.2 3.3 -1.1 0.2 0.3 70", 35 | " 8 3.4 3.5 3.6 0.1 -1.2 0.3 80", 36 | " 9 3.7 3.8 3.9 0.1 0.2 -1.3 90",))) 37 | 38 | EXAMPLE_DETX_MIXED_IDS = StringIO("\n".join(( 39 | "1 3", 40 | "8 1 1 3", 41 | " 83 1.1 1.2 1.3 -1.1 0.2 0.3 10", 42 | " 81 1.4 1.5 1.6 0.1 -1.2 0.3 20", 43 | " 82 1.7 1.8 1.9 0.1 0.2 -1.3 30", 44 | "7 1 2 3", 45 | " 71 2.1 2.2 2.3 -1.1 0.2 0.3 40", 46 | " 73 2.4 2.5 2.6 0.1 -1.2 0.3 50", 47 | " 72 2.7 2.8 2.9 0.1 0.2 -1.3 60", 48 | "6 1 3 3", 49 | " 62 3.1 3.2 3.3 -1.1 0.2 0.3 70", 50 | " 63 3.4 3.5 3.6 0.1 -1.2 0.3 80", 51 | " 61 3.7 3.8 3.9 0.1 0.2 -1.3 90",))) 52 | 53 | EXAMPLE_DETX_WRITE = StringIO("\n".join(( 54 | "1 3", 55 | "1 1 1 3", 56 | " 1 1.1 1.2 1.3 1.0 0.0 0.0 10.0", 57 | " 2 1.4 1.5 1.6 0.0 1.0 0.0 20.0", 58 | " 3 1.7 1.8 1.9 0.0 0.0 1.0 30.0", 59 | "2 1 2 3", 60 | " 4 2.1 2.2 2.3 0.0 1.0 0.0 40.0", 61 | " 5 2.4 2.5 2.6 0.0 0.0 1.0 50.0", 62 | " 6 2.7 2.8 2.9 1.0 0.0 0.0 60.0", 63 | "3 1 3 3", 64 | " 7 3.1 3.2 3.3 0.0 0.0 1.0 70.0", 65 | " 8 3.4 3.5 3.6 0.0 1.0 0.0 80.0", 66 | " 9 3.7 3.8 3.9 1.0 0.0 0.0 90.0\n",))) 67 | 68 | 
EXAMPLE_MC_DETX_WRITE_MIXED_IDS = StringIO("\n".join(( 69 | "-1 3", 70 | "6 1 1 3", 71 | " 31 1.1 1.2 1.3 1.0 0.0 0.0 10.0", 72 | " 22 1.4 1.5 1.6 0.0 1.0 0.0 20.0", 73 | " 13 1.7 1.8 1.9 0.0 0.0 1.0 30.0", 74 | "3 1 2 3", 75 | " 34 2.1 2.2 2.3 1.0 0.0 0.0 40.0", 76 | " 45 2.4 2.5 2.6 0.0 1.0 0.0 50.0", 77 | " 16 2.7 2.8 2.9 0.0 0.0 1.0 60.0", 78 | "9 1 3 3", 79 | " 17 3.1 3.2 3.3 1.0 0.0 0.0 70.0", 80 | " 48 3.4 3.5 3.6 0.0 1.0 0.0 80.0", 81 | " 39 3.7 3.8 3.9 0.0 0.0 1.0 90.0\n",))) 82 | 83 | class TestDetector(TestCase): 84 | 85 | def setUp(self): 86 | self.det = Detector() 87 | self.det.det_file = EXAMPLE_DETX 88 | 89 | def test_parse_header_extracts_correct_det_id(self): 90 | self.det.parse_header() 91 | self.assertEqual(1, self.det.det_id) 92 | 93 | def test_parse_header_extracts_correct_n_doms(self): 94 | self.det.parse_header() 95 | self.assertEqual(3, self.det.n_doms) 96 | 97 | def test_parse_doms_maps_each_dom_correctly(self): 98 | self.det.parse_doms() 99 | expected = {1: (1, 1, 3), 2: (1, 2, 3), 3: (1, 3, 3)} 100 | self.assertDictEqual(expected, self.det.doms) 101 | 102 | def test_parse_doms_maps_each_dom_correctly_for_mixed_pmt_ids(self): 103 | self.det.det_file = EXAMPLE_DETX_MIXED_IDS 104 | self.det.parse_doms() 105 | expected = {8: (1, 1, 3), 7: (1, 2, 3), 6: (1, 3, 3)} 106 | self.assertDictEqual(expected, self.det.doms) 107 | 108 | @skipIf(True, "Weird one hour bias on date?") 109 | def test_parse_doms_fills_pmts_dict(self): 110 | self.det.parse_doms() 111 | self.assertEqual(9, len(self.det.pmts)) 112 | self.assertTupleEqual((7, 3.1, 3.2, 3.3, -1.1, 0.2, 0.3, 70), 113 | self.det.pmts[(1, 3, 0)]) 114 | 115 | def test_dom_positions(self): 116 | self.det.parse_doms() 117 | for i, position in enumerate(self.det.dom_positions): 118 | self.assertAlmostEqual(i + 1.1, position.x) 119 | self.assertAlmostEqual(i + 1.2, position.y) 120 | self.assertAlmostEqual(i + 1.3, position.z) 121 | 122 | def test_omkeys(self): 123 | self.det.parse_doms() 124 | 
self.assertEqual((1, 1, 0), self.det.pmt_with_id(1).omkey) 125 | self.assertEqual((1, 2, 1), self.det.pmt_with_id(5).omkey) 126 | 127 | def test_pmt_with_id_raises_exception_for_invalid_id(self): 128 | self.det.parse_doms() 129 | with self.assertRaises(KeyError): 130 | self.det.pmt_with_id(100) 131 | 132 | @skipIf(True, "DOM positions ordering unclear") 133 | def test_dom_positions_with_mixed_pmt_ids(self): 134 | self.det.det_file = EXAMPLE_DETX_MIXED_IDS 135 | self.det.parse_doms() 136 | for i, position in enumerate(self.det.dom_positions): 137 | self.assertAlmostEqual(i + 1.1, position.x) 138 | self.assertAlmostEqual(i + 1.2, position.y) 139 | self.assertAlmostEqual(i + 1.3, position.z) 140 | 141 | @skipIf(True, "DOM ordering is probably not important!") 142 | def test_ascii_detector(self): 143 | self.det.det_file = EXAMPLE_MC_DETX_WRITE_MIXED_IDS 144 | self.det.parse_header() 145 | self.det.parse_doms() 146 | self.assertEqual(self.det.det_file.getvalue(), self.det.ascii) 147 | 148 | def test_pmtid2omkey_old(self): 149 | pmtid2omkey = self.det.pmtid2omkey_old 150 | self.assertEqual((1, 13, 12), tuple(pmtid2omkey(168))) 151 | self.assertEqual((1, 12, 18), tuple(pmtid2omkey(205))) 152 | self.assertEqual((1, 11, 22), tuple(pmtid2omkey(240))) 153 | self.assertEqual((4, 11, 2), tuple(pmtid2omkey(1894))) 154 | self.assertEqual((9, 18, 0), tuple(pmtid2omkey(4465))) 155 | self.assertEqual((95, 7, 16), tuple(pmtid2omkey(52810))) 156 | self.assertEqual((95, 4, 13), tuple(pmtid2omkey(52900))) 157 | 158 | def test_pmtid2omkey_old_handles_floats(self): 159 | pmtid2omkey = self.det.pmtid2omkey_old 160 | self.assertEqual((1, 13, 12), tuple(pmtid2omkey(168.0))) 161 | self.assertEqual((1, 12, 18), tuple(pmtid2omkey(205.0))) 162 | self.assertEqual((1, 11, 22), tuple(pmtid2omkey(240.0))) 163 | self.assertEqual((4, 11, 2), tuple(pmtid2omkey(1894.0))) 164 | self.assertEqual((9, 18, 0), tuple(pmtid2omkey(4465.0))) 165 | self.assertEqual((95, 7, 16), tuple(pmtid2omkey(52810.0))) 166 | 
class TestPMT(TestCase):
    """Tests for the PMT value object from km3pipe.hardware."""

    def test_init(self):
        # Constructor signature (from the asserted attributes below):
        # PMT(id, pos, dir, t0, daq_channel, omkey)
        pmt = PMT(1, (1, 2, 3), (4, 5, 6), 7, 8, (9, 10, 11))
        self.assertEqual(1, pmt.id)
        # The position tuple is exposed verbatim via .pos.{x,y,z}.
        self.assertEqual(1, pmt.pos.x)
        self.assertEqual(2, pmt.pos.y)
        self.assertEqual(3, pmt.pos.z)
        # The expected values equal (4, 5, 6) / |(4, 5, 6)|, i.e. the
        # direction vector appears to be normalised to unit length by
        # the PMT constructor -- confirm against km3pipe.hardware.
        self.assertAlmostEqual(0.455842, pmt.dir.x, 6)
        self.assertAlmostEqual(0.569803, pmt.dir.y, 6)
        self.assertAlmostEqual(0.683763, pmt.dir.z, 6)
        self.assertEqual(7, pmt.t0)
        self.assertEqual(8, pmt.daq_channel)
        self.assertEqual((9, 10, 11), pmt.omkey)
21 | 22 | .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext 23 | 24 | help: 25 | @echo "Please use \`make ' where is one of" 26 | @echo " html to make standalone HTML files" 27 | @echo " dirhtml to make HTML files named index.html in directories" 28 | @echo " singlehtml to make a single large HTML file" 29 | @echo " pickle to make pickle files" 30 | @echo " json to make JSON files" 31 | @echo " htmlhelp to make HTML files and a HTML help project" 32 | @echo " qthelp to make HTML files and a qthelp project" 33 | @echo " devhelp to make HTML files and a Devhelp project" 34 | @echo " epub to make an epub" 35 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 36 | @echo " latexpdf to make LaTeX files and run them through pdflatex" 37 | @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" 38 | @echo " text to make text files" 39 | @echo " man to make manual pages" 40 | @echo " texinfo to make Texinfo files" 41 | @echo " info to make Texinfo files and run them through makeinfo" 42 | @echo " gettext to make PO message catalogs" 43 | @echo " changes to make an overview of all changed/added/deprecated items" 44 | @echo " xml to make Docutils-native XML files" 45 | @echo " pseudoxml to make pseudoxml-XML files for display purposes" 46 | @echo " linkcheck to check all external links for integrity" 47 | @echo " doctest to run all doctests embedded in the documentation (if enabled)" 48 | 49 | clean: 50 | rm -rf $(BUILDDIR)/* 51 | 52 | html: 53 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html 54 | @echo 55 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." 56 | 57 | dirhtml: 58 | $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml 59 | @echo 60 | @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." 
61 | 62 | singlehtml: 63 | $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml 64 | @echo 65 | @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." 66 | 67 | pickle: 68 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle 69 | @echo 70 | @echo "Build finished; now you can process the pickle files." 71 | 72 | json: 73 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json 74 | @echo 75 | @echo "Build finished; now you can process the JSON files." 76 | 77 | htmlhelp: 78 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp 79 | @echo 80 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 81 | ".hhp project file in $(BUILDDIR)/htmlhelp." 82 | 83 | qthelp: 84 | $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp 85 | @echo 86 | @echo "Build finished; now you can run "qcollectiongenerator" with the" \ 87 | ".qhcp project file in $(BUILDDIR)/qthelp, like this:" 88 | @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/KM3Pipe.qhcp" 89 | @echo "To view the help file:" 90 | @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/KM3Pipe.qhc" 91 | 92 | devhelp: 93 | $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp 94 | @echo 95 | @echo "Build finished." 96 | @echo "To view the help file:" 97 | @echo "# mkdir -p $$HOME/.local/share/devhelp/KM3Pipe" 98 | @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/KM3Pipe" 99 | @echo "# devhelp" 100 | 101 | epub: 102 | $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub 103 | @echo 104 | @echo "Build finished. The epub file is in $(BUILDDIR)/epub." 105 | 106 | latex: 107 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 108 | @echo 109 | @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." 110 | @echo "Run \`make' in that directory to run these through (pdf)latex" \ 111 | "(use \`make latexpdf' here to do that automatically)." 
112 | 113 | latexpdf: 114 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 115 | @echo "Running LaTeX files through pdflatex..." 116 | $(MAKE) -C $(BUILDDIR)/latex all-pdf 117 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 118 | 119 | latexpdfja: 120 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex 121 | @echo "Running LaTeX files through platex and dvipdfmx..." 122 | $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja 123 | @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." 124 | 125 | text: 126 | $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text 127 | @echo 128 | @echo "Build finished. The text files are in $(BUILDDIR)/text." 129 | 130 | man: 131 | $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man 132 | @echo 133 | @echo "Build finished. The manual pages are in $(BUILDDIR)/man." 134 | 135 | texinfo: 136 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 137 | @echo 138 | @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." 139 | @echo "Run \`make' in that directory to run these through makeinfo" \ 140 | "(use \`make info' here to do that automatically)." 141 | 142 | info: 143 | $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo 144 | @echo "Running Texinfo files through makeinfo..." 145 | make -C $(BUILDDIR)/texinfo info 146 | @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." 147 | 148 | gettext: 149 | $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale 150 | @echo 151 | @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." 152 | 153 | changes: 154 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes 155 | @echo 156 | @echo "The overview file is in $(BUILDDIR)/changes." 157 | 158 | linkcheck: 159 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck 160 | @echo 161 | @echo "Link check complete; look for any errors in the above output " \ 162 | "or in $(BUILDDIR)/linkcheck/output.txt." 
163 | 164 | doctest: 165 | $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest 166 | @echo "Testing of doctests in the sources finished, look at the " \ 167 | "results in $(BUILDDIR)/doctest/output.txt." 168 | 169 | xml: 170 | $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml 171 | @echo 172 | @echo "Build finished. The XML files are in $(BUILDDIR)/xml." 173 | 174 | pseudoxml: 175 | $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml 176 | @echo 177 | @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 178 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # KM3Pipe documentation build configuration file, created by 4 | # sphinx-quickstart on Sat Oct 4 19:16:43 2014. 5 | # 6 | # This file is execfile()d with the current directory set to its 7 | # containing dir. 8 | # 9 | # Note that not all possible configuration values are present in this 10 | # autogenerated file. 11 | # 12 | # All configuration values have a default; values that are commented out 13 | # serve to show the default. 14 | 15 | import sys 16 | import os 17 | 18 | sys.path.append('../') 19 | 20 | import km3pipe 21 | 22 | # If extensions (or modules to document with autodoc) are in another directory, 23 | # add these directories to sys.path here. If the directory is relative to the 24 | # documentation root, use os.path.abspath to make it absolute, like shown here. 25 | sys.path.insert(0, os.path.abspath('.')) 26 | 27 | # -- General configuration ------------------------------------------------ 28 | 29 | # If your documentation needs a minimal Sphinx version, state it here. 30 | #needs_sphinx = '1.0' 31 | 32 | # Add any Sphinx extension module names here, as strings. They can be 33 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 34 | # ones. 
35 | extensions = [ 36 | 'sphinx.ext.autodoc', 37 | # 'sphinxcontrib.napoleon', 38 | 'sphinx.ext.doctest', 39 | 'sphinx.ext.pngmath', 40 | 'sphinx.ext.viewcode', 41 | ] 42 | 43 | # Napoleon settings 44 | #napoleon_google_docstring = True 45 | #napoleon_numpy_docstring = True 46 | #napoleon_include_private_with_doc = False 47 | #napoleon_include_special_with_doc = True 48 | #napoleon_use_admonition_for_examples = False 49 | #napoleon_use_admonition_for_notes = False 50 | #napoleon_use_admonition_for_references = False 51 | #napoleon_use_ivar = False 52 | #napoleon_use_param = True 53 | #napoleon_use_rtype = True 54 | 55 | # Add any paths that contain templates here, relative to this directory. 56 | templates_path = ['_templates'] 57 | 58 | # The suffix of source filenames. 59 | source_suffix = '.rst' 60 | 61 | # The encoding of source files. 62 | #source_encoding = 'utf-8-sig' 63 | 64 | # The master toctree document. 65 | master_doc = 'index' 66 | 67 | # General information about the project. 68 | project = u'KM3Pipe' 69 | copyright = u'2015, Tamas Gal' 70 | 71 | # The version info for the project you're documenting, acts as replacement for 72 | # |version| and |release|, also used in various other places throughout the 73 | # built documents. 74 | # 75 | # The short X.Y version. 76 | version = '.'.join(km3pipe.__version__.split('.')[:2]) 77 | # The full version, including alpha/beta/rc tags. 78 | release = km3pipe.__version__ 79 | 80 | # The language for content autogenerated by Sphinx. Refer to documentation 81 | # for a list of supported languages. 82 | #language = None 83 | 84 | # There are two options for replacing |today|: either, you set today to some 85 | # non-false value, then it is used: 86 | #today = '' 87 | # Else, today_fmt is used as the format for a strftime call. 88 | #today_fmt = '%B %d, %Y' 89 | 90 | # List of patterns, relative to source directory, that match files and 91 | # directories to ignore when looking for source files. 
92 | exclude_patterns = ['_build'] 93 | 94 | # The reST default role (used for this markup: `text`) to use for all 95 | # documents. 96 | #default_role = None 97 | 98 | # If true, '()' will be appended to :func: etc. cross-reference text. 99 | #add_function_parentheses = True 100 | 101 | # If true, the current module name will be prepended to all description 102 | # unit titles (such as .. function::). 103 | #add_module_names = True 104 | 105 | # If true, sectionauthor and moduleauthor directives will be shown in the 106 | # output. They are ignored by default. 107 | #show_authors = False 108 | 109 | # The name of the Pygments (syntax highlighting) style to use. 110 | pygments_style = 'sphinx' 111 | 112 | # A list of ignored prefixes for module index sorting. 113 | #modindex_common_prefix = [] 114 | 115 | # If true, keep warnings as "system message" paragraphs in the built documents. 116 | #keep_warnings = False 117 | 118 | 119 | # -- Options for HTML output ---------------------------------------------- 120 | 121 | # The theme to use for HTML and HTML Help pages. See the documentation for 122 | # a list of builtin themes. 123 | html_theme = 'default' 124 | 125 | # Theme options are theme-specific and customize the look and feel of a theme 126 | # further. For a list of options available for each theme, see the 127 | # documentation. 128 | #html_theme_options = {} 129 | 130 | # Add any paths that contain custom themes here, relative to this directory. 131 | #html_theme_path = [] 132 | 133 | # The name for this set of Sphinx documents. If None, it defaults to 134 | # " v documentation". 135 | #html_title = None 136 | 137 | # A shorter title for the navigation bar. Default is the same as html_title. 138 | #html_short_title = None 139 | 140 | # The name of an image file (relative to this directory) to place at the top 141 | # of the sidebar. 142 | #html_logo = None 143 | 144 | # The name of an image file (within the static path) to use as favicon of the 145 | # docs. 
This file should be a Windows icon file (.ico) being 16x16 or 32x32 146 | # pixels large. 147 | #html_favicon = None 148 | 149 | # Add any paths that contain custom static files (such as style sheets) here, 150 | # relative to this directory. They are copied after the builtin static files, 151 | # so a file named "default.css" will overwrite the builtin "default.css". 152 | html_static_path = ['_static'] 153 | 154 | # Add any extra paths that contain custom files (such as robots.txt or 155 | # .htaccess) here, relative to this directory. These files are copied 156 | # directly to the root of the documentation. 157 | #html_extra_path = [] 158 | 159 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 160 | # using the given strftime format. 161 | #html_last_updated_fmt = '%b %d, %Y' 162 | 163 | # If true, SmartyPants will be used to convert quotes and dashes to 164 | # typographically correct entities. 165 | #html_use_smartypants = True 166 | 167 | # Custom sidebar templates, maps document names to template names. 168 | #html_sidebars = {} 169 | 170 | # Additional templates that should be rendered to pages, maps page names to 171 | # template names. 172 | #html_additional_pages = {} 173 | 174 | # If false, no module index is generated. 175 | #html_domain_indices = True 176 | 177 | # If false, no index is generated. 178 | #html_use_index = True 179 | 180 | # If true, the index is split into individual pages for each letter. 181 | #html_split_index = False 182 | 183 | # If true, links to the reST sources are added to the pages. 184 | #html_show_sourcelink = True 185 | 186 | # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. 187 | #html_show_sphinx = True 188 | 189 | # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. 190 | #html_show_copyright = True 191 | 192 | # If true, an OpenSearch description file will be output, and all pages will 193 | # contain a tag referring to it. 
The value of this option must be the 194 | # base URL from which the finished HTML is served. 195 | #html_use_opensearch = '' 196 | 197 | # This is the file name suffix for HTML files (e.g. ".xhtml"). 198 | #html_file_suffix = None 199 | 200 | # Output file base name for HTML help builder. 201 | htmlhelp_basename = 'KM3Pipedoc' 202 | 203 | 204 | # -- Options for LaTeX output --------------------------------------------- 205 | 206 | latex_elements = { 207 | # The paper size ('letterpaper' or 'a4paper'). 208 | #'papersize': 'letterpaper', 209 | 210 | # The font size ('10pt', '11pt' or '12pt'). 211 | #'pointsize': '10pt', 212 | 213 | # Additional stuff for the LaTeX preamble. 214 | #'preamble': '', 215 | } 216 | 217 | # Grouping the document tree into LaTeX files. List of tuples 218 | # (source start file, target name, title, 219 | # author, documentclass [howto, manual, or own class]). 220 | latex_documents = [ 221 | ('index', 'KM3Pipe.tex', u'KM3Pipe Documentation', 222 | u'Tamas Gal', 'manual'), 223 | ] 224 | 225 | # The name of an image file (relative to this directory) to place at the top of 226 | # the title page. 227 | #latex_logo = None 228 | 229 | # For "manual" documents, if this is true, then toplevel headings are parts, 230 | # not chapters. 231 | #latex_use_parts = False 232 | 233 | # If true, show page references after internal links. 234 | #latex_show_pagerefs = False 235 | 236 | # If true, show URL addresses after external links. 237 | #latex_show_urls = False 238 | 239 | # Documents to append as an appendix to all manuals. 240 | #latex_appendices = [] 241 | 242 | # If false, no module index is generated. 243 | #latex_domain_indices = True 244 | 245 | 246 | # -- Options for manual page output --------------------------------------- 247 | 248 | # One entry per manual page. List of tuples 249 | # (source start file, name, description, authors, manual section). 
250 | man_pages = [ 251 | ('index', 'km3pipe', u'KM3Pipe Documentation', 252 | [u'Tamas Gal'], 1) 253 | ] 254 | 255 | # If true, show URL addresses after external links. 256 | #man_show_urls = False 257 | 258 | 259 | # -- Options for Texinfo output ------------------------------------------- 260 | 261 | # Grouping the document tree into Texinfo files. List of tuples 262 | # (source start file, target name, title, author, 263 | # dir menu entry, description, category) 264 | texinfo_documents = [ 265 | ('index', 'KM3Pipe', u'KM3Pipe Documentation', 266 | u'Tamas Gal', 'KM3Pipe', 'One line description of project.', 267 | 'Miscellaneous'), 268 | ] 269 | 270 | # Documents to append as an appendix to all manuals. 271 | #texinfo_appendices = [] 272 | 273 | # If false, no module index is generated. 274 | #texinfo_domain_indices = True 275 | 276 | # How to display URL addresses: 'footnote', 'no', or 'inline'. 277 | #texinfo_show_urls = 'footnote' 278 | 279 | # If true, do not generate a @detailmenu in the "Top" node's menu. 280 | #texinfo_no_detailmenu = False 281 | -------------------------------------------------------------------------------- /km3pipe/pumps/daq.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: daq.py 3 | # pylint: disable=R0903 4 | """ 5 | Pumps for the DAQ data formats. 
6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | import struct 11 | from struct import unpack 12 | import pprint 13 | 14 | from km3pipe import Pump, Blob 15 | from km3pipe.logger import logging 16 | 17 | log = logging.getLogger(__name__) # pylint: disable=C0103 18 | 19 | DATA_TYPES = { 20 | 101: 'DAQSuperFrame', 21 | 201: 'DAQSummaryFrame', 22 | 1001: 'DAQTimeslice', 23 | 2001: 'DAQSummaryslice', 24 | 10001: 'DAQEvent', 25 | } 26 | 27 | 28 | class DAQPump(Pump): 29 | """A pump for binary DAQ files.""" 30 | 31 | def __init__(self, **context): 32 | super(self.__class__, self).__init__(**context) 33 | self.filename = self.get('filename') 34 | self.frame_positions = [] 35 | 36 | if self.filename: 37 | self.open_file(self.filename) 38 | self.determine_frame_positions() 39 | else: 40 | log.warn("No filename specified. Take care of the file handling!") 41 | 42 | def next_blob(self): 43 | """Get the next frame from file""" 44 | blob_file = self.blob_file 45 | try: 46 | preamble = DAQPreamble(file_obj=blob_file) 47 | except struct.error: 48 | raise StopIteration 49 | 50 | try: 51 | data_type = DATA_TYPES[preamble.data_type] 52 | except KeyError: 53 | data_type = 'Unknown' 54 | 55 | blob = Blob() 56 | blob[data_type] = None 57 | blob['DAQPreamble'] = preamble 58 | 59 | if data_type == 'DAQSummaryslice': 60 | daq_frame = DAQSummaryslice(blob_file) 61 | blob[data_type] = daq_frame 62 | blob['DAQHeader'] = daq_frame.header 63 | elif data_type == 'DAQEvent': 64 | daq_frame = DAQEvent(blob_file) 65 | blob[data_type] = daq_frame 66 | blob['DAQHeader'] = daq_frame.header 67 | else: 68 | log.warning("Skipping DAQ frame with data type code '{0}'." 
69 | .format(preamble.data_type)) 70 | blob_file.seek(preamble.length - DAQPreamble.size, 1) 71 | 72 | return blob 73 | 74 | def seek_to_frame(self, index): 75 | """Move file pointer to the frame with given index.""" 76 | pointer_position = self.frame_positions[index] 77 | self.blob_file.seek(pointer_position, 0) 78 | 79 | def get_blob(self, index): 80 | """Return blob at given index.""" 81 | self.seek_to_frame(index) 82 | return self.next_blob() 83 | 84 | def determine_frame_positions(self): 85 | """Record the file pointer position of each frame""" 86 | self.rewind_file() 87 | try: 88 | while True: 89 | pointer_position = self.blob_file.tell() 90 | length = struct.unpack('?$ 178 | 179 | # Allow the body of an if to be on the same line as the test if there is no 180 | # else. 181 | single-line-if-stmt=no 182 | 183 | # List of optional constructs for which whitespace checking is disabled 184 | no-space-check=trailing-comma,dict-separator 185 | 186 | # Maximum number of lines in a module 187 | max-module-lines=1000 188 | 189 | # String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 190 | # tab). 191 | indent-string=' ' 192 | 193 | # Number of spaces of indent required inside a hanging or continued line. 194 | indent-after-paren=4 195 | 196 | 197 | [LOGGING] 198 | 199 | # Logging modules to check that the string format arguments are in logging 200 | # function parameter format 201 | logging-modules=logging 202 | 203 | 204 | [MISCELLANEOUS] 205 | 206 | # List of note tags to take in consideration, separated by a comma. 207 | notes=FIXME,XXX,TODO 208 | 209 | 210 | [SIMILARITIES] 211 | 212 | # Minimum lines number of a similarity. 213 | min-similarity-lines=4 214 | 215 | # Ignore comments when computing similarities. 216 | ignore-comments=yes 217 | 218 | # Ignore docstrings when computing similarities. 219 | ignore-docstrings=yes 220 | 221 | # Ignore imports when computing similarities. 
222 | ignore-imports=no 223 | 224 | 225 | [TYPECHECK] 226 | 227 | # Tells whether missing members accessed in mixin class should be ignored. A 228 | # mixin class is detected if its name ends with "mixin" (case insensitive). 229 | ignore-mixin-members=yes 230 | 231 | # List of module names for which member attributes should not be checked 232 | # (useful for modules/projects where namespaces are manipulated during runtime 233 | # and thus existing member attributes cannot be deduced by static analysis 234 | ignored-modules=numpy 235 | 236 | # List of classes names for which member attributes should not be checked 237 | # (useful for classes with attributes dynamically set). 238 | ignored-classes=SQLObject 239 | 240 | # When zope mode is activated, add a predefined set of Zope acquired attributes 241 | # to generated-members. 242 | zope=no 243 | 244 | # List of members which are set dynamically and missed by pylint inference 245 | # system, and so shouldn't trigger E0201 when accessed. Python regular 246 | # expressions are accepted. 247 | generated-members=REQUEST,acl_users,aq_parent 248 | 249 | 250 | [VARIABLES] 251 | 252 | # Tells whether we should check for unused import in __init__ files. 253 | init-import=no 254 | 255 | # A regular expression matching the name of dummy variables (i.e. expectedly 256 | # not used). 257 | dummy-variables-rgx=_$|dummy 258 | 259 | # List of additional names supposed to be defined in builtins. Remember that 260 | # you should avoid to define new builtins when possible. 261 | additional-builtins= 262 | 263 | 264 | [CLASSES] 265 | 266 | # List of interface methods to ignore, separated by a comma. This is used for 267 | # instance to not check methods defines in Zope's Interface base class. 
268 | ignore-iface-methods=isImplementedBy,deferred,extends,names,namesAndDescriptions,queryDescriptionFor,getBases,getDescriptionFor,getDoc,getName,getTaggedValue,getTaggedValueTags,isEqualOrExtendedBy,setTaggedValue,isImplementedByInstancesOf,adaptWith,is_implemented_by 269 | 270 | # List of method names used to declare (i.e. assign) instance attributes. 271 | defining-attr-methods=__init__,__new__,setUp 272 | 273 | # List of valid names for the first argument in a class method. 274 | valid-classmethod-first-arg=cls 275 | 276 | # List of valid names for the first argument in a metaclass class method. 277 | valid-metaclass-classmethod-first-arg=mcs 278 | 279 | 280 | [DESIGN] 281 | 282 | # Maximum number of arguments for function / method 283 | max-args=5 284 | 285 | # Argument names that match this expression will be ignored. Default to name 286 | # with leading underscore 287 | ignored-argument-names=_.* 288 | 289 | # Maximum number of locals for function / method body 290 | max-locals=15 291 | 292 | # Maximum number of return / yield for function / method body 293 | max-returns=6 294 | 295 | # Maximum number of branch for function / method body 296 | max-branches=12 297 | 298 | # Maximum number of statements in function / method body 299 | max-statements=50 300 | 301 | # Maximum number of parents for a class (see R0901). 302 | max-parents=7 303 | 304 | # Maximum number of attributes for a class (see R0902). 305 | max-attributes=7 306 | 307 | # Minimum number of public methods for a class (see R0903). 308 | min-public-methods=2 309 | 310 | # Maximum number of public methods for a class (see R0904). 311 | max-public-methods=20 312 | 313 | 314 | [IMPORTS] 315 | 316 | # Deprecated modules which should not be used, separated by a comma 317 | deprecated-modules=regsub,TERMIOS,Bastion,rexec 318 | 319 | # Create a graph of every (i.e. 
internal and external) dependencies in the 320 | # given file (report RP0402 must not be disabled) 321 | import-graph= 322 | 323 | # Create a graph of external dependencies in the given file (report RP0402 must 324 | # not be disabled) 325 | ext-import-graph= 326 | 327 | # Create a graph of internal dependencies in the given file (report RP0402 must 328 | # not be disabled) 329 | int-import-graph= 330 | 331 | 332 | [EXCEPTIONS] 333 | 334 | # Exceptions that will emit a warning when being caught. Defaults to 335 | # "Exception" 336 | overgeneral-exceptions=Exception 337 | -------------------------------------------------------------------------------- /km3pipe/pumps/evt.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: evt.py 3 | # pylint: disable=C0103,R0903 4 | """ 5 | Pumps for the EVT simulation dataformat. 6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | __author__ = 'tamasgal' 11 | 12 | import sys 13 | 14 | from collections import namedtuple 15 | 16 | from km3pipe import Pump 17 | from km3pipe.logger import logging 18 | 19 | from km3pipe.dataclasses import Point, Direction 20 | from km3pipe.tools import pdg2name, geant2pdg, unpack_nfirst 21 | 22 | log = logging.getLogger(__name__) # pylint: disable=C0103 23 | 24 | 25 | class EvtPump(Pump): # pylint: disable:R0902 26 | """Provides a pump for EVT-files""" 27 | 28 | def __init__(self, **context): 29 | super(self.__class__, self).__init__(**context) 30 | self.filename = self.get('filename') 31 | self.cache_enabled = self.get('cache_enabled') or False 32 | self.basename = self.get('basename') or None 33 | self.index_start = self.get('index_start') or 1 34 | self.index_stop = self.get('index_stop') or 1 35 | 36 | self.raw_header = None 37 | self.event_offsets = [] 38 | self.index = 0 39 | self.whole_file_cached = False 40 | 41 | self.file_index = int(self.index_start) 42 | 43 | if self.basename: 44 | 
self.filename = self.basename + str(self.index_start) + '.evt' 45 | 46 | if self.filename: 47 | print("Opening {0}".format(self.filename)) 48 | self.open_file(self.filename) 49 | self.prepare_blobs() 50 | else: 51 | log.warn("No filename specified. Take care of the file handling!") 52 | 53 | def _reset(self): 54 | """Clear the cache.""" 55 | self.raw_header = None 56 | self.event_offsets = [] 57 | self.index = 0 58 | 59 | def prepare_blobs(self): 60 | """Populate the blobs""" 61 | self.raw_header = self.extract_header() 62 | if self.cache_enabled: 63 | self._cache_offsets() 64 | 65 | def extract_header(self): 66 | """Create a dictionary with the EVT header information""" 67 | raw_header = self.raw_header = {} 68 | #for line in self.blob_file: 69 | first_line = self.blob_file.readline() 70 | self.blob_file.seek(0, 0) 71 | if not first_line.startswith('start_run'): 72 | log.warning("No header found.") 73 | return raw_header 74 | for line in iter(self.blob_file.readline, ''): 75 | line = line.strip() 76 | try: 77 | tag, value = line.split(':') 78 | except ValueError: 79 | continue 80 | raw_header[tag] = value.split() 81 | if line.startswith('end_event:'): 82 | self._record_offset() 83 | return raw_header 84 | raise ValueError("Incomplete header, no 'end_event' tag found!") 85 | 86 | def get_blob(self, index): 87 | """Return a blob with the event at the given index""" 88 | if index > len(self.event_offsets) - 1: 89 | self._cache_offsets(index, verbose=False) 90 | self.blob_file.seek(self.event_offsets[index], 0) 91 | blob = self._create_blob() 92 | if blob is None: 93 | raise IndexError 94 | else: 95 | return blob 96 | 97 | def process(self, blob=None): 98 | """Pump the next blob to the modules""" 99 | try: 100 | blob = self.get_blob(self.index) 101 | except IndexError: 102 | if self.basename and self.file_index < self.index_stop: 103 | self.file_index += 1 104 | self._reset() 105 | self.blob_file.close() 106 | self.index = 0 107 | self.filename = self.basename + 
str(self.file_index) + '.evt' 108 | print("Opening {0}".format(self.filename)) 109 | self.open_file(self.filename) 110 | self.prepare_blobs() 111 | return blob 112 | raise StopIteration 113 | self.index += 1 114 | return blob 115 | 116 | def _cache_offsets(self, up_to_index=None, verbose=True): 117 | """Cache all event offsets.""" 118 | if not up_to_index: 119 | if verbose: 120 | print("Caching event file offsets, this may take a minute.") 121 | self.blob_file.seek(0, 0) 122 | self.event_offsets = [] 123 | if not self.raw_header: 124 | self.event_offsets.append(0) 125 | else: 126 | self.blob_file.seek(self.event_offsets[-1], 0) 127 | for line in iter(self.blob_file.readline, ''): 128 | line = line.strip() 129 | if line.startswith('end_event:'): 130 | self._record_offset() 131 | if len(self.event_offsets) % 100 == 0: 132 | if verbose: 133 | print('.', end='') 134 | sys.stdout.flush() 135 | if up_to_index and len(self.event_offsets) >= up_to_index + 1: 136 | return 137 | self.event_offsets.pop() # get rid of the last entry 138 | #self.blob_file.seek(self.event_offsets[0], 0) 139 | if not up_to_index: 140 | self.whole_file_cached = True 141 | print("\n{0} events indexed.".format(len(self.event_offsets))) 142 | 143 | def _record_offset(self): 144 | """Stores the current file pointer position""" 145 | offset = self.blob_file.tell() 146 | self.event_offsets.append(offset) 147 | 148 | def _create_blob(self): 149 | """Parse the next event from the current file position""" 150 | blob = None 151 | for line in self.blob_file: 152 | line = line.strip() 153 | if line.startswith('end_event:') and blob: 154 | blob['raw_header'] = self.raw_header 155 | return blob 156 | if line.startswith('start_event:'): 157 | blob = {} 158 | tag, value = line.split(':') 159 | blob[tag] = value.split() 160 | continue 161 | if blob: 162 | self._create_blob_entry_for_line(line, blob) 163 | 164 | def _create_blob_entry_for_line(self, line, blob): 165 | """Create the actual blob entry from the given 
line.""" 166 | try: 167 | tag, value = line.split(':') 168 | except ValueError: 169 | log.warning("Corrupt line in EVT file:\n{0}".format(line)) 170 | return 171 | if tag in ('track_in', 'track_fit', 'hit', 'hit_raw'): 172 | values = [float(x) for x in value.split()] 173 | blob.setdefault(tag, []).append(values) 174 | if tag == 'hit': 175 | hit = Hit(*values) 176 | blob.setdefault("EvtHits", []).append(hit) 177 | blob.setdefault("MCHits", []).append(hit) 178 | if tag == "hit_raw": 179 | raw_hit = RawHit(*values) 180 | blob.setdefault("EvtRawHits", []).append(raw_hit) 181 | blob.setdefault("Hits", []).append(raw_hit) 182 | if tag == "track_in": 183 | blob.setdefault("TrackIns", []).append(TrackIn(values)) 184 | if tag == "track_fit": 185 | blob.setdefault("TrackFits", []).append(TrackFit(values)) 186 | else: 187 | if tag == 'neutrino': 188 | values = [float(x) for x in value.split()] 189 | blob['Neutrino'] = Neutrino(values) 190 | else: 191 | blob[tag] = value.split() 192 | 193 | def __len__(self): 194 | if not self.whole_file_cached: 195 | self._cache_offsets() 196 | return len(self.event_offsets) 197 | 198 | def __iter__(self): 199 | return self 200 | 201 | def next(self): 202 | """Python 2/3 compatibility for iterators""" 203 | return self.__next__() 204 | 205 | def __next__(self): 206 | try: 207 | blob = self.get_blob(self.index) 208 | except IndexError: 209 | self.index = 0 210 | raise StopIteration 211 | self.index += 1 212 | return blob 213 | 214 | def __getitem__(self,index): 215 | if isinstance(index, int): 216 | return self.get_blob(index) 217 | elif isinstance(index, slice): 218 | return self._slice_generator(index) 219 | else: 220 | raise TypeError("index must be int or slice") 221 | 222 | def _slice_generator(self, index): 223 | """A simple slice generator for iterations""" 224 | start, stop, step = index.indices(len(self)) 225 | for i in range(start, stop, step): 226 | yield self.get_blob(i) 227 | 228 | def finish(self): 229 | """Clean everything up""" 
230 | self.blob_file.close() 231 | 232 | 233 | class Track(object): 234 | """Bass class for particle or shower tracks""" 235 | # def __init__(self, id, x, y, z, dx, dy, dz, E=None, t=0, *args): 236 | def __init__(self, data, zed_correction=405.93): 237 | id, x, y, z, dx, dy, dz, E, t, args = unpack_nfirst(data, 9) 238 | self.id = int(id) 239 | # z correctio due to gen/km3 (ZED -> sea level shift) 240 | # http://wiki.km3net.physik.uni-erlangen.de/index.php/Simulations 241 | self.pos = Point((x, y, z + zed_correction)) 242 | self.dir = Direction((dx, dy, dz)) 243 | self.E = E 244 | self.time = t 245 | self.args = args 246 | 247 | def __repr__(self): 248 | text = "Track:\n" 249 | text += " id: {0}\n".format(self.id) 250 | text += " pos: {0}\n".format(self.pos) 251 | text += " dir: {0}\n".format(self.dir) 252 | text += " energy: {0} GeV\n".format(self.E) 253 | text += " time: {0} ns\n".format(self.time) 254 | return text 255 | 256 | 257 | class TrackIn(Track): 258 | """Representation of a track_in entry in an EVT file""" 259 | def __init__(self, *args, **kwargs): 260 | super(self.__class__, self).__init__(*args, **kwargs) 261 | self.particle_type = geant2pdg(int(self.args[0])) 262 | try: 263 | self.length = self.args[1] 264 | except IndexError: 265 | self.length = 0 266 | 267 | def __repr__(self): 268 | text = super(self.__class__, self).__repr__() 269 | text += " type: {0} '{1}' [PDG]\n".format(self.particle_type, 270 | pdg2name(self.particle_type)) 271 | text += " length: {0} [m]\n".format(self.length) 272 | return text 273 | 274 | 275 | class TrackFit(Track): 276 | """Representation of a track_fit entry in an EVT file""" 277 | def __init__(self, *args, **kwargs): 278 | super(self.__class__, self).__init__(*args, **kwargs) 279 | self.speed = self.args[0] 280 | self.ts = self.args[1] 281 | self.te = self.args[2] 282 | self.con1 = self.args[3] 283 | self.con2 = self.args[4] 284 | 285 | def __repr__(self): 286 | text = super(self.__class__, self).__repr__() 287 | text 
+= " speed: {0} [m/ns]\n".format(self.speed) 288 | text += " ts: {0} [ns]\n".format(self.ts) 289 | text += " te: {0} [ns]\n".format(self.te) 290 | text += " con1: {0}\n".format(self.con1) 291 | text += " con2: {0}\n".format(self.con2) 292 | return text 293 | 294 | 295 | class Neutrino(object): # pylint: disable:R0902 296 | """Representation of a neutrino entry in an EVT file""" 297 | def __init__(self, data, zed_correction=405.93): 298 | id, x, y, z, dx, dy, dz, E, t, Bx, By, \ 299 | ichan, particle_type, channel, args = unpack_nfirst(data, 14) 300 | self.id = id 301 | # z correctio due to gen/km3 (ZED -> sea level shift) 302 | # http://wiki.km3net.physik.uni-erlangen.de/index.php/Simulations 303 | self.pos = Point((x, y, z + zed_correction)) 304 | self.dir = Direction((dx, dy, dz)) 305 | self.E = E 306 | self.time = t 307 | self.Bx = Bx 308 | self.By = By 309 | self.ichan = ichan 310 | self.particle_type = particle_type 311 | self.channel = channel 312 | 313 | def __str__(self): 314 | text = "Neutrino: " 315 | text += pdg2name(self.particle_type) 316 | if self.E >= 1000000: 317 | text += ", {0:.3} PeV".format(self.E / 1000000) 318 | elif self.E >= 1000: 319 | text += ", {0:.3} TeV".format(self.E / 1000) 320 | else: 321 | text += ", {0:.3} GeV".format(float(self.E)) 322 | text += ', CC' if int(self.channel) == 2 else ', NC' 323 | return text 324 | 325 | 326 | # The hit entry in an EVT file 327 | Hit = namedtuple('Hit', 'id pmt_id pe time type n_photons track_in c_time') 328 | Hit.__new__.__defaults__ = (None, None, None, None, None, None, None, None) 329 | 330 | 331 | # The hit_raw entry in an EVT file 332 | def __add_raw_hit__(self, other): 333 | """Add two hits by adding the ToT and preserve time and pmt_id 334 | of the earlier one.""" 335 | first = self if self.time <= other.time else other 336 | return RawHit(first.id, first.pmt_id, self.tot+other.tot, first.time) 337 | RawHit = namedtuple('RawHit', 'id pmt_id tot time') 338 | RawHit.__new__.__defaults__ = 
(None, None, None, None) 339 | RawHit.__add__ = __add_raw_hit__ 340 | 341 | -------------------------------------------------------------------------------- /km3pipe/pumps/tests/test_clb.py: -------------------------------------------------------------------------------- 1 | # coding=utf-8 2 | # Filename: test_clb.py 3 | # pylint: disable=C0111,R0904,C0103 4 | """ 5 | ... 6 | 7 | """ 8 | from __future__ import division, absolute_import, print_function 9 | 10 | from km3pipe.testing import TestCase, StringIO, skipIf 11 | from km3pipe.pumps.clb import CLBPump, CLBHeader, PMTData 12 | 13 | import binascii 14 | 15 | 16 | HEX_DATA = ("7A0500005454444300000000000000030000684200BEBC2030BEAF008000000" + 17 | "00003F2F79B0C0003F64C6B060F03F7560B1C0F03F75631030203F88BEB040A" + 18 | "04039B750B0404059CF2050404059F7E0C040405AC880B1404083F480A0A040" + 19 | "98CEA060B04099B1A020A040A4E1E090A040AC7FA070A040B199E0B00040C92" + 20 | "B20600040CDB170A05040F92840E04041063550A140410770A16000410EE911" + 21 | "B00041336E605040413870707000413F20E100504142BA21204041495151000" + 22 | "0415D2311C14041828A9041404182C491702041C73D40600041E68902108041" + 23 | "EFBD40F0004222E3405140425CD0F051404274D3C050A0428E54F0A050429EC" + 24 | "0D0300042BEF780500042BF0F10600042BF23A0B00042BF24B0400042BF4D90" + 25 | "300042BF9150300042C0ABB0E00042C2018061B042CA6BF0307042CF8ED0C0B" + 26 | "042CF89B160B042CFB0F060B042D279A0D0C042D29F6080B042D7A2B1000042" + 27 | "DB7271005043070E602140431A40A02140431B4A404140431B61609050431C5" + 28 | "2F0A140431E4CA090004324D4A260A04327A26041404327DB907020433B73D0" + 29 | "91104340D2D041104341626080004356B551700043597A6091A04362222171A" + 30 | "04362F16050004368FCF3000043A4CD41900043E7BE50300044017300B00044" + 31 | "026AD0E0404403C950F04044047EE0B0404404B41130804425F5607000442F4" + 32 | "0A0D040442FC6B0E0504463FB80E0C0446890D031A044875DA15000448FB210" + 33 | "D0B044B94310605044B95010905044EFF220F05044EFF350300044F7ED20300" + 34 | 
"044FEB3104050450722D0D1404533BD407050453B31E0C00045443541600045" + 35 | "4BE120905045643291005045855860205045857BE1A00045C929E0208045CB0" + 36 | "3F0208045D17F9070804608DE20B050464AF48071404651A3A0B0C0469FE410" + 37 | "30C0469FE64050C0469FFCE0F18046CC9671D19046CC966330A046CC9660E04" + 38 | "046EBDBC0F08046FACCA04000470D04511000470DEDA02050472373F0A00047" + 39 | "3C94904050476AE6108050476B086030B0476B6EB0A070478C35B08140478DE" + 40 | "1B0314047901AA0314047A6E5C0900047AFFA51100047C47F40608047CD88D0" + 41 | "B000481AE1F0A140483C96822020484205A0700048554F10314048965870500" + 42 | "049156F2081B04933F2017000494171D0D1A049573731900049DCB3B1E10049" + 43 | "EDEB00E14049F197F0305049FC648040C04A372AE021404A42B03050504AABF" + 44 | "771F0804AB6FD80D1204AD3BAF031604AED0DB060A04B42C8E0A0504B589131" + 45 | "B1404B5D55C051A04B816A5080004B87A7E020504B87A77100A04B8C4010500" + 46 | "04BA01C3250004BA0DC3060004BA0DD2020404BA9FB6110004BDFB4D070004B" + 47 | "DFE42030504C41C020C0004C715920A0804C73EAF080504C7434F0F0004C75E" + 48 | "540B0004C8A3B7070504C92DD5090204CCC690050404CD1395060504CD68170" + 49 | "90404CF1CBC061A04CF42A5060004D0BF720D0004D2CC07061404D3462C2214" + 50 | "04D3476C091404D349A40A0804D37D890A0104D503CD041404D5CB6A020404D" + 51 | "65C5C060104D8FD0F040B04DA69E10A0004DB16C5060804DCBFAD040804DD75" + 52 | "DA040804DD7F15080404DD8187050704DE2D8E090B04E0BB43090004E16DDE2" + 53 | "10004E3DA35140B04E48E2E0D0504E4F796100F04E5691F110004E8D5B8060A" + 54 | "04EA7027040A04EA752F0D0A04EA99A5081404EAEFBF0A1404ECC77F0A0004E" + 55 | "F3F37040A04F110BF080004F309DC080004F47A91030504F5C254100404FA0D" + 56 | "4C050104FA1061040C04FDD6E50B0004FDD687240004FDE4D4080C04FDFD2A0" + 57 | "51A04FEE042090004FF8936180405028A0C07000505EA0604000509845A0D05" + 58 | "050AB6D9030B05131DAA020B0513B9F51F000516583C0F0A051A25440D02051" + 59 | "F2A3A031B0521757909070524607C06000525B5C50408052663DF0900052872" + 60 | "DC0F00052879AC1A02052F56FB08000533D98E076A030000545444430000000" + 61 | 
"0000000040000684200BEBC2030BEAF00800000001A0536A45C071B05387A03" + 62 | "030B053AE0CA10000540B704081405424BAA0F1A0542B64C04050543DCB70E0" + 63 | "505456028091905477559071405495B3505050549EE790E00054B1FA2030905" + 64 | "51F12D1D07055314DA050005541EBA0605055487540D050556EE1F060B055AB" + 65 | "EEF0505055AF4BB0805055B632E0505056044260D07056194BD090C05622F3E" + 66 | "031E0563F5D8040B05656B810E1405695D650B080569BD4E04140569F1EE1C1" + 67 | "80569F1EF0C1A0569F1EE171B0569F1F2051C0569F1E67C1C0569F2BD041C05" + 68 | "69F3AB041D0569F1F6031E0569F1EA32080569F1F00F1B0569F3D1091E056A0" + 69 | "BDB0205056A8FD6080C056B84D1031C056BFB2B0216056C5D99051C056CF8A3" + 70 | "081E056CF8A01514056D23190514056FE6830914057367C11E14057367E6041" + 71 | "4057377EA1F00057438B3070505771BA40A000578372823000578FC4D031405" + 72 | "7A769D0A0C057D0BEF0500057D7D85080B058160E30900058188BC0A140582C" + 73 | "6C004080583704A0704058685B711020589A5430F00058A4B390404058D669F" + 74 | "0A0505921D5F04050595D2B613000596F6A5030A059C2FB7050B059CA512240" + 75 | "B059CAF96091A059DD2BA1800059E8ABB1207059FCD310707059FD1A1080705" + 76 | "9FEB620707059FF321050005A017E20E0405A0834A110405A085CA0A0405A09" + 77 | "04D040405A091E9100405A09354060405A096B6030405A0A8AB080B05A10F44" + 78 | "0D0005A52952110805A5A2250C0505A6C7EA060B05A770DF191405AA29870C1" + 79 | "A05AC34D6030205ACC85D130405B23E57240505B4CE421A0005B5CEBE0A1405" + 80 | "B5D1B4111405B5D43F2E0105B6EB83010005B8CF330A0205BF41B7071A05C0C" + 81 | "949040A05C15776071705C20CAD031B05C30B4C110805C38A91030405C560EC" + 82 | "0E0805C56B0F070205C7480D050005C7520B070005C99D613B0005CAA853170" + 83 | "505CBB43B0A0005D4EB3F1C0005D634EE040C05D718D7030005DB70BE041A05" + 84 | "DDC0D5090405DE4216091105DF876B080705DFB2D20A1405E15D240E0005E21" + 85 | "B9F061405E28B87040005E2C98B0B1405E4F888201405E50398050405E545E5" + 86 | "090405E546410D0405E54874070405E5B1840A0005E8F88A080405E8F7A9110" + 87 | "505E93B72021A05E95A90060A05E99D01080405EB0E42050A05EC7371040005" + 88 | 
"EFD856030505F12515030505F1251C0B7A05000054544443000000000000000" + 89 | "000006842011E1A3030BEAF00800000001B0002496D0E0B0002BC5C0E000005" + 90 | "79CF05040006CB4C1207000856D4050C0008748903050010EEAF0B000017E4F" + 91 | "B0C04001A2E020514001A34860614001D2356070A001E4FED030A001E80CF0E" + 92 | "14001F32271E140023CC8405000024BECB070A0024FF582F130025924A05150" + 93 | "025923F41180025924B03090025923B590A002592421C150025961E0E090025" + 94 | "9EF3190A0025DA660211002765B70607002911D20D00002A09B2050B002A2CE" + 95 | "70A0A002A724E0A0B002CCF7D0914002D562D060A0030F2C008000031416125" + 96 | "1A003340B3040400334C093D0400334C78050400334CDE080600334C0875080" + 97 | "0334C0F110A00334C0F1A0A00334DE1041700334C15091B00334C1809040033" + 98 | "4D5E060400334F64040600334DDA0804003351BC1104003351D414040033532" + 99 | "B07060033526E0C0A0033525818040033558906040033559D0E040033563A0F" + 100 | "04003356A11006003358BA1408003359DA04040033602304040033615E05040" + 101 | "0336AD10704003371F50504003377F213040033B7B005040033BDC010040033" + 102 | "FE540A0400340FD40B040034571E10000034D5C516000034E2A9040B00376F4" + 103 | "313100037FBF11E11003960271705003CF5130F1A004014AB0F00004022C606" + 104 | "00004026E0081E0041ECCA110A0043D5440A070046372204140046C70D13080" + 105 | "0479FE40D080047B41405080047C2BB07080048EC820500004AA3670400004C" + 106 | "57E408050050FDC01305005100C806040052E2B807040052E46106040052E63" + 107 | "A02040052E8BE07040053155209040053318E0F00005579C02A05005632DF0A" + 108 | "04005670BE0D140056ADE109140056ADF007140056ADFE02040057A9C3061A0" + 109 | "05B0BF60510005D00E50400005FE3BF06000061D61D040500623C5B10040062" + 110 | "628210140062841305050063961907000065072609000067160611140067BD6" + 111 | "E0610006C87CB0907006E3E41100A007018A70C000078CFB11D0A007B73DD14" + 112 | "08007D815404000080719407140081AC6F05000081EB54040B008516300F000" + 113 | "086C3170E00008996150308008D38690614009210EF0407009228F70D0A0092" + 114 | "767B060A0092A57E04050093F742040A00949AEA11140097F8BA1907009BA68" + 115 | 
class TestCLBPump(TestCase):
    """Exercise the CLBPump against an in-memory dump of raw CLB packets."""

    def setUp(self):
        # Rewind the shared in-memory dump so every test starts at packet 0,
        # then attach it to a fresh pump instance.
        TEST_FILE.seek(0, 0)
        self.pump = CLBPump()
        self.pump.blob_file = TEST_FILE

    def test_determine_packet_positions_finds_packets(self):
        # The test dump contains three packets at these byte offsets.
        self.pump.determine_packet_positions()
        self.assertListEqual([0, 1406, 2284], self.pump.packet_positions)

    def test_seek_to_packet(self):
        # Seeking by packet index must land on the recorded byte offset,
        # including seeking backwards.
        self.pump.determine_packet_positions()
        for packet_index, byte_offset in ((2, 2284), (1, 1406)):
            self.pump.seek_to_packet(packet_index)
            self.assertEqual(byte_offset, self.pump.blob_file.tell())

    def test_get_blob(self):
        self.pump.determine_packet_positions()
        blob = self.pump.get_blob(0)
        header = blob['CLBHeader']
        self.assertEqual(0, header.run_number)
        self.assertEqual('TTDC', header.data_type)
        hits = blob['PMTData']
        self.assertEqual(229, len(hits))
        first_hit = hits[0]
        self.assertEqual(0, first_hit.channel_id)
        self.assertEqual(66254747, first_hit.timestamp)
        self.assertEqual(12, first_hit.tot)

    def test_next_blob(self):
        # Consecutive calls walk through the three packets in file order;
        # the packets carry 229, 141 and 229 PMT hits respectively.
        self.pump.determine_packet_positions()
        for expected_n_hits in (229, 141, 229):
            blob = self.pump.next_blob()
            self.assertEqual(expected_n_hits, len(blob['PMTData']))

    def test_next_blob_raises_stop_iteration_on_eof(self):
        self.pump.determine_packet_positions()
        for _ in range(3):
            self.pump.next_blob()
        self.assertRaises(StopIteration, self.pump.next_blob)
class TestEvtParser(TestCase):
    """Tests for the EVT pump: header parsing, offset caching and blob access.

    NOTE(review): several tests assert exact byte offsets (88, 233, 700 and
    0, 145, 612) into the fixture text.  Those numbers are coupled to the
    exact byte content of ``valid_evt_header`` / ``no_evt_header`` below --
    do not reformat the fixture strings.
    """

    def setUp(self):
        # A syntactically valid EVT file: a header block terminated by the
        # first "end_event:", followed by three events (12, 13, 14).
        self.valid_evt_header = "\n".join((
            "start_run: 1",
            "cut_nu: 0.100E+03 0.100E+09-0.100E+01 0.100E+01",
            "spectrum: -1.40",
            "end_event:",
            "start_event: 12 1",
            "track_in: 1 -389.951 213.427 516 -0.204562 -0.60399 -0.770293 9.092 0 5 40.998",
            "hit: 1 44675 1 1170.59 5 2 1 1170.59",
            "end_event:",
            "start_event: 13 1",
            "track_in: 1 272.695 -105.613 516 -0.425451 -0.370522 -0.825654 2431.47 0 5 -1380",
            "track_in: 2 272.348 -106.292 516 -0.425451 -0.370522 -0.825654 24670.7 1.33 5 -1484",
            "track_in: 3 279.47 -134.999 516 -0.425451 -0.370522 -0.825654 164.586 26.7 5 601.939",
            "hit: 1 20140 1 1140.06 5 1 1 1140.06",
            "hit: 2 20159 1 1177.14 5 1 1 1177.14",
            "hit: 3 20164 1 1178.19 5 1 1 1178.19",
            "hit: 4 20170 1 1177.23 5 1 1 1177.23",
            "hit: 5 20171 2 1177.25 5 1 2 1177.25",
            "end_event:",
            "start_event: 14 1",
            "track_in: 1 40.256 -639.888 516 0.185998 0.476123 -0.859483 10016.1 0 5 -1668",
            "hit: 1 33788 1 2202.81 5 1 1 2202.81",
            "hit: 2 33801 1 2248.95 5 1 1 2248.95",
            "hit: 3 33814 1 2249.2 5 1 1 2249.2",
            "end_event:"
        ))
        # The same three events, but without any header block in front.
        self.no_evt_header = "\n".join((
            "start_event: 12 1",
            "track_in: 1 -389.951 213.427 516 -0.204562 -0.60399 -0.770293 9.092 0 5 40.998",
            "hit: 1 44675 1 1170.59 5 2 1 1170.59",
            "end_event:",
            "start_event: 13 1",
            "track_in: 1 272.695 -105.613 516 -0.425451 -0.370522 -0.825654 2431.47 0 5 -1380",
            "track_in: 2 272.348 -106.292 516 -0.425451 -0.370522 -0.825654 24670.7 1.33 5 -1484",
            "track_in: 3 279.47 -134.999 516 -0.425451 -0.370522 -0.825654 164.586 26.7 5 601.939",
            "hit: 1 20140 1 1140.06 5 1 1 1140.06",
            "hit: 2 20159 1 1177.14 5 1 1 1177.14",
            "hit: 3 20164 1 1178.19 5 1 1 1178.19",
            "hit: 4 20170 1 1177.23 5 1 1 1177.23",
            "hit: 5 20171 2 1177.25 5 1 2 1177.25",
            "end_event:",
            "start_event: 14 1",
            "track_in: 1 40.256 -639.888 516 0.185998 0.476123 -0.859483 10016.1 0 5 -1668",
            "hit: 1 33788 1 2202.81 5 1 1 2202.81",
            "hit: 2 33801 1 2248.95 5 1 1 2248.95",
            "hit: 3 33814 1 2249.2 5 1 1 2249.2",
            "end_event:"
        ))
        # Degenerate fixtures for error handling.
        self.corrupt_evt_header = "foo"
        self.corrupt_line = "\n".join((
            "start_event: 1 1",
            "corrupt line",
            "end_event:"
        ))

        self.pump = EvtPump()
        self.pump.blob_file = StringIO(self.valid_evt_header)

    def tearDown(self):
        self.pump.blob_file.close()

    def test_parse_header(self):
        # Header values come back as lists of raw string tokens.
        raw_header = self.pump.extract_header()
        self.assertEqual(['1'], raw_header['start_run'])
        self.assertAlmostEqual(-1.4, float(raw_header['spectrum'][0]))
        self.assertAlmostEqual(1, float(raw_header['cut_nu'][2]))

    # def test_incomplete_header_raises_value_error(self):
    #     temp_file = StringIO(self.corrupt_evt_header)
    #     pump = EvtPump()
    #     pump.blob_file = temp_file
    #     with self.assertRaises(ValueError):
    #         pump.extract_header()
    #     temp_file.close()

    def test_record_offset_saves_correct_offset(self):
        # _record_offset() stores the current file position; feed it a few
        # arbitrary seek targets and check they are all remembered in order.
        self.pump.blob_file = StringIO('a'*42)
        offsets = [1, 4, 9, 12, 23]
        for offset in offsets:
            self.pump.blob_file.seek(0, 0)
            self.pump.blob_file.seek(offset, 0)
            self.pump._record_offset()
        self.assertListEqual(offsets, self.pump.event_offsets)

    def test_event_offset_is_at_first_event_after_parsing_header(self):
        # 88 is the byte length of the header block in valid_evt_header.
        self.pump.extract_header()
        self.assertEqual(88, self.pump.event_offsets[0])

    def test_rebuild_offsets(self):
        self.pump.extract_header()
        self.pump._cache_offsets()
        self.assertListEqual([88, 233, 700], self.pump.event_offsets)

    def test_rebuild_offsets_without_header(self):
        # Without a header the first event starts at byte 0.
        self.pump.blob_file = StringIO(self.no_evt_header)
        self.pump.extract_header()
        self.pump._cache_offsets()
        self.assertListEqual([0, 145, 612], self.pump.event_offsets)

    def test_cache_enabled_triggers_rebuild_offsets(self):
        # With caching on, prepare_blobs() indexes all three events eagerly.
        self.pump.cache_enabled = True
        self.pump.prepare_blobs()
        self.assertEqual(3, len(self.pump.event_offsets))

    def test_cache_disabled_doesnt_trigger_cache_offsets(self):
        # With caching off, only the first event offset is known up front.
        self.pump.cache_enabled = False
        self.pump.prepare_blobs()
        self.assertEqual(1, len(self.pump.event_offsets))

    def test_get_blob_triggers_cache_offsets_if_cache_disabled_and_asking_for_not_indexed_event(self):
        self.pump.cache_enabled = False
        self.pump.prepare_blobs()
        self.assertEqual(1, len(self.pump.event_offsets))
        blob = self.pump.get_blob(2)
        self.assertListEqual(['14', '1'], blob['start_event'])
        self.assertEqual(3, len(self.pump.event_offsets))

    def test_get_blob_raises_index_error_for_wrong_index(self):
        self.pump.prepare_blobs()
        with self.assertRaises(IndexError):
            self.pump.get_blob(23)

    def test_get_blob_returns_correct_event_information(self):
        self.pump.prepare_blobs()
        blob = self.pump.get_blob(0)
        self.assertTrue('raw_header' in blob)
        self.assertEqual(['1'], blob['raw_header']['start_run'])
        self.assertListEqual(['12', '1'], blob['start_event'])
        # Hit lines are parsed into lists of floats.
        self.assertListEqual([[1.0, 44675.0, 1.0, 1170.59, 5.0, 2.0, 1.0, 1170.59]],
                             blob['hit'])

    def test_get_blob_returns_correct_events(self):
        # Random access must work in any order, including going backwards.
        self.pump.prepare_blobs()
        blob = self.pump.get_blob(0)
        self.assertListEqual(['12', '1'], blob['start_event'])
        blob = self.pump.get_blob(2)
        self.assertListEqual(['14', '1'], blob['start_event'])
        blob = self.pump.get_blob(1)
        self.assertListEqual(['13', '1'], blob['start_event'])

    def test_process_returns_correct_blobs(self):
        self.pump.prepare_blobs()
        blob = self.pump.process()
        self.assertListEqual(['12', '1'], blob['start_event'])
        blob = self.pump.process()
        self.assertListEqual(['13', '1'], blob['start_event'])
        blob = self.pump.process()
        self.assertListEqual(['14', '1'], blob['start_event'])

    def test_process_raises_stop_iteration_if_eof_reached(self):
        self.pump.prepare_blobs()
        self.pump.process()
        self.pump.process()
        self.pump.process()
        with self.assertRaises(StopIteration):
            self.pump.process()

    def test_pump_acts_as_iterator(self):
        self.pump.prepare_blobs()
        event_numbers = []
        for blob in self.pump:
            event_numbers.append(blob['start_event'][0])
        self.assertListEqual(['12', '13', '14'], event_numbers)


    def test_pump_has_len(self):
        self.pump.prepare_blobs()
        self.assertEqual(3, len(self.pump))

    def test_pump_get_item_returns_first_for_index_zero(self):
        self.pump.prepare_blobs()
        first_blob = self.pump[0]
        self.assertEqual('12', first_blob['start_event'][0])

    def test_pump_get_item_returns_correct_blob_for_index(self):
        self.pump.prepare_blobs()
        blob = self.pump[1]
        self.assertEqual('13', blob['start_event'][0])

    def test_pump_slice_generator(self):
        self.pump.prepare_blobs()
        # A full slice must not raise; the bounded slice is the one checked.
        blobs = self.pump[:]
        blobs = list(self.pump[1:3])
        self.assertEqual(2, len(blobs))
        self.assertEqual(['13', '1'], blobs[0]['start_event'])

    def test_create_blob_entry_for_line_ignores_corrupt_line(self):
        # Only checks that no exception escapes when a line cannot be parsed.
        self.pump.blob_file = StringIO(self.corrupt_line)
        self.pump.extract_header()
        self.pump.prepare_blobs()
        self.pump.get_blob(0)
class TestTrackFit(TestCase):
    """Check that TrackFit maps its raw value tuple onto named attributes."""

    def setUp(self):
        raw_values = (1, 2, 3, 4, 0, 0, 1, 8, 9, 10, 11, 12, 13, 14)
        self.track_fit = TrackFit(raw_values, zed_correction=0)

    def test_trackfit_init(self):
        fit = self.track_fit
        self.assertEqual(1, fit.id)
        self.assertListEqual([2, 3, 4], list(fit.pos))
        self.assertListEqual([0, 0, 1], list(fit.dir))
        # The remaining tuple entries map one-to-one onto scalar attributes.
        for expected, attribute in ((8, 'E'), (9, 'time'), (10, 'speed'),
                                    (11, 'ts'), (12, 'te'), (13, 'con1'),
                                    (14, 'con2')):
            self.assertEqual(expected, getattr(fit, attribute))

    def test_trackfit_repr(self):
        expected = ("Track:\n id: 1\n pos: [2 3 4]\n dir: (0.0, 0.0, 1.0)\n "
                    "energy: 8 GeV\n time: 9 ns\n speed: 10 [m/ns]\n"
                    " ts: 11 [ns]\n te: 12 [ns]\n con1: 13\n con2: 14\n")
        self.assertEqual(expected, repr(self.track_fit))
class TestRawHit(TestCase):
    """Behaviour of RawHit construction and of merging hits with '+'."""

    def test_rawhit_init(self):
        hit = RawHit(1, 2, 3, 4)
        # Positional order is: id, pmt_id, tot, time.
        self.assertEqual(1, hit.id)
        self.assertEqual(2, hit.pmt_id)
        self.assertEqual(3, hit.tot)
        self.assertEqual(4, hit.time)

    def test_hit_default_values(self):
        hit = RawHit()
        for attribute in ('id', 'pmt_id', 'time'):
            self.assertIsNone(getattr(hit, attribute))

    def test_hit_default_values_are_set_if_others_are_given(self):
        hit = RawHit(pmt_id=1)
        self.assertIsNone(hit.id)
        self.assertIsNone(hit.time)

    def test_hit_attributes_are_immutable(self):
        hit = RawHit(1, True)
        with self.assertRaises(AttributeError):
            hit.time = 10

    def test_hit_addition_remains_time_id_and_pmt_id_and_adds_tot(self):
        first = RawHit(id=1, time=1, pmt_id=1, tot=10)
        second = RawHit(id=2, time=2, pmt_id=2, tot=20)
        merged = first + second
        # id/time/pmt_id come from the earlier hit, tot values accumulate.
        self.assertEqual(1, merged.id)
        self.assertEqual(1, merged.time)
        self.assertEqual(1, merged.pmt_id)
        self.assertEqual(30, merged.tot)

    def test_hit_addition_picks_correct_time_if_second_hit_is_the_earlier_one(self):
        late = RawHit(id=1, time=2, pmt_id=1, tot=10)
        early = RawHit(id=2, time=1, pmt_id=2, tot=20)
        merged = late + early
        # The earlier hit (the right operand here) wins id and pmt_id.
        self.assertEqual(2, merged.id)
        self.assertEqual(2, merged.pmt_id)

    def test_hit_additions_works_with_multiple_hits(self):
        merged = (RawHit(id=1, time=2, pmt_id=1, tot=10)
                  + RawHit(id=2, time=1, pmt_id=2, tot=20)
                  + RawHit(id=3, time=1, pmt_id=3, tot=30))
        self.assertEqual(2, merged.pmt_id)
        self.assertEqual(60, merged.tot)
        self.assertEqual(1, merged.time)
        self.assertEqual(2, merged.id)

    def test_hit_addition_works_with_sum(self):
        hits = [RawHit(id=1, time=2, pmt_id=1, tot=10),
                RawHit(id=2, time=1, pmt_id=2, tot=20),
                RawHit(id=3, time=1, pmt_id=3, tot=30)]
        merged = reduce(operator.add, hits)
        self.assertEqual(2, merged.id)
        self.assertEqual(1, merged.time)
        self.assertEqual(60, merged.tot)
        self.assertEqual(2, merged.pmt_id)