├── ptsa ├── tests │ ├── test_filt.py │ └── test_fixed_scipy.py ├── plotting │ ├── tests │ │ └── test_topoplot.py │ ├── logo.png │ ├── ptsa.pdf │ ├── ptsa.png │ ├── logo_old2.png │ ├── ptsa_blue.pdf │ ├── logo_background.png │ ├── __init__.py │ ├── misc.py │ ├── logo.py │ ├── HCGSN128.dat │ ├── GSN129.dat │ ├── HCGSN128.sfp │ ├── ptsa.eps │ └── ptsa_blue.eps ├── stats │ ├── __init__.py │ ├── stat_helper.py │ └── nonparam.py ├── data │ ├── edf │ │ ├── setup.py │ │ ├── __init__.py │ │ ├── edfwrap.h │ │ ├── edfwrap.c │ │ └── edf.pyx │ ├── __init__.py │ ├── tests │ │ ├── testdata.py │ │ └── test_timeseries.py │ ├── arraywrapper.py │ ├── edfwrapper.py │ ├── datawrapper.py │ ├── align.py │ ├── rawbinarydata.py │ └── bvwrapper.py ├── versionString.py ├── version.py ├── pca.py ├── hilbert.py ├── __init__.py ├── fixed_scipy.py ├── contributed.py └── _arraytools.py ├── README ├── docs ├── _static │ └── logo.png ├── devel │ └── gitwash │ │ ├── pull_button.png │ │ ├── forking_button.png │ │ ├── branch_dropdown.png │ │ ├── links.inc │ │ ├── this_project.inc │ │ ├── git_development.rst │ │ ├── index.rst │ │ ├── git_intro.rst │ │ ├── git_install.rst │ │ ├── following_latest.rst │ │ ├── forking_hell.rst │ │ ├── known_projects.inc │ │ ├── git_resources.rst │ │ ├── set_up_fork.rst │ │ ├── maintainer_workflow.rst │ │ ├── git_links.inc │ │ ├── patching.rst │ │ └── configure_git.rst ├── contents.rst ├── timeseries.rst ├── dimarray.rst ├── _templates │ └── layout.html ├── Makefile ├── sphinxexts │ ├── numpydoc.py │ └── docscrape_sphinx.py ├── index.rst ├── api │ └── epydoc.conf ├── devguide.rst └── conf.py ├── AUTHOR ├── examples ├── example_data │ └── sinus.bdf ├── da_wish.py ├── process_edf.py ├── topoPlotDemo.py ├── dataWaveDemo.m ├── basic_analysis.py ├── dataWaveDemo.py └── testLocs.dat ├── MANIFEST.in ├── .gitignore ├── dimarray └── __init__.py ├── COPYING ├── setup.py ├── todo.txt └── tools └── gitwash_dumper.py /ptsa/tests/test_filt.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /README: -------------------------------------------------------------------------------- 1 | PTSA README 2 | 3 | 4 | -------------------------------------------------------------------------------- /ptsa/plotting/tests/test_topoplot.py: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/_static/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/docs/_static/logo.png -------------------------------------------------------------------------------- /ptsa/plotting/logo.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/logo.png -------------------------------------------------------------------------------- /ptsa/plotting/ptsa.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/ptsa.pdf -------------------------------------------------------------------------------- /ptsa/plotting/ptsa.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/ptsa.png 
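As a quick orientation to the tree above, the following is a minimal sketch of the user-facing imports it implies. It simply mirrors what ptsa/data/__init__.py, dimarray/__init__.py, and the scripts under examples/ (all shown later in this dump) use, and assumes ptsa and dimarray are installed; the array shapes and values are arbitrary.

# Orientation sketch only -- imports mirror the package's own __init__.py files
# and the examples/ scripts; the data here is random and purely illustrative.
import numpy as np

from dimarray import Dim, DimArray           # labeled dimensions and arrays
from ptsa.data import ArrayWrapper           # raw-data interface (Events and TimeSeries also live here)

# A small labeled array, as in examples/da_wish.py
dims = [Dim(data=np.arange(20), name='time'),
        Dim(data=np.arange(10), name='freqs')]
dat = DimArray(data=np.random.rand(20, 10), dims=dims)

# Wrap a (channels x samples) array so events can later pull epochs from it,
# as in examples/basic_analysis.py
aw = ArrayWrapper(np.random.rand(2, 1000), samplerate=200)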
-------------------------------------------------------------------------------- /AUTHOR: -------------------------------------------------------------------------------- 1 | Per B. Sederberg 2 | Christoph T. Weidemann 3 | 4 | -------------------------------------------------------------------------------- /ptsa/plotting/logo_old2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/logo_old2.png -------------------------------------------------------------------------------- /ptsa/plotting/ptsa_blue.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/ptsa_blue.pdf -------------------------------------------------------------------------------- /examples/example_data/sinus.bdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/examples/example_data/sinus.bdf -------------------------------------------------------------------------------- /docs/devel/gitwash/pull_button.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/docs/devel/gitwash/pull_button.png -------------------------------------------------------------------------------- /ptsa/plotting/logo_background.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/logo_background.png -------------------------------------------------------------------------------- /docs/devel/gitwash/forking_button.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/docs/devel/gitwash/forking_button.png -------------------------------------------------------------------------------- /docs/devel/gitwash/branch_dropdown.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/compmem/ptsa/HEAD/docs/devel/gitwash/branch_dropdown.png -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include AUTHOR COPYING MANIFEST.in setup.* 2 | recursive-include docs * 3 | recursive-include examples * 4 | prune docs/_build 5 | prune *~ -------------------------------------------------------------------------------- /docs/devel/gitwash/links.inc: -------------------------------------------------------------------------------- 1 | .. compiling links file 2 | .. include:: known_projects.inc 3 | .. include:: this_project.inc 4 | .. include:: git_links.inc 5 | -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | *.orig 2 | *~ 3 | *.py[co] 4 | ptsa/griddata-0.1 5 | \#* 6 | \.\#* 7 | build 8 | _build 9 | *.so 10 | build-arch* 11 | dist 12 | MANIFEST 13 | 14 | -------------------------------------------------------------------------------- /docs/devel/gitwash/this_project.inc: -------------------------------------------------------------------------------- 1 | .. ptsa 2 | .. _`ptsa`: http://ptsa.sourceforge.net 3 | .. _`ptsa github`: http://github.com/compmem/ptsa 4 | 5 | .. 
_`ptsa mailing list`: https://github.com/compmem/ptsa 6 | -------------------------------------------------------------------------------- /docs/devel/gitwash/git_development.rst: -------------------------------------------------------------------------------- 1 | .. _git-development: 2 | 3 | ===================== 4 | Git for development 5 | ===================== 6 | 7 | Contents: 8 | 9 | .. toctree:: 10 | :maxdepth: 2 11 | 12 | forking_hell 13 | set_up_fork 14 | configure_git 15 | development_workflow 16 | maintainer_workflow 17 | -------------------------------------------------------------------------------- /docs/devel/gitwash/index.rst: -------------------------------------------------------------------------------- 1 | .. _using-git: 2 | 3 | Working with *ptsa* source code 4 | ================================================ 5 | 6 | Contents: 7 | 8 | .. toctree:: 9 | :maxdepth: 2 10 | 11 | git_intro 12 | git_install 13 | following_latest 14 | patching 15 | git_development 16 | git_resources 17 | 18 | 19 | -------------------------------------------------------------------------------- /ptsa/stats/__init__.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | 11 | -------------------------------------------------------------------------------- /dimarray/__init__.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | from dimarray import DimArray,Dim 11 | from attrarray import AttrArray 12 | -------------------------------------------------------------------------------- /ptsa/plotting/__init__.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 
7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | from topo import topoplot 11 | from misc import errorfill 12 | 13 | -------------------------------------------------------------------------------- /ptsa/data/edf/setup.py: -------------------------------------------------------------------------------- 1 | from distutils.core import setup 2 | from distutils.extension import Extension 3 | from Cython.Distutils import build_ext 4 | 5 | sourcefiles = ["edf.pyx", "edfwrap.c", "edflib.c"] 6 | setup( 7 | cmdclass = {'build_ext': build_ext}, 8 | ext_modules = [Extension("edf", 9 | sources = sourcefiles, 10 | define_macros = [('_LARGEFILE64_SOURCE', None), 11 | ('_LARGEFILE_SOURCE', None)]), 12 | ] 13 | ) 14 | -------------------------------------------------------------------------------- /ptsa/versionString.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | """ PTSA Version """ 11 | 12 | # !!!!!!!!!!!! 13 | # Be sure to update the version before a release 14 | vstr = '0.0.1' 15 | 16 | -------------------------------------------------------------------------------- /ptsa/data/edf/__init__.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | from edf import read_samples, read_number_of_samples 11 | from edf import read_samplerate, read_annotations, read_number_of_signals 12 | 13 | -------------------------------------------------------------------------------- /docs/contents.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst -*- 2 | .. ex: set sts=4 ts=4 sw=4 et tw=79: 3 | 4 | .. _contents: 5 | 6 | .. ***************************** 7 | .. PTSA Documentation Contents 8 | .. ***************************** 9 | 10 | .. .. toctree:: 11 | 12 | .. intro 13 | .. installation 14 | .. overview 15 | .. datasets 16 | .. classifiers 17 | .. measures 18 | .. featsel 19 | .. misc 20 | .. examples 21 | .. matlab 22 | .. faq 23 | .. glossary 24 | .. references 25 | .. legal 26 | .. changelog 27 | .. history 28 | .. todo 29 | .. 
modref 30 | -------------------------------------------------------------------------------- /ptsa/data/edf/edfwrap.h: -------------------------------------------------------------------------------- 1 | 2 | #ifndef __EDFWRAP_H__ 3 | #define __EDFWRAP_H__ 4 | 5 | #include "edflib.h" 6 | 7 | int open_file_readonly(const char *filepath, 8 | struct edf_hdr_struct *hdr, 9 | int read_annot); 10 | 11 | long long get_samples_in_file(struct edf_hdr_struct *hdr, 12 | int edfsignal); 13 | 14 | double get_samplerate(struct edf_hdr_struct *hdr, 15 | int edfsignal); 16 | 17 | int read_samples_from_file(struct edf_hdr_struct *hdr, 18 | int edfsignal, 19 | long long offset, 20 | int n, 21 | double *buf); 22 | 23 | #endif 24 | -------------------------------------------------------------------------------- /docs/devel/gitwash/git_intro.rst: -------------------------------------------------------------------------------- 1 | ============== 2 | Introduction 3 | ============== 4 | 5 | These pages describe a git_ and github_ workflow for the `ptsa`_ 6 | project. 7 | 8 | There are several different workflows here, for different ways of 9 | working with *ptsa*. 10 | 11 | This is not a comprehensive git reference, it's just a workflow for our 12 | own project. It's tailored to the github hosting service. You may well 13 | find better or quicker ways of getting stuff done with git, but these 14 | should get you started. 15 | 16 | For general resources for learning git, see :ref:`git-resources`. 17 | 18 | .. include:: links.inc 19 | -------------------------------------------------------------------------------- /ptsa/data/__init__.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | from dimarray import Dim,DimArray,AttrArray 11 | from timeseries import TimeSeries 12 | 13 | from basewrapper import BaseWrapper 14 | from arraywrapper import ArrayWrapper 15 | #from edfwrapper import EdfWrapper 16 | 17 | from events import Events 18 | 19 | 20 | -------------------------------------------------------------------------------- /examples/da_wish.py: -------------------------------------------------------------------------------- 1 | # 2 | # Pythonic fancy indexing of DimArrays! 3 | # 4 | 5 | import numpy as np 6 | from dimarray import Dim,DimArray,AttrArray 7 | 8 | if __name__ == "__main__": 9 | 10 | dims = [Dim(data=np.arange(20), name='time'), 11 | Dim(data=np.arange(10), name='freqs'), 12 | Dim(data=np.arange(30), name='events')] 13 | 14 | dat = DimArray(data=np.random.rand(20,10,30), dims=dims) 15 | 16 | # select some data 17 | ind = ((dat['time'] > 10) & 18 | ((dat['events']<10) | (dat['events']>20)) & 19 | (dat['freqs'].is_in(range(0,10,2)))) 20 | 21 | subdat = dat[ind] 22 | 23 | print dat.shape 24 | print subdat.shape 25 | -------------------------------------------------------------------------------- /docs/devel/gitwash/git_install.rst: -------------------------------------------------------------------------------- 1 | .. 
_install-git: 2 | 3 | ============= 4 | Install git 5 | ============= 6 | 7 | Overview 8 | ======== 9 | 10 | ================ ============= 11 | Debian / Ubuntu ``sudo apt-get install git-core`` 12 | Fedora ``sudo yum install git-core`` 13 | Windows Download and install msysGit_ 14 | OS X Use the git-osx-installer_ 15 | ================ ============= 16 | 17 | In detail 18 | ========= 19 | 20 | See the git page for the most recent information. 21 | 22 | Have a look at the github install help pages available from `github help`_ 23 | 24 | There are good instructions here: http://book.git-scm.com/2_installing_git.html 25 | 26 | .. include:: links.inc 27 | -------------------------------------------------------------------------------- /docs/timeseries.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst -*- 2 | .. ex: set sts=4 ts=4 sw=4 et tw=79: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 4 | # 5 | # See COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 9 | 10 | .. _timeseries: 11 | 12 | .. index:: AttrArray, Attribute Array, Dim, Dimension, DimArray, Dimensioned Array, TimeSeries, time series 13 | 14 | **************************** 15 | Analysis of time series data 16 | **************************** 17 | 18 | .. index:: TimeSeries 19 | 20 | 21 | Data structure for time series (TimeSeries) 22 | =========================================== 23 | 24 | .. autoclass:: ptsa.data.TimeSeries 25 | :members: 26 | 27 | 28 | -------------------------------------------------------------------------------- /ptsa/plotting/misc.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | 11 | import numpy as np 12 | import pylab as pl 13 | 14 | def errorfill(xvals,yvals,errvals,**kwargs): 15 | """ 16 | Plot an errorbar as a filled polygon that can be transparent. 17 | 18 | See the pylab.fill method for kwargs. 19 | """ 20 | # set the xrange 21 | x_range = np.concatenate((xvals,np.flipud(xvals))) 22 | y_range = np.concatenate((yvals+errvals,np.flipud(yvals-errvals))) 23 | 24 | # do the fill 25 | return pl.fill(x_range,y_range,**kwargs) 26 | -------------------------------------------------------------------------------- /docs/devel/gitwash/following_latest.rst: -------------------------------------------------------------------------------- 1 | .. _following-latest: 2 | 3 | ============================= 4 | Following the latest source 5 | ============================= 6 | 7 | These are the instructions if you just want to follow the latest 8 | *ptsa* source, but you don't need to do any development for now. 
9 | 10 | The steps are: 11 | 12 | * :ref:`install-git` 13 | * get local copy of the `ptsa github`_ git repository 14 | * update local copy from time to time 15 | 16 | Get the local copy of the code 17 | ============================== 18 | 19 | From the command line:: 20 | 21 | git clone git://github.com/compmem/ptsa.git 22 | 23 | You now have a copy of the code tree in the new ``ptsa`` directory. 24 | 25 | Updating the code 26 | ================= 27 | 28 | From time to time you may want to pull down the latest code. Do this with:: 29 | 30 | cd ptsa 31 | git pull 32 | 33 | The tree in ``ptsa`` will now have the latest changes from the initial 34 | repository. 35 | 36 | .. include:: links.inc 37 | -------------------------------------------------------------------------------- /COPYING: -------------------------------------------------------------------------------- 1 | PTSA - The Python Time Series Analysis Toolbox 2 | 3 | Copyright (C) 2007-2014 4 | Per B. Sederberg and 5 | Christoph T. Weidemann 6 | 7 | Authors: Per B. Sederberg and Christoph T. Weidemann 8 | URL: https://github.com/compmem/ptsa 9 | 10 | This program is free software: you can redistribute it and/or modify 11 | it under the terms of the GNU General Public License as published by 12 | the Free Software Foundation, either version 3 of the License, or 13 | (at your option) any later version. 14 | 15 | This program is distributed in the hope that it will be useful, 16 | but WITHOUT ANY WARRANTY; without even the implied warranty of 17 | MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the 18 | GNU General Public License for more details. 19 | 20 | You should have received a copy of the GNU General Public License 21 | along with this program. It can be found in the file license.txt which 22 | is part of the ptsa package and online at 23 | . -------------------------------------------------------------------------------- /docs/dimarray.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst -*- 2 | .. ex: set sts=4 ts=4 sw=4 et tw=79: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 4 | # 5 | # See COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 9 | 10 | .. _dimarray: 11 | 12 | .. index:: AttrArray, Attribute Array, Dim, Dimension, DimArray, Dimensioned Array 13 | 14 | **************************** 15 | Main data structures in PTSA 16 | **************************** 17 | 18 | .. index:: AttrArray, Attribute Array, NumPy, ndarray 19 | 20 | .. _AttrArray: 21 | 22 | Attribute Array (AttrArray) 23 | =========================== 24 | 25 | .. autoclass:: dimarray.AttrArray 26 | :members: 27 | 28 | 29 | Dimension (Dim) 30 | =============== 31 | 32 | .. autoclass:: dimarray.Dim 33 | :members: 34 | 35 | 36 | Dimensioned Array (DimArray) 37 | ============================ 38 | 39 | .. autoclass:: dimarray.DimArray 40 | :members: 41 | -------------------------------------------------------------------------------- /docs/devel/gitwash/forking_hell.rst: -------------------------------------------------------------------------------- 1 | .. _forking: 2 | 3 | ====================================================== 4 | Making your own copy (fork) of ptsa 5 | ====================================================== 6 | 7 | You need to do this only once. 
The instructions here are very similar 8 | to the instructions at http://help.github.com/forking/ |emdash| please see 9 | that page for more detail. We're repeating some of it here just to give the 10 | specifics for the `ptsa`_ project, and to suggest some default names. 11 | 12 | Set up and configure a github account 13 | ===================================== 14 | 15 | If you don't have a github account, go to the github page, and make one. 16 | 17 | You then need to configure your account to allow write access |emdash| see 18 | the ``Generating SSH keys`` help on `github help`_. 19 | 20 | Create your own forked copy of `ptsa`_ 21 | ====================================================== 22 | 23 | #. Log into your github account. 24 | #. Go to the `ptsa`_ github home at `ptsa github`_. 25 | #. Click on the *fork* button: 26 | 27 | .. image:: forking_button.png 28 | 29 | Now, after a short pause and some 'Hardcore forking action', you 30 | should find yourself at the home page for your own forked copy of `ptsa`_. 31 | 32 | .. include:: links.inc 33 | 34 | -------------------------------------------------------------------------------- /ptsa/version.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | """ 11 | Version management module. 12 | """ 13 | 14 | from distutils.version import StrictVersion 15 | 16 | ## !!!!! 17 | import versionString 18 | #vstr = open('versionString.txt').readline().strip() 19 | ## !!!!! 20 | 21 | ptsaVersion = StrictVersion(versionString.vstr) 22 | 23 | def versionAtLeast(someString): 24 | """ 25 | Check that the current ptsa Version >= argument string's version. 26 | """ 27 | if ptsaVersion >= StrictVersion(someString): 28 | # Is above specified version 29 | return True 30 | else: 31 | return False 32 | 33 | def versionWithin(str1, str2): 34 | """ 35 | Check that the current ptsa version is in the version-range described 36 | by the 2 argument strings. 37 | """ 38 | if not (ptsaVersion >= StrictVersion(str1) and ptsaVersion <= StrictVersion(str2)): 39 | # not within range 40 | return False 41 | else: 42 | # within range 43 | return True 44 | -------------------------------------------------------------------------------- /docs/_templates/layout.html: -------------------------------------------------------------------------------- 1 | {% extends "!layout.html" %} 2 | 3 | {% block extrahead %} 4 | 5 | 6 | {% endblock %} 7 | 8 | {% block rootrellink %} 9 |
  • ptsa Home
  • 10 | 11 | {% endblock %} 12 | 13 | {% block relbar1 %} 14 | 21 | {{ super() }} 22 | {% endblock %} 23 | 24 | 25 | {% block sidebar1 %}{{ sidebar() }}{% endblock %} 26 | {% block sidebar2 %}{% endblock %} 27 | 28 | {% block footer %} 29 | {{ super() }} 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | {% endblock %} 39 | 40 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | 2 | try: 3 | import numpy 4 | except ImportError: 5 | print 'Numpy is required to build PTSA. Please install Numpy before proceeding' 6 | import sys 7 | sys.exit(1) 8 | 9 | from distutils.core import setup, Extension 10 | from distutils.sysconfig import get_config_var 11 | from distutils.extension import Extension 12 | 13 | import os 14 | import sys 15 | 16 | # get the version loaded as vstr 17 | execfile('ptsa/versionString.py') 18 | 19 | # set up extensions 20 | ext_modules = [] 21 | edf_ext = Extension("ptsa.data.edf.edf", 22 | sources = ["ptsa/data/edf/edf.c", 23 | "ptsa/data/edf/edfwrap.c", 24 | "ptsa/data/edf/edflib.c"], 25 | include_dirs=[numpy.get_include()], 26 | define_macros = [('_LARGEFILE64_SOURCE', None), 27 | ('_LARGEFILE_SOURCE', None)]) 28 | ext_modules.append(edf_ext) 29 | 30 | # define the setup 31 | setup(name='ptsa', 32 | version=vstr, 33 | maintainer=['Per B. Sederberg'], 34 | maintainer_email=['psederberg@gmail.com'], 35 | url=['http://ptsa.sourceforge.net'], 36 | packages=['ptsa','ptsa.tests','ptsa.data','ptsa.data.tests', 37 | 'ptsa.data.edf','ptsa.plotting','ptsa.plotting.tests', 38 | 'ptsa.stats', 39 | 'dimarray','dimarray.tests'], 40 | ext_modules = ext_modules 41 | ) 42 | 43 | -------------------------------------------------------------------------------- /ptsa/pca.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | # global imports 11 | import numpy as np 12 | 13 | def pca(X, ncomps=None, eigratio=1e6): 14 | """ 15 | Principal components analysis 16 | 17 | % [W,Y] = pca(X,NBC,EIGRATIO) returns the PCA matrix W and the principal 18 | % components Y corresponding to the data matrix X (realizations 19 | % columnwise). The number of components is NBC components unless the 20 | % ratio between the maximum and minimum covariance eigenvalue is below 21 | % EIGRATIO. In such a case, the function will return as few components as 22 | % are necessary to guarantee that such ratio is greater than EIGRATIO. 23 | 24 | """ 25 | 26 | if ncomps is None: 27 | ncomps = X.shape[0] 28 | 29 | C = np.cov(X) 30 | D,V = np.linalg.eigh(C) 31 | val = np.abs(D) 32 | I = np.argsort(val)[::-1] 33 | val = val[I] 34 | 35 | while (val[0]/val[ncomps-1])>eigratio: 36 | ncomps -= 1 37 | 38 | V = V[:,I[:ncomps]] 39 | D = np.diag(D[I[:ncomps]]**(-.5)) 40 | W = np.dot(D,V.T) 41 | Y = np.dot(W,X) 42 | 43 | return W,Y 44 | 45 | 46 | -------------------------------------------------------------------------------- /docs/devel/gitwash/known_projects.inc: -------------------------------------------------------------------------------- 1 | .. Known projects 2 | 3 | .. 
PROJECTNAME placeholders 4 | .. _PROJECTNAME: http://neuroimaging.scipy.org 5 | .. _`PROJECTNAME github`: http://github.com/nipy 6 | .. _`PROJECTNAME mailing list`: http://projects.scipy.org/mailman/listinfo/nipy-devel 7 | 8 | .. numpy 9 | .. _numpy: hhttp://numpy.scipy.org 10 | .. _`numpy github`: http://github.com/numpy/numpy 11 | .. _`numpy mailing list`: http://mail.scipy.org/mailman/listinfo/numpy-discussion 12 | 13 | .. scipy 14 | .. _scipy: http://www.scipy.org 15 | .. _`scipy github`: http://github.com/scipy/scipy 16 | .. _`scipy mailing list`: http://mail.scipy.org/mailman/listinfo/scipy-dev 17 | 18 | .. nipy 19 | .. _nipy: http://nipy.org/nipy 20 | .. _`nipy github`: http://github.com/nipy/nipy 21 | .. _`nipy mailing list`: http://mail.scipy.org/mailman/listinfo/nipy-devel 22 | 23 | .. ipython 24 | .. _ipython: http://ipython.scipy.org 25 | .. _`ipython github`: http://github.com/ipython/ipython 26 | .. _`ipython mailing list`: http://mail.scipy.org/mailman/listinfo/IPython-dev 27 | 28 | .. dipy 29 | .. _dipy: http://nipy.org/dipy 30 | .. _`dipy github`: http://github.com/Garyfallidis/dipy 31 | .. _`dipy mailing list`: http://mail.scipy.org/mailman/listinfo/nipy-devel 32 | 33 | .. nibabel 34 | .. _nibabel: http://nipy.org/nibabel 35 | .. _`nibabel github`: http://github.com/nipy/nibabel 36 | .. _`nibabel mailing list`: http://mail.scipy.org/mailman/listinfo/nipy-devel 37 | 38 | .. marsbar 39 | .. _marsbar: http://marsbar.sourceforge.net 40 | .. _`marsbar github`: http://github.com/matthew-brett/marsbar 41 | .. _`MarsBaR mailing list`: https://lists.sourceforge.net/lists/listinfo/marsbar-users 42 | -------------------------------------------------------------------------------- /docs/devel/gitwash/git_resources.rst: -------------------------------------------------------------------------------- 1 | .. _git-resources: 2 | 3 | ============= 4 | git resources 5 | ============= 6 | 7 | Tutorials and summaries 8 | ======================= 9 | 10 | * `github help`_ has an excellent series of how-to guides. 11 | * `learn.github`_ has an excellent series of tutorials 12 | * The `pro git book`_ is a good in-depth book on git. 13 | * A `git cheat sheet`_ is a page giving summaries of common commands. 14 | * The `git user manual`_ 15 | * The `git tutorial`_ 16 | * The `git community book`_ 17 | * `git ready`_ |emdash| a nice series of tutorials 18 | * `git casts`_ |emdash| video snippets giving git how-tos. 19 | * `git magic`_ |emdash| extended introduction with intermediate detail 20 | * The `git parable`_ is an easy read explaining the concepts behind git. 21 | * `git foundation`_ expands on the `git parable`_. 22 | * Fernando Perez' git page |emdash| `Fernando's git page`_ |emdash| many 23 | links and tips 24 | * A good but technical page on `git concepts`_ 25 | * `git svn crash course`_: git for those of us used to subversion_ 26 | 27 | Advanced git workflow 28 | ===================== 29 | 30 | There are many ways of working with git; here are some posts on the 31 | rules of thumb that other projects have come up with: 32 | 33 | * Linus Torvalds on `git management`_ 34 | * Linus Torvalds on `linux git workflow`_ . Summary; use the git tools 35 | to make the history of your edits as clean as possible; merge from 36 | upstream edits as little as possible in branches where you are doing 37 | active development. 
38 | 39 | Manual pages online 40 | =================== 41 | 42 | You can get these on your own machine with (e.g) ``git help push`` or 43 | (same thing) ``git push --help``, but, for convenience, here are the 44 | online manual pages for some common commands: 45 | 46 | * `git add`_ 47 | * `git branch`_ 48 | * `git checkout`_ 49 | * `git clone`_ 50 | * `git commit`_ 51 | * `git config`_ 52 | * `git diff`_ 53 | * `git log`_ 54 | * `git pull`_ 55 | * `git push`_ 56 | * `git remote`_ 57 | * `git status`_ 58 | 59 | .. include:: links.inc 60 | -------------------------------------------------------------------------------- /ptsa/data/tests/testdata.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | 11 | from ptsa.data import Dim 12 | 13 | 14 | import numpy as N 15 | 16 | class TestData(): 17 | def __init__(self): 18 | # create 10 Hz sine waves at 200 and 50 Hz 4000ms long 19 | numSecs = 4. 20 | numPoints = int(numSecs*200.) 21 | Hz = 10 22 | d200_10 = N.sin(N.arange(numPoints,dtype=N.float)*2*N.pi*Hz*numSecs/numPoints) 23 | Hz = 5 24 | d200_5 = N.sin(N.arange(numPoints,dtype=N.float)*2*N.pi*Hz*numSecs/numPoints) 25 | self.dat200 = N.array([d200_10,d200_5]) 26 | # calc the time range in MS 27 | offset = -200 28 | duration = numPoints 29 | samplesize = 1000./200. 30 | sampStart = offset*samplesize 31 | sampEnd = sampStart + (duration-1)*samplesize 32 | timeRange = N.linspace(sampStart,sampEnd,duration) 33 | self.dims200 = [Dim('channel',N.arange(self.dat200.shape[0])), 34 | Dim('time',timeRange,'ms')] 35 | 36 | numSecs = 4. 37 | numPoints = int(numSecs*50.) 38 | Hz = 10 39 | d50_10 = N.sin(N.arange(numPoints,dtype=N.float)*2*N.pi*Hz*numSecs/numPoints) 40 | Hz = 5 41 | d50_5 = N.sin(N.arange(numPoints,dtype=N.float)*2*N.pi*Hz*numSecs/numPoints) 42 | self.dat50 = N.array([d50_10,d50_5]) 43 | # calc the time range in MS 44 | offset = -50 45 | duration = numPoints 46 | samplesize = 1000./50. 47 | sampStart = offset*samplesize 48 | sampEnd = sampStart + (duration-1)*samplesize 49 | timeRange = N.linspace(sampStart,sampEnd,duration) 50 | self.dims50 = [Dim('channel',N.arange(self.dat50.shape[0])), 51 | Dim('time',timeRange,'ms')] 52 | 53 | -------------------------------------------------------------------------------- /examples/process_edf.py: -------------------------------------------------------------------------------- 1 | import os.path 2 | import numpy as np 3 | import pylab as pl 4 | 5 | from ptsa.data import EdfWrapper, Events 6 | from ptsa.wavelet import phase_pow_multi 7 | 8 | # load in example data 9 | if os.path.exists('examples/example_data/sinus.bdf'): 10 | edfw = EdfWrapper('examples/example_data/sinus.bdf') 11 | elif os.path.exists('example_data/sinus.bdf'): 12 | edfw = EdfWrapper('example_data/sinus.bdf') 13 | else: 14 | raise IOError('Example data file sinus.bdf not found! 
'+ 15 | 'This file must be in example_data folder!') 16 | 17 | for chan_num in [0,1]: 18 | samplerate = edfw.get_samplerate(chan_num) 19 | nsamples = samplerate*100 20 | event_dur = samplerate*1 21 | buf_dur = 1.0 22 | 23 | 24 | # generate fake events (one every .25 second) 25 | eoffset = np.arange(20)*samplerate/4 26 | esrc = [edfw]*len(eoffset) 27 | events = Events(np.rec.fromarrays([esrc,eoffset], 28 | names='esrc,eoffset')) 29 | 30 | # load in data with events (resample at the same time) 31 | # check out the ringing induced in the saw-tooth with the resample! 32 | dat = events.get_data(chan_num, # channel 33 | 1.0, # duration in sec 34 | 0.0, # offset in sec 35 | buf_dur, # buffer in sec 36 | keep_buffer=True, 37 | resampled_rate=500 38 | ) 39 | # calc wavelet power 40 | freqs = np.arange(2,50,2) 41 | datpow = phase_pow_multi(freqs,dat,to_return='power') 42 | 43 | # remove the buffer now that we have filtered and calculated power 44 | dat = dat.remove_buffer(buf_dur) 45 | datpow = datpow.remove_buffer(buf_dur) 46 | 47 | # plot ERP 48 | pl.figure() 49 | pl.clf() 50 | pl.plot(dat['time'],dat.nanmean('events'),'r') 51 | pl.xlabel('Time (s)') 52 | pl.ylabel('Voltage') 53 | 54 | # plot power spectrum 55 | pl.figure() 56 | pl.clf() 57 | pl.plot(datpow['freqs'],datpow.nanmean('events').nanmean('time'),'r') 58 | pl.xlabel('Frequency (Hz)') 59 | pl.ylabel('Power') 60 | 61 | pl.show() 62 | -------------------------------------------------------------------------------- /ptsa/hilbert.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 
7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | import sys 11 | import numpy as np 12 | from scipy.signal import hilbert 13 | 14 | from ptsa.data.timeseries import TimeSeries,Dim 15 | from ptsa.helper import next_pow2 16 | 17 | freq_bands = [('delta', [2.0,4.0]), 18 | ('theta', [4.0,8.0]), 19 | ('alpha', [9.0,14.0]), 20 | ('beta', [16.0,26.0]), 21 | ('gamma_1', [28.0,42.0]), 22 | ('gamma_2', [44.0,100.0])] 23 | def hilbert_pow(dat_ts, bands=None, pad_to_pow2=False, verbose=True): 24 | """ 25 | """ 26 | # set default freq bands 27 | if bands is None: 28 | bands = freq_bands 29 | 30 | # proc padding 31 | taxis = dat_ts.get_axis(dat_ts.tdim) 32 | npts_orig = dat_ts.shape[taxis] 33 | if pad_to_pow2: 34 | npts = 2**next_pow2(npts_orig) 35 | else: 36 | npts = npts_orig 37 | 38 | # calc the hilbert power 39 | if verbose: 40 | sys.stdout.write('Hilbert Bands: ') 41 | sys.stdout.flush() 42 | pow = None 43 | for band in bands: 44 | if verbose: 45 | sys.stdout.write('%s '%band[0]) 46 | sys.stdout.flush() 47 | p = TimeSeries(np.abs(hilbert(dat_ts.filtered(band[1], 48 | filt_type='pass'), 49 | N=npts, axis=taxis).take(np.arange(npts_orig), 50 | axis=taxis)), 51 | tdim=dat_ts.tdim, samplerate=dat_ts.samplerate, 52 | dims=dat_ts.dims.copy()).add_dim(Dim([band[0]],'freqs')) 53 | if pow is None: 54 | pow = p 55 | else: 56 | pow = pow.extend(p, 'freqs') 57 | 58 | if verbose: 59 | sys.stdout.write('\n') 60 | sys.stdout.flush() 61 | return pow 62 | -------------------------------------------------------------------------------- /ptsa/__init__.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | """ 11 | PTSA - The Python Time-Series Analysis toolbox. 12 | """ 13 | 14 | 15 | # from data import DataWrapper,Events,RawBinaryEEG,createEventsFromMatFile 16 | # from ptsa.data import Dim,Dims,DimData,TimeSeries 17 | # from ptsa.filt import buttfilt, filtfilt 18 | # from ptsa.plotting import topoplot 19 | # from ptsa.wavelet import tsPhasePow,phasePow1d,phasePow2d 20 | # from ptsa.version import versionAtLeast,versionWithin 21 | 22 | #__all__ = [data,filter,plotting,wavelet] 23 | 24 | packages = ('ptsa', 25 | 'ptsa.tests', 26 | 'ptsa.data', 27 | 'ptsa.data.tests', 28 | 'ptsa.plotting', 29 | 'ptsa.plotting.tests', 30 | 'ptsa.stats') 31 | 32 | def _test(method, level, verbosity, flags): 33 | """ 34 | Run test suite with level and verbosity. 
35 | 36 | level: 37 | None --- do nothing, return None 38 | < 0 --- scan for tests of level=abs(level), 39 | don't run them, return TestSuite-list 40 | > 0 --- scan for tests of level, run them, 41 | return TestRunner 42 | 43 | verbosity: 44 | >= 0 --- show information messages 45 | > 1 --- show warnings on missing tests 46 | """ 47 | from numpy.testing import NumpyTest, importall 48 | importall('ptsa') 49 | return getattr(NumpyTest(), method)(level, verbosity=2) 50 | 51 | def test(level=1, verbosity=1, flags=[]): 52 | _test('test', level=level, verbosity=verbosity, flags=flags) 53 | test.__doc__ = "Using NumpyTest test method.\n"+_test.__doc__ 54 | 55 | def testall(level=1, verbosity=1, flags=[]): 56 | _test('testall', level=level, verbosity=verbosity, flags=flags) 57 | testall.__doc__ = "Using NumpyTest testall method.\n"+_test.__doc__ 58 | 59 | -------------------------------------------------------------------------------- /docs/devel/gitwash/set_up_fork.rst: -------------------------------------------------------------------------------- 1 | .. _set-up-fork: 2 | 3 | ================== 4 | Set up your fork 5 | ================== 6 | 7 | First you follow the instructions for :ref:`forking`. 8 | 9 | Overview 10 | ======== 11 | 12 | :: 13 | 14 | git clone git@github.com:your-user-name/ptsa.git 15 | cd ptsa 16 | git remote add upstream git://github.com/compmem/ptsa.git 17 | 18 | In detail 19 | ========= 20 | 21 | Clone your fork 22 | --------------- 23 | 24 | #. Clone your fork to the local computer with ``git clone 25 | git@github.com:your-user-name/ptsa.git`` 26 | #. Investigate. Change directory to your new repo: ``cd ptsa``. Then 27 | ``git branch -a`` to show you all branches. You'll get something 28 | like:: 29 | 30 | * master 31 | remotes/origin/master 32 | 33 | This tells you that you are currently on the ``master`` branch, and 34 | that you also have a ``remote`` connection to ``origin/master``. 35 | What remote repository is ``remote/origin``? Try ``git remote -v`` to 36 | see the URLs for the remote. They will point to your github fork. 37 | 38 | Now you want to connect to the upstream `ptsa github`_ repository, so 39 | you can merge in changes from trunk. 40 | 41 | .. _linking-to-upstream: 42 | 43 | Linking your repository to the upstream repo 44 | -------------------------------------------- 45 | 46 | :: 47 | 48 | cd ptsa 49 | git remote add upstream git://github.com/compmem/ptsa.git 50 | 51 | ``upstream`` here is just the arbitrary name we're using to refer to the 52 | main `ptsa`_ repository at `ptsa github`_. 53 | 54 | Note that we've used ``git://`` for the URL rather than ``git@``. The 55 | ``git://`` URL is read only. This means we that we can't accidentally 56 | (or deliberately) write to the upstream repo, and we are only going to 57 | use it to merge into our own code. 58 | 59 | Just for your own satisfaction, show yourself that you now have a new 60 | 'remote', with ``git remote -v show``, giving you something like:: 61 | 62 | upstream git://github.com/compmem/ptsa.git (fetch) 63 | upstream git://github.com/compmem/ptsa.git (push) 64 | origin git@github.com:your-user-name/ptsa.git (fetch) 65 | origin git@github.com:your-user-name/ptsa.git (push) 66 | 67 | .. 
include:: links.inc 68 | 69 | -------------------------------------------------------------------------------- /ptsa/data/arraywrapper.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | # local imports 11 | from basewrapper import BaseWrapper 12 | 13 | # global imports 14 | import numpy as np 15 | 16 | class ArrayWrapper(BaseWrapper): 17 | """ 18 | Interface to data stored in a numpy ndarray where the first 19 | dimension is the channel and the second dimension is samples. 20 | """ 21 | def __init__(self,data,samplerate,annotations=None): 22 | """Initialize the interface to the data. You must specify the 23 | data and the samplerate.""" 24 | # set up the basic params of the data 25 | self._data = data 26 | self._samplerate = samplerate 27 | self._annotations = annotations 28 | 29 | def _get_nchannels(self): 30 | return self._data.shape[0] 31 | 32 | def _get_nsamples(self, channel=None): 33 | return self._data.shape[1] 34 | 35 | def _get_samplerate(self, channel=None): 36 | # Same samplerate for all channels: 37 | return self._samplerate 38 | 39 | def _get_annotations(self): 40 | return self._annotations 41 | 42 | def _load_data(self,channels,event_offsets,dur_samp,offset_samp): 43 | """ 44 | """ 45 | # allocate for data 46 | eventdata = np.empty((len(channels),len(event_offsets),dur_samp), 47 | dtype=self._data.dtype)*np.nan 48 | 49 | # loop over events 50 | for e,evOffset in enumerate(event_offsets): 51 | # set the range 52 | ssamp = offset_samp+evOffset 53 | esamp = ssamp + dur_samp 54 | 55 | # check the ranges 56 | if ssamp < 0 or esamp > self._data.shape[1]: 57 | raise IOError('Event with offset '+str(evOffset)+ 58 | ' is outside the bounds of the data.') 59 | eventdata[:,e,:] = self._data[channels,ssamp:esamp] 60 | 61 | return eventdata 62 | -------------------------------------------------------------------------------- /ptsa/fixed_scipy.py: -------------------------------------------------------------------------------- 1 | """ 2 | Functions that are not yet included or fixed in a stable scipy release 3 | are provided here until they are easily available in scipy. 4 | """ 5 | 6 | 7 | ################################################################################ 8 | ################################################################################ 9 | ### 10 | ### scipy.signal.wavelets.morlet 11 | ### 12 | ################################################################################ 13 | ################################################################################ 14 | 15 | from scipy import linspace, pi, exp, zeros 16 | 17 | def morlet(M, w=5.0, s=1.0, complete=True): 18 | """Complex Morlet wavelet. 19 | 20 | Parameters 21 | ---------- 22 | M : int 23 | Length of the wavelet. 24 | w : float 25 | Omega0 26 | s : float 27 | Scaling factor, windowed from -s*2*pi to +s*2*pi. 28 | complete : bool 29 | Whether to use the complete or the standard version. 30 | 31 | Notes: 32 | ------ 33 | The standard version: 34 | pi**-0.25 * exp(1j*w*x) * exp(-0.5*(x**2)) 35 | 36 | This commonly used wavelet is often referred to simply as the 37 | Morlet wavelet. 
Note that, this simplified version can cause 38 | admissibility problems at low values of w. 39 | 40 | The complete version: 41 | pi**-0.25 * (exp(1j*w*x) - exp(-0.5*(w**2))) * exp(-0.5*(x**2)) 42 | 43 | The complete version of the Morlet wavelet, with a correction 44 | term to improve admissibility. For w greater than 5, the 45 | correction term is negligible. 46 | 47 | Note that the energy of the return wavelet is not normalised 48 | according to s. 49 | 50 | The fundamental frequency of this wavelet in Hz is given 51 | by f = 2*s*w*r / M where r is the sampling rate. 52 | 53 | """ 54 | x = linspace(-s*2*pi,s*2*pi,M) 55 | output = exp(1j*w*x) 56 | 57 | if complete: 58 | output -= exp(-0.5*(w**2)) 59 | 60 | output *= exp(-0.5*(x**2)) * pi**(-0.25) 61 | 62 | return output 63 | 64 | ### 65 | ### scipy.signal.wavelets.morlet() 66 | ### 67 | ################################################################################ 68 | ################################################################################ 69 | -------------------------------------------------------------------------------- /todo.txt: -------------------------------------------------------------------------------- 1 | Short-term goals 2 | ---------------- 3 | 4 | 1) Rereferencing code (may not be immediately necessary) 5 | 6 | 2) WICA eyeblink correction code (and also simple eyeblink id code). 7 | 8 | 3) Save DimArray to HDF5 files (and back with fancy indexing). 9 | 10 | 4) Redo DimArray and Dim to have Pythonic indexing: 11 | 12 | x[(x.time > 0)&(x.time < 1.0)&(x.channels == 'Cz')] 13 | 14 | instead of 15 | 16 | x['(time > 0)&(time < 1.0)', 'channels == "Cz"'] 17 | 18 | 5) Casting of HDF5Wrapper to int16 with scale factor to save space. 19 | 20 | 6) Fix topoplot issues (look at PyMVPA for hints). 21 | 22 | 7) Partial Least Squares 23 | 24 | 8) 25 | 26 | 27 | Analysis steps: 28 | --------------- 29 | 30 | - Load all data with wrapper 31 | - Downsample to 256 Hz 32 | - Rereference (likely to average to retain all channels) 33 | - Highpass filter at ~.5Hz 34 | - Identify range with clean eyeblinks 35 | - Run WICA to clean eyeblinks 36 | - Save processed file out to HDF5 (with annotations) 37 | 38 | - Align preprocessed data to experiment with events 39 | - Load events of interest and calculate wavelet power 40 | - Save to file (ideally DimArray to HDF5, but it's not ready) 41 | 42 | - Get means for the conditions for each participant and run PLS 43 | - Plot results 44 | - Write paper 45 | - Profit!!! 46 | 47 | 48 | Other ToDos: 49 | ------------ 50 | 51 | Redo fancy 3D scalp and intracranial plots using VTK (for 52 | publications and other fun stuph.) 53 | 54 | Circular stats, for analyzing phase info. 55 | 56 | Simple stats, such as bootstraps/permutation tests. The wilcox tests 57 | in scipy are kind of not ideal. However, we can use rpy for any stats 58 | we really care about. 59 | 60 | Add in EMD and Hilbert-Huang Transform. (In progress) 61 | 62 | Frequency flows analysis. 63 | 64 | Maybe add in multitaper at some point, but probably not. 65 | 66 | Methods for extracting sync pulses and aligning events. (DONE) 67 | 68 | Artifact removal (artifact correction) (DONE) 69 | 70 | 71 | 72 | Old basic workflow: 73 | 74 | - We have a bunch of subjects who performed some task. 75 | - Events for each subject, allow us to access the eeg data. 76 | - Calculate phase and power for eeg, saving to file (for now, we'll 77 | - save to dictionaries, eventually HDF5) 78 | - Run stats on power values. 79 | - Plot/display results. 
80 | 81 | 82 | -------------------------------------------------------------------------------- /ptsa/stats/stat_helper.py: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | 3 | def _ecdf(x): 4 | '''no frills empirical cdf used in fdrcorrection 5 | ''' 6 | nobs = len(x) 7 | return np.arange(1, nobs + 1) / float(nobs) 8 | 9 | 10 | 11 | def fdr_correction(pvals, alpha=0.05, method='indep'): 12 | """P-value correction with False Discovery Rate (FDR) 13 | 14 | Correction for multiple comparison using FDR. 15 | 16 | This covers Benjamini/Hochberg for independent or positively correlated and 17 | Benjamini/Yekutieli for general or negatively correlated tests. 18 | 19 | Parameters 20 | ---------- 21 | pvals : array_like 22 | set of p-values of the individual tests. 23 | alpha : float 24 | error rate 25 | method : 'indep' | 'negcorr' 26 | If 'indep' it implements Benjamini/Hochberg for independent or if 27 | 'negcorr' it corresponds to Benjamini/Yekutieli. 28 | 29 | Returns 30 | ------- 31 | reject : array, bool 32 | True if a hypothesis is rejected, False if not 33 | pval_corrected : array 34 | pvalues adjusted for multiple hypothesis testing to limit FDR 35 | 36 | Notes 37 | ----- 38 | Reference: 39 | Genovese CR, Lazar NA, Nichols T. 40 | Thresholding of statistical maps in functional neuroimaging using the false 41 | discovery rate. Neuroimage. 2002 Apr;15(4):870-8. 42 | """ 43 | pvals = np.asarray(pvals) 44 | shape_init = pvals.shape 45 | pvals = pvals.ravel() 46 | 47 | pvals_sortind = np.argsort(pvals) 48 | pvals_sorted = pvals[pvals_sortind] 49 | sortrevind = pvals_sortind.argsort() 50 | 51 | if method in ['i', 'indep', 'p', 'poscorr']: 52 | ecdffactor = _ecdf(pvals_sorted) 53 | elif method in ['n', 'negcorr']: 54 | cm = np.sum(1. 
/ np.arange(1, len(pvals_sorted) + 1)) 55 | ecdffactor = _ecdf(pvals_sorted) / cm 56 | else: 57 | raise ValueError("Method should be 'indep' and 'negcorr'") 58 | 59 | reject = pvals_sorted < (ecdffactor * alpha) 60 | if reject.any(): 61 | rejectmax = max(np.nonzero(reject)[0]) 62 | else: 63 | rejectmax = 0 64 | reject[:rejectmax] = True 65 | 66 | pvals_corrected_raw = pvals_sorted / ecdffactor 67 | pvals_corrected = np.minimum.accumulate(pvals_corrected_raw[::-1])[::-1] 68 | pvals_corrected[pvals_corrected > 1.0] = 1.0 69 | pvals_corrected = pvals_corrected[sortrevind].reshape(shape_init) 70 | reject = reject[sortrevind].reshape(shape_init) 71 | return reject, pvals_corrected 72 | 73 | -------------------------------------------------------------------------------- /examples/topoPlotDemo.py: -------------------------------------------------------------------------------- 1 | from pylab import loadtxt, rand, figure, xlim, ylim, show 2 | from ptsa.plotting.topoplot import topoplot 3 | 4 | def getElecs(): 5 | # read in testLocs.dat that was generated in Matlab as follows: 6 | # locs_orig=readlocs('GSN129.sfp'); 7 | # locs=locs_orig(4:end); %ignore orig locations 1-3, these are frontal ones we dont have 8 | # tmp = [locs.theta; locs.radius]; 9 | # save testLocs.dat tmp -ascii 10 | locs=loadtxt("testLocs.dat") 11 | theta=-locs[0]+90 12 | 13 | #theta=deg2rad(theta) 14 | radius=locs[1]#*(headRad/0.5) 15 | #x,y=pol2cart(theta,radius,radians=False) 16 | return theta,radius #x,y 17 | 18 | def getPowerVals(): 19 | # read in toPlotDiff 20 | toPlot = loadtxt("toPlotDiff.dat") 21 | return toPlot 22 | 23 | els = getElecs() 24 | toPlot=rand(129)#getPowerVals() 25 | 26 | fig=1 27 | figure(fig) 28 | topoplot() 29 | #show() 30 | 31 | fig+=1 32 | figure(fig) 33 | topoplot(sensors=els) 34 | #show() 35 | 36 | fig+=1 37 | figure(fig) 38 | topoplot(sensors=els,colors=[None,'black','black']) 39 | #show() 40 | 41 | fig+=1 42 | figure(fig) 43 | topoplot(values=toPlot,sensors=els,colors=['black',None,'black']) 44 | 45 | fig+=1 46 | figure(fig) 47 | topoplot(sensors=els,values=toPlot,colors=['black','black',None]) 48 | 49 | fig+=1 50 | figure(fig) 51 | topoplot(sensors=els,values=toPlot,colors=[None,None,'black']) 52 | 53 | 54 | fig+=1 55 | figure(fig) 56 | topoplot(center=(0,0),sensors=els,values=toPlot,plot_mask='linear') 57 | topoplot(center=(2,0),sensors=els,values=toPlot,plot_mask='circular') 58 | topoplot(center=(4,0),sensors=els,values=toPlot,plot_mask='square') 59 | xlim(-1,5) 60 | 61 | grid=100 62 | fig+=1 63 | figure(fig) 64 | topoplot(center=(0,0),radius=0.2,sensors=els,values=toPlot,resolution=grid) 65 | topoplot(center=(1.5,0),radius=0.5,sensors=els,values=toPlot,resolution=grid) 66 | topoplot(center=(4,0),radius=1,sensors=els,values=toPlot,resolution=grid) 67 | topoplot(center=(8.5,0),radius=2,sensors=els,values=toPlot,resolution=grid) 68 | xlim(-0.5,12) 69 | #axis('on') 70 | #show() 71 | 72 | fig+=1 73 | figure(fig) 74 | topoplot(center=(0,0),nose_dir=-45,sensors=els,values=toPlot,colors=['black',None,'black'],resolution=grid) 75 | topoplot(center=(0,2),nose_dir=-135,sensors=els,values=toPlot,colors=['black',None,'black'],resolution=grid) 76 | topoplot(center=(2,0),nose_dir=135,sensors=els,values=toPlot,colors=['black',None,'black'],resolution=grid) 77 | topoplot(center=(2,2),nose_dir=45,sensors=els,values=toPlot,colors=['black',None,'black'],resolution=grid) 78 | xlim(-1,3) 79 | ylim(-1,3) 80 | #show() 81 | 82 | #topoplot 83 | 84 | 85 | show() 86 | 
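The fdr_correction() helper defined in ptsa/stats/stat_helper.py above takes a flat or n-dimensional array of p-values and returns a boolean rejection mask plus adjusted p-values. A minimal usage sketch, assuming ptsa is importable; the p-values below are made up for illustration.

# Sketch: Benjamini/Hochberg FDR control with fdr_correction() from
# ptsa/stats/stat_helper.py; the p-values are invented for illustration.
import numpy as np
from ptsa.stats.stat_helper import fdr_correction

pvals = np.array([0.001, 0.008, 0.039, 0.041, 0.042, 0.060, 0.074, 0.205])
reject, pvals_adj = fdr_correction(pvals, alpha=0.05, method='indep')

print reject     # boolean mask of hypotheses rejected at FDR alpha = 0.05
print pvals_adj  # FDR-adjusted p-values, same shape as the input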
-------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = -a 6 | SPHINXBUILD = sphinx-build 7 | PAPER = 8 | 9 | # Internal variables. 10 | PAPEROPT_a4 = -D latex_paper_size=a4 11 | PAPEROPT_letter = -D latex_paper_size=letter 12 | ALLSPHINXOPTS = -d _build/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 13 | 14 | .PHONY: help clean html web pickle htmlhelp latex changes linkcheck 15 | 16 | help: 17 | @echo "Please use \`make ' where is one of" 18 | @echo " html to make standalone HTML files" 19 | @echo " pickle to make pickle files" 20 | @echo " json to make JSON files" 21 | @echo " htmlhelp to make HTML files and a HTML help project" 22 | @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" 23 | @echo " changes to make an overview over all changed/added/deprecated items" 24 | @echo " linkcheck to check all external links for integrity" 25 | 26 | clean: 27 | -rm -rf _build/* 28 | 29 | html: 30 | mkdir -p _build/html _build/doctrees 31 | $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) _build/html 32 | @echo 33 | @echo "Build finished. The HTML pages are in _build/html." 34 | 35 | pickle: 36 | mkdir -p _build/pickle _build/doctrees 37 | $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) _build/pickle 38 | @echo 39 | @echo "Build finished; now you can process the pickle files." 40 | 41 | web: pickle 42 | 43 | json: 44 | mkdir -p _build/json _build/doctrees 45 | $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) _build/json 46 | @echo 47 | @echo "Build finished; now you can process the JSON files." 48 | 49 | htmlhelp: 50 | mkdir -p _build/htmlhelp _build/doctrees 51 | $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) _build/htmlhelp 52 | @echo 53 | @echo "Build finished; now you can run HTML Help Workshop with the" \ 54 | ".hhp project file in _build/htmlhelp." 55 | 56 | latex: 57 | mkdir -p _build/latex _build/doctrees 58 | $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) _build/latex 59 | @echo 60 | @echo "Build finished; the LaTeX files are in _build/latex." 61 | @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ 62 | "run these through (pdf)latex." 63 | 64 | changes: 65 | mkdir -p _build/changes _build/doctrees 66 | $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) _build/changes 67 | @echo 68 | @echo "The overview file is in _build/changes." 69 | 70 | linkcheck: 71 | mkdir -p _build/linkcheck _build/doctrees 72 | $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) _build/linkcheck 73 | @echo 74 | @echo "Link check complete; look for any errors in the above output " \ 75 | "or in _build/linkcheck/output.txt." 
76 | -------------------------------------------------------------------------------- /examples/dataWaveDemo.m: -------------------------------------------------------------------------------- 1 | 2 | % load events 3 | fprintf('Loading events...'); 4 | ev = loadEvents('/home1/per/eeg/free/CH012/events/events.mat'); 5 | 6 | % split out two conditions (recalled and not recalled) 7 | rInd = inStruct(ev,'recalled==1'); 8 | nInd = inStruct(ev,'recalled==0'); 9 | 10 | % get power for the events for a range of freqs 11 | 12 | % we leave the buffer on after getting the data, but pull it off 13 | % in the call to tsPhasePow 14 | freqs = [2:2:80]; 15 | chan = 27; 16 | durationMS = 4500; 17 | offsetMS = -1500; 18 | bufferMS = 1000; 19 | resampledRate = 200; 20 | filtFreq = [58.0,62.0]; 21 | 22 | % load the eeg data 23 | rEEG = gete_ms(chan,ev(rInd),durationMS,offsetMS,bufferMS,filtFreq,'stop',4,resampledRate); 24 | nEEG = gete_ms(chan,ev(nInd),durationMS,offsetMS,bufferMS,filtFreq,'stop',4,resampledRate); 25 | 26 | durationMS = 2500; 27 | offsetMS = -500; 28 | 29 | % power for recalled events 30 | rRes = getphasepow(chan,ev(rInd),durationMS,offsetMS,bufferMS,'freqs',freqs,... 31 | 'width',5,'filtfreq',filtFreq,'filttype','stop','filtorder',4,... 32 | 'resampledrate',resampledRate,'powonly'); 33 | nRes = getphasepow(chan,ev(nInd),durationMS,offsetMS,bufferMS,'freqs',freqs,... 34 | 'width',5,'filtfreq',filtFreq,'filttype','stop','filtorder',4,... 35 | 'resampledrate',resampledRate,'powonly'); 36 | 37 | % get mean power across events (axis=1) 38 | rPow = squeeze(mean(log10(rRes),1)); 39 | nPow = squeeze(mean(log10(nRes),1)); 40 | 41 | % times 42 | times = linspace(-500,2000,size(rPow,2)); 43 | timeserp = linspace(-1500,3000,size(rEEG,2)); 44 | 45 | fprintf('Generating plots...\n') 46 | fig = 0; 47 | 48 | % erp 49 | fig = fig + 1; 50 | figure(fig); 51 | plot(timeserp,mean(rEEG,1),'r'); 52 | hold on 53 | plot(timeserp,mean(nEEG,1),'b'); 54 | hold off 55 | xlim([-2000 4000]); 56 | legend('Recalled','Not Recalled') 57 | xlabel('Time (ms)') 58 | ylabel('Voltage (mV)') 59 | 60 | %keyboard 61 | 62 | 63 | % power spectrum 64 | 65 | fig=fig+1; 66 | figure(fig); 67 | plot(freqs,squeeze(mean(rPow,2)),'r'); 68 | hold on 69 | plot(freqs,squeeze(mean(nPow,2)),'b'); 70 | hold off 71 | legend('Recalled','Not Recalled'); 72 | xlabel('Frequency (Hz)'); 73 | ylabel('Power ($log_{10}(mV^2)$)'); 74 | 75 | 76 | 77 | % plot the diff in mean power 78 | fig=fig+1; 79 | figure(fig) 80 | contourf(times,freqs,rPow-nPow) 81 | colorbar() 82 | xlabel('Time (ms)') 83 | ylabel('Frequency (Hz)') 84 | %title('SME (diff in power) for channel %d' % (chan)) 85 | 86 | % Alternative way to do it: 87 | % fig=fig+1; 88 | % figure(fig) 89 | % imagesc(times,freqs,rPow-nPow) 90 | % axis xy 91 | % colorbar 92 | % xlabel('Time (ms)') 93 | % ylabel('Frequency (Hz)') 94 | 95 | 96 | 97 | -------------------------------------------------------------------------------- /ptsa/plotting/logo.py: -------------------------------------------------------------------------------- 1 | """ 2 | Logo design inspired by the matplotlib logo by Tony Yu . 
3 | """ 4 | 5 | import numpy as np 6 | import matplotlib as mpl 7 | import matplotlib.pyplot as plt 8 | import matplotlib.cm as cm 9 | import matplotlib.mlab as mlab 10 | 11 | mpl.rcParams['xtick.labelsize'] = 10 12 | mpl.rcParams['ytick.labelsize'] = 12 13 | mpl.rcParams['axes.edgecolor'] = 'gray' 14 | 15 | axalpha = 0.05 16 | #figcolor = '#EFEFEF' 17 | figcolor = 'white' 18 | dpi = 80 19 | fig = plt.figure(figsize=(4, 1.1),dpi=dpi) 20 | fig.figurePatch.set_edgecolor(figcolor) 21 | fig.figurePatch.set_facecolor(figcolor) 22 | 23 | 24 | def add_timeseries(): 25 | ax = fig.add_axes([0., 0., 1., 1.]) 26 | x = np.linspace(0,1,1000) 27 | freqs = [8,16,32,64] 28 | # y = np.zeros(1000) 29 | # for f in freqs: 30 | # y = y + np.sin(x*np.pi*f*4 + f/60.)*(10.0/(f)) 31 | # y = y+.5 32 | y = np.sin(x*np.pi*32)*.45 + .5 33 | lines = plt.plot(x,y, 34 | transform=ax.transAxes, 35 | color="#11557c", alpha=0.25,) 36 | 37 | ax.set_axis_off() 38 | return ax 39 | 40 | def add_ptsa_text(ax): 41 | ax.text(0.95, 0.5, 'PTSA', color='#11557c', fontsize=65, 42 | ha='right', va='center', alpha=1.0, transform=ax.transAxes) 43 | 44 | def add_pizza(): 45 | ax = fig.add_axes([0.025, 0.075, 0.3, 0.85], polar=True, resolution=50) 46 | 47 | ax.axesPatch.set_alpha(axalpha) 48 | ax.set_axisbelow(True) 49 | N = 8 50 | arc = 2. * np.pi 51 | theta = np.arange(0.0, arc, arc/N) 52 | radii = 10 * np.array([0.79, 0.81, 0.78, 0.77, 0.79, 0.78, 0.83, 0.78]) 53 | width = np.pi / 4 * np.array([1.0]*N) 54 | theta = theta[1:] 55 | radii = radii[1:] 56 | width = width[1:] 57 | bars = ax.bar(theta, radii, width=width, bottom=0.0) 58 | for r, bar in zip(radii, bars): 59 | bar.set_facecolor(cm.hot(r/10.)) 60 | bar.set_edgecolor('r') 61 | bar.set_alpha(0.6) 62 | 63 | for label in ax.get_xticklabels() + ax.get_yticklabels(): 64 | label.set_visible(False) 65 | 66 | for line in ax.get_ygridlines() + ax.get_xgridlines(): 67 | line.set_lw(0.8) 68 | line.set_alpha(0.9) 69 | line.set_ls('-') 70 | line.set_color('0.5') 71 | 72 | # add some veggie peperoni 73 | #theta = np.array([.08,.18,.32,.46,.54,.68,.77,.85,.96]) * np.pi * 2.0 74 | #radii = 10*np.array([.6,.38,.58,.5,.62,.42,.58,.67,.45]) 75 | theta = np.array([.18,.32,.46,.54,.68,.77,.85,.96]) * np.pi * 2.0 76 | radii = 10*np.array([.38,.58,.5,.62,.42,.58,.67,.45]) 77 | c = plt.scatter(theta,radii,c='r',s=7**2) 78 | c.set_alpha(0.75) 79 | 80 | ax.set_yticks(np.arange(1, 9, 2)) 81 | ax.set_rmax(9) 82 | 83 | 84 | if __name__ == '__main__': 85 | main_axes = add_timeseries() 86 | add_pizza() 87 | #add_ptsa_text(main_axes) 88 | #plt.show() 89 | plt.savefig('logo.png') 90 | 91 | -------------------------------------------------------------------------------- /examples/basic_analysis.py: -------------------------------------------------------------------------------- 1 | 2 | import numpy as np 3 | import pylab as pl 4 | 5 | from ptsa.data import ArrayWrapper, Events 6 | from ptsa.wavelet import phase_pow_multi 7 | 8 | # some general info 9 | nchan = 2 10 | samplerate = 200 11 | nsamples = samplerate*100 12 | event_dur = samplerate*1 13 | buf_dur = 1.0 14 | 15 | # generate fake data 16 | dat = np.random.rand(nchan, nsamples) 17 | aw = ArrayWrapper(dat, samplerate) 18 | 19 | # generate fake events 20 | eoffset = np.arange(event_dur*2,nsamples-(2*event_dur),event_dur) 21 | esrc = [aw]*len(eoffset) 22 | nrec = len(eoffset)/2 23 | recalled = [True]*nrec + [False]*(len(eoffset)-nrec) 24 | events = Events(np.rec.fromarrays([esrc,eoffset,recalled], 25 | names='esrc,eoffset,recalled')) 26 | 27 | # load in data 
with events (filter at the same time) 28 | rdat = events[events.recalled==True].get_data(0, # channel 29 | 1.0, # duration in sec 30 | 0.0, # offset in sec 31 | buf_dur, # buffer in sec 32 | filt_freq = 20., 33 | filt_type = 'low', 34 | keep_buffer=True 35 | ) 36 | ndat = events[events.recalled==False].get_data(0, # channel 37 | 1.0, # duration in sec 38 | 0.0, # offset in sec 39 | buf_dur, # buffer in sec 40 | filt_freq = 20., 41 | filt_type = 'low', 42 | keep_buffer=True 43 | ) 44 | 45 | # calc wavelet power 46 | freqs = np.arange(2,50,2) 47 | rpow = phase_pow_multi(freqs,rdat,to_return='power') 48 | npow = phase_pow_multi(freqs,ndat,to_return='power') 49 | 50 | # remove the buffer now that we have filtered and calculated power 51 | #for ts in [rdat,ndat,rpow,npow]: 52 | # ts = ts.remove_buffer(buf_dur) 53 | # why does the above not work? 54 | rdat = rdat.remove_buffer(buf_dur) 55 | ndat = ndat.remove_buffer(buf_dur) 56 | rpow = rpow.remove_buffer(buf_dur) 57 | npow = npow.remove_buffer(buf_dur) 58 | 59 | # plot ERP 60 | pl.figure(1) 61 | pl.clf() 62 | pl.plot(rdat['time'],rdat.nanmean('events'),'r') 63 | pl.plot(ndat['time'],ndat.nanmean('events'),'b') 64 | pl.legend(('Recalled','Not Recalled'),loc=0) 65 | pl.xlabel('Time (s)') 66 | pl.ylabel('Voltage') 67 | 68 | # plot power spectrum 69 | pl.figure(2) 70 | pl.clf() 71 | pl.plot(rpow['freqs'],rpow.nanmean('events').nanmean('time'),'r') 72 | pl.plot(npow['freqs'],npow.nanmean('events').nanmean('time'),'b') 73 | pl.legend(('Recalled','Not Recalled'),loc=0) 74 | pl.xlabel('Frequency (Hz)') 75 | pl.ylabel('Power') 76 | 77 | pl.show() 78 | -------------------------------------------------------------------------------- /ptsa/tests/test_fixed_scipy.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 
7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | import numpy as N 11 | from numpy.testing import * 12 | #from numpy.testing import NumpyTest, NumpyTestCase 13 | import ptsa.fixed_scipy as wavelets 14 | 15 | 16 | class TestFixed_Scipy(TestCase): 17 | def test_morlet(self): 18 | 19 | x = wavelets.morlet(50,4.1,complete=True) 20 | y = wavelets.morlet(50,4.1,complete=False) 21 | # Test if complete and incomplete wavelet have same lengths: 22 | assert_equal(len(x),len(y)) 23 | # Test if complete wavelet is less than incomplete wavelet: 24 | assert_array_less(x,y) 25 | 26 | x = wavelets.morlet(10,50,complete=False) 27 | y = wavelets.morlet(10,50,complete=True) 28 | # For large widths complete and incomplete wavelets should be 29 | # identical within numerical precision: 30 | assert_equal(x,y) 31 | 32 | # miscellaneous tests: 33 | x = N.array([1.73752399e-09 +9.84327394e-25j, 34 | 6.49471756e-01 +0.00000000e+00j, 35 | 1.73752399e-09 -9.84327394e-25j]) 36 | y = wavelets.morlet(3,w=2,complete=True) 37 | assert_array_almost_equal(x,y) 38 | 39 | x = N.array([2.00947715e-09 +9.84327394e-25j, 40 | 7.51125544e-01 +0.00000000e+00j, 41 | 2.00947715e-09 -9.84327394e-25j]) 42 | y = wavelets.morlet(3,w=2,complete=False) 43 | assert_array_almost_equal(x,y,decimal=2) 44 | 45 | x = wavelets.morlet(10000,s=4,complete=True) 46 | y = wavelets.morlet(20000,s=8,complete=True)[5000:15000] 47 | assert_array_almost_equal(x,y,decimal=2) 48 | 49 | x = wavelets.morlet(10000,s=4,complete=False) 50 | assert_array_almost_equal(y,x,decimal=2) 51 | y = wavelets.morlet(20000,s=8,complete=False)[5000:15000] 52 | assert_array_almost_equal(x,y,decimal=2) 53 | 54 | x = wavelets.morlet(10000,w=3,s=5,complete=True) 55 | y = wavelets.morlet(20000,w=3,s=10,complete=True)[5000:15000] 56 | assert_array_almost_equal(x,y,decimal=2) 57 | 58 | x = wavelets.morlet(10000,w=3,s=5,complete=False) 59 | assert_array_almost_equal(y,x,decimal=2) 60 | y = wavelets.morlet(20000,w=3,s=10,complete=False)[5000:15000] 61 | assert_array_almost_equal(x,y,decimal=2) 62 | 63 | x = wavelets.morlet(10000,w=7,s=10,complete=True) 64 | y = wavelets.morlet(20000,w=7,s=20,complete=True)[5000:15000] 65 | assert_array_almost_equal(x,y,decimal=2) 66 | 67 | x = wavelets.morlet(10000,w=7,s=10,complete=False) 68 | assert_array_almost_equal(x,y,decimal=2) 69 | y = wavelets.morlet(20000,w=7,s=20,complete=False)[5000:15000] 70 | assert_array_almost_equal(x,y,decimal=2) 71 | -------------------------------------------------------------------------------- /examples/dataWaveDemo.py: -------------------------------------------------------------------------------- 1 | import numpy as N 2 | # Can't use latex on the cluster 3 | #import matplotlib 4 | #matplotlib.rc('text', usetex = True) 5 | import pylab 6 | import pdb 7 | 8 | from pyeeg.data.rawbinarydata import createEventsFromMatFile 9 | from pyeeg import wavelet 10 | 11 | 12 | # hypothetical test case 13 | 14 | # load events 15 | print "Loading events..." 16 | ev = createEventsFromMatFile('/home1/per/eeg/free/CH012/events/events.mat') 17 | 18 | # we leave the buffer on after getting the data, but pull it off 19 | # in the call to tsPhasePow 20 | freqs = range(2,81,2) 21 | chan = 27 22 | dur = 2.5 23 | offset = -.500 24 | buf = 1.000 25 | resampledRate = 200 26 | filtFreq = [58.0,62.0] 27 | 28 | # load the eeg data 29 | print "Loading EEG data..." 
30 | rEEG = ev.select(ev['recalled']==1).get_data(chan, 31 | dur, 32 | offset, 33 | buf, 34 | resampledRate, 35 | filtFreq=filtFreq, 36 | keepBuffer=True) 37 | nEEG = ev.select(ev['recalled']==0).get_data(chan, 38 | dur, 39 | offset, 40 | buf, 41 | resampledRate, 42 | filtFreq=filtFreq, 43 | keepBuffer=True) 44 | 45 | # power for recalled events 46 | print "Calculating power..." 47 | rRes = wavelet.tsPhasePow(freqs, 48 | rEEG, 49 | verbose=True,toReturn='pow') 50 | # power for not recalled events 51 | nRes = wavelet.tsPhasePow(freqs, 52 | nEEG, 53 | verbose=True,toReturn='pow') 54 | 55 | # get mean power across events (axis=1) 56 | print "Taking mean power..." 57 | rPow = rRes.apply_func(N.log10).aggregate('event',N.mean) 58 | nPow = nRes.apply_func(N.log10).aggregate('event',N.mean) 59 | 60 | print "Generating plots..." 61 | fig = 0 62 | 63 | # erp 64 | fig+=1 65 | pylab.figure(fig) 66 | pylab.plot(rEEG['time'],rEEG.aggregate('event',N.mean).data,'r') 67 | pylab.plot(nEEG['time'],nEEG.aggregate('event',N.mean).data,'b') 68 | pylab.legend(('Recalled','Not Recalled')) 69 | pylab.xlabel('Time (s)') 70 | pylab.ylabel('Voltage (mV)') 71 | 72 | # power spectrum 73 | fig+=1 74 | pylab.figure(fig) 75 | pylab.plot(rPow['freq'],N.squeeze(rPow.aggregate('time',N.mean).data),'r') 76 | pylab.plot(nPow['freq'],N.squeeze(nPow.data.mean(nPow.dim('time'))),'b') 77 | pylab.legend(('Recalled','Not Recalled')) 78 | pylab.xlabel('Frequency (Hz)') 79 | pylab.ylabel(r'Power ($log_{10}(mV^2)$)') 80 | 81 | # plot the diff in mean power 82 | fig+=1 83 | pylab.figure(fig) 84 | pylab.contourf(rPow['time'],rPow['freq'],rPow.data-nPow.data) 85 | pylab.colorbar() 86 | pylab.xlabel('Time (s)') 87 | pylab.ylabel('Frequency (Hz)') 88 | pylab.title('SME (diff in power) for channel %d' % (chan)) 89 | 90 | # show the plots 91 | pylab.show() 92 | 93 | 94 | -------------------------------------------------------------------------------- /ptsa/data/edf/edfwrap.c: -------------------------------------------------------------------------------- 1 | 2 | #include <stdio.h> 3 | #include <stdlib.h> 4 | 5 | #include "edflib.h" 6 | 7 | #include "edfwrap.h" 8 | 9 | int open_file_readonly(const char *filepath, 10 | struct edf_hdr_struct *hdr, 11 | int read_annot) 12 | { 13 | if(edfopen_file_readonly(filepath, hdr, read_annot)) 14 | { 15 | switch(hdr->filetype) 16 | { 17 | case EDFLIB_MALLOC_ERROR : printf("\nmalloc error\n\n"); 18 | break; 19 | case EDFLIB_NO_SUCH_FILE_OR_DIRECTORY : printf("\ncan not open file, no such file or directory\n\n"); 20 | break; 21 | case EDFLIB_FILE_CONTAINS_FORMAT_ERRORS : printf("\nthe file is not EDF(+) or BDF(+) compliant\n" 22 | "(it contains format errors)\n\n"); 23 | break; 24 | case EDFLIB_MAXFILES_REACHED : printf("\ntoo many files opened\n\n"); 25 | break; 26 | case EDFLIB_FILE_READ_ERROR : printf("\na read error occurred\n\n"); 27 | break; 28 | case EDFLIB_FILE_ALREADY_OPENED : printf("\nfile has already been opened\n\n"); 29 | break; 30 | default : printf("\nunknown error\n\n"); 31 | break; 32 | } 33 | 34 | return(-1); 35 | } 36 | 37 | return 0; 38 | } 39 | 40 | long long get_samples_in_file(struct edf_hdr_struct *hdr, 41 | int edfsignal) 42 | { 43 | return hdr->signalparam[edfsignal].smp_in_file; 44 | } 45 | 46 | double get_samplerate(struct edf_hdr_struct *hdr, 47 | int edfsignal) 48 | { 49 | double samplerate; 50 | 51 | // check the channel 52 | if(edfsignal>(hdr->edfsignals)) 53 | { 54 | printf("\nerror: file has %i signals and you selected signal %i\n\n", 55 | hdr->edfsignals, edfsignal); 56 | return(0.0); 57 | } 
58 | 59 | samplerate = ((double)hdr->signalparam[edfsignal].smp_in_datarecord / 60 | (double)hdr->datarecord_duration) * EDFLIB_TIME_DIMENSION; 61 | return samplerate; 62 | } 63 | 64 | int read_samples_from_file(struct edf_hdr_struct *hdr, 65 | int edfsignal, 66 | long long offset, 67 | int n, 68 | double *buf) 69 | { 70 | int hdl; 71 | /* struct edf_hdr_struct hdr; */ 72 | 73 | /* if (open_file_readonly(filepath, &hdr) < 0) */ 74 | /* { */ 75 | /* printf("\nerror opening file\n\n"); */ 76 | /* return -1; */ 77 | /* } */ 78 | 79 | // get the handle 80 | hdl = hdr->handle; 81 | 82 | // check the channel 83 | if(edfsignal>(hdr->edfsignals)) 84 | { 85 | printf("\nerror: file has %i signals and you selected signal %i\n\n", 86 | hdr->edfsignals, edfsignal); 87 | //edfclose_file(hdl); 88 | return(-1); 89 | } 90 | 91 | // seek to the correct point in the file 92 | edfseek(hdl, edfsignal, offset, SEEK_SET); 93 | 94 | // read the samples 95 | n = edfread_physical_samples(hdl, edfsignal, n, buf); 96 | 97 | // close the file 98 | //edfclose_file(hdl); 99 | 100 | // return how many we read 101 | return n; 102 | } 103 | -------------------------------------------------------------------------------- /docs/devel/gitwash/maintainer_workflow.rst: -------------------------------------------------------------------------------- 1 | .. _maintainer-workflow: 2 | 3 | ################### 4 | Maintainer workflow 5 | ################### 6 | 7 | This page is for maintainers |emdash| those of us who merge our own or other 8 | peoples' changes into the upstream repository. 9 | 10 | Being as how you're a maintainer, you are completely on top of the basic stuff 11 | in :ref:`development-workflow`. 12 | 13 | The instructions in :ref:`linking-to-upstream` add a remote that has read-only 14 | access to the upstream repo. Being a maintainer, you've got read-write access. 15 | 16 | It's good to have your upstream remote have a scary name, to remind you that 17 | it's a read-write remote:: 18 | 19 | git remote add upstream-rw git@github.com:compmem/ptsa.git 20 | git fetch upstream-rw 21 | 22 | ******************* 23 | Integrating changes 24 | ******************* 25 | 26 | Let's say you have some changes that need to go into trunk 27 | (``upstream-rw/master``). 28 | 29 | The changes are in some branch that you are currently on. For example, you are 30 | looking at someone's changes like this:: 31 | 32 | git remote add someone git://github.com/someone/ptsa.git 33 | git fetch someone 34 | git branch cool-feature --track someone/cool-feature 35 | git checkout cool-feature 36 | 37 | So now you are on the branch with the changes to be incorporated upstream. The 38 | rest of this section assumes you are on this branch. 39 | 40 | A few commits 41 | ============= 42 | 43 | If there are only a few commits, consider rebasing to upstream:: 44 | 45 | # Fetch upstream changes 46 | git fetch upstream-rw 47 | # rebase 48 | git rebase upstream-rw/master 49 | 50 | Remember that, if you do a rebase, and push that, you'll have to close any 51 | github pull requests manually, because github will not be able to detect the 52 | changes have already been merged. 53 | 54 | A long series of commits 55 | ======================== 56 | 57 | If there are a longer series of related commits, consider a merge instead:: 58 | 59 | git fetch upstream-rw 60 | git merge --no-ff upstream-rw/master 61 | 62 | The merge will be detected by github, and should close any related pull requests 63 | automatically. 64 | 65 | Note the ``--no-ff`` above. 
This forces git to make a merge commit, rather than 66 | doing a fast-forward, so that these set of commits branch off trunk then rejoin 67 | the main history with a merge, rather than appearing to have been made directly 68 | on top of trunk. 69 | 70 | Check the history 71 | ================= 72 | 73 | Now, in either case, you should check that the history is sensible and you have 74 | the right commits:: 75 | 76 | git log --oneline --graph 77 | git log -p upstream-rw/master.. 78 | 79 | The first line above just shows the history in a compact way, with a text 80 | representation of the history graph. The second line shows the log of commits 81 | excluding those that can be reached from trunk (``upstream-rw/master``), and 82 | including those that can be reached from current HEAD (implied with the ``..`` 83 | at the end). So, it shows the commits unique to this branch compared to trunk. 84 | The ``-p`` option shows the diff for these commits in patch form. 85 | 86 | Push to trunk 87 | ============= 88 | 89 | :: 90 | 91 | git push upstream-rw my-new-feature:master 92 | 93 | This pushes the ``my-new-feature`` branch in this repository to the ``master`` 94 | branch in the ``upstream-rw`` repository. 95 | 96 | .. include:: links.inc 97 | -------------------------------------------------------------------------------- /docs/devel/gitwash/git_links.inc: -------------------------------------------------------------------------------- 1 | .. This (-*- rst -*-) format file contains commonly used link targets 2 | and name substitutions. It may be included in many files, 3 | therefore it should only contain link targets and name 4 | substitutions. Try grepping for "^\.\. _" to find plausible 5 | candidates for this list. 6 | 7 | .. NOTE: reST targets are 8 | __not_case_sensitive__, so only one target definition is needed for 9 | nipy, NIPY, Nipy, etc... 10 | 11 | .. git stuff 12 | .. _git: http://git-scm.com/ 13 | .. _github: http://github.com 14 | .. _github help: http://help.github.com 15 | .. _msysgit: http://code.google.com/p/msysgit/downloads/list 16 | .. _git-osx-installer: http://code.google.com/p/git-osx-installer/downloads/list 17 | .. _subversion: http://subversion.tigris.org/ 18 | .. _git cheat sheet: http://github.com/guides/git-cheat-sheet 19 | .. _pro git book: http://progit.org/ 20 | .. _git svn crash course: http://git-scm.com/course/svn.html 21 | .. _learn.github: http://learn.github.com/ 22 | .. _network graph visualizer: http://github.com/blog/39-say-hello-to-the-network-graph-visualizer 23 | .. _git user manual: http://schacon.github.com/git/user-manual.html 24 | .. _git tutorial: http://schacon.github.com/git/gittutorial.html 25 | .. _git community book: http://book.git-scm.com/ 26 | .. _git ready: http://www.gitready.com/ 27 | .. _git casts: http://www.gitcasts.com/ 28 | .. _Fernando's git page: http://www.fperez.org/py4science/git.html 29 | .. _git magic: http://www-cs-students.stanford.edu/~blynn/gitmagic/index.html 30 | .. _git concepts: http://www.eecs.harvard.edu/~cduan/technical/git/ 31 | .. _git clone: http://schacon.github.com/git/git-clone.html 32 | .. _git checkout: http://schacon.github.com/git/git-checkout.html 33 | .. _git commit: http://schacon.github.com/git/git-commit.html 34 | .. _git push: http://schacon.github.com/git/git-push.html 35 | .. _git pull: http://schacon.github.com/git/git-pull.html 36 | .. _git add: http://schacon.github.com/git/git-add.html 37 | .. _git status: http://schacon.github.com/git/git-status.html 38 | .. 
_git diff: http://schacon.github.com/git/git-diff.html 39 | .. _git log: http://schacon.github.com/git/git-log.html 40 | .. _git branch: http://schacon.github.com/git/git-branch.html 41 | .. _git remote: http://schacon.github.com/git/git-remote.html 42 | .. _git rebase: http://schacon.github.com/git/git-rebase.html 43 | .. _git config: http://schacon.github.com/git/git-config.html 44 | .. _why the -a flag?: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html 45 | .. _git staging area: http://www.gitready.com/beginner/2009/01/18/the-staging-area.html 46 | .. _tangled working copy problem: http://tomayko.com/writings/the-thing-about-git 47 | .. _git management: http://kerneltrap.org/Linux/Git_Management 48 | .. _linux git workflow: http://www.mail-archive.com/dri-devel@lists.sourceforge.net/msg39091.html 49 | .. _git parable: http://tom.preston-werner.com/2009/05/19/the-git-parable.html 50 | .. _git foundation: http://matthew-brett.github.com/pydagogue/foundation.html 51 | .. _deleting master on github: http://matthew-brett.github.com/pydagogue/gh_delete_master.html 52 | .. _rebase without tears: http://matthew-brett.github.com/pydagogue/rebase_without_tears.html 53 | .. _resolving a merge: http://schacon.github.com/git/user-manual.html#resolving-a-merge 54 | .. _ipython git workflow: http://mail.scipy.org/pipermail/ipython-dev/2010-October/006746.html 55 | 56 | .. other stuff 57 | .. _python: http://www.python.org 58 | 59 | .. |emdash| unicode:: U+02014 60 | 61 | .. vim: ft=rst 62 | -------------------------------------------------------------------------------- /ptsa/data/edfwrapper.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | # local imports 11 | from basewrapper import BaseWrapper 12 | from edf import read_samples, read_number_of_samples 13 | from edf import read_samplerate, read_annotations 14 | from edf import read_number_of_signals 15 | # global imports 16 | import numpy as np 17 | import os.path 18 | 19 | class EdfWrapper(BaseWrapper): 20 | """ 21 | Interface to data stored in a EDF file and related formats (such 22 | as BDF). 23 | """ 24 | def __init__(self, filepath): 25 | """ 26 | Initialize the interface to the data. 27 | 28 | Parameters 29 | ---------- 30 | filepath : string 31 | String specifiying the filename (with full path if 32 | applicable). 
33 | """ 34 | # set up the basic params of the data 35 | if os.path.exists(filepath): 36 | self.filepath = filepath 37 | else: 38 | raise IOError(str(filepath)+'\n does not exist!'+ 39 | 'Valid path to data file is needed!') 40 | self._nchannels = read_number_of_signals(self.filepath) 41 | 42 | numbers = [] 43 | names = [] 44 | for i in range(self._nchannels): 45 | numbers.append(i+1) 46 | # CTW: Here we should use the actual channel labels as 47 | # name -- requires additional cython code to interface 48 | # with the EDF library: 49 | names.append(str(i+1)) 50 | self._channel_info = np.rec.fromarrays( 51 | [numbers, names], names='number,name') 52 | 53 | def _get_nchannels(self): 54 | return self._nchannels 55 | 56 | def _get_nsamples(self, channel=None): 57 | if channel is None: 58 | # pick first channel 59 | channel = 0 60 | return read_number_of_samples(self.filepath, channel) 61 | 62 | def _get_channel_info(self): 63 | return self._channel_info 64 | 65 | def _get_samplerate(self, channel=None): 66 | if channel is None: 67 | # pick first channel 68 | channel = 0 69 | return read_samplerate(self.filepath, channel) 70 | 71 | def _get_annotations(self): 72 | return read_annotations(self.filepath) 73 | 74 | def _load_data(self,channels,event_offsets,dur_samp,offset_samp): 75 | """ 76 | """ 77 | # allocate for data 78 | eventdata = np.empty((len(channels),len(event_offsets),dur_samp), 79 | dtype=np.float64)*np.nan 80 | 81 | # loop over events 82 | # PBS: eventually move this to the cython file 83 | for c, channel in enumerate(channels): 84 | for e, ev_offset in enumerate(event_offsets): 85 | # set the range 86 | ssamp = offset_samp + ev_offset 87 | 88 | # read the data 89 | dat = read_samples(self.filepath, 90 | channel, 91 | ssamp, dur_samp) 92 | 93 | # check the ranges 94 | if len(dat) < dur_samp: 95 | raise IOError('Event with offset '+str(ev_offset)+ 96 | ' is outside the bounds of the data.') 97 | eventdata[c, e, :] = dat 98 | 99 | return eventdata 100 | 101 | 102 | -------------------------------------------------------------------------------- /ptsa/plotting/HCGSN128.dat: -------------------------------------------------------------------------------- 1 | 4.6351589e+01 3.8264632e+01 2.6846561e+01 2.1874325e+01 1.4581944e+01 0.0000000e+00 -3.8133998e+01 2.7825796e+01 1.6876327e+01 1.1872545e+01 0.0000000e+00 -1.4581944e+01 -3.6815100e+01 7.6337108e+00 0.0000000e+00 0.0000000e+00 0.0000000e+00 -1.1872545e+01 -2.1874325e+01 -3.6759195e+01 -7.6337108e+00 -1.6876327e+01 -2.6846561e+01 -3.6524362e+01 -2.7825796e+01 -3.8264632e+01 -4.6178909e+01 -5.3144352e+01 -6.0468013e+01 -7.5431834e+01 -1.0929177e+02 -4.6351589e+01 -5.7179011e+01 -6.6490962e+01 -7.6922594e+01 -8.7022947e+01 -1.1123182e+02 -6.1131219e+01 -8.3258549e+01 -8.9750790e+01 -9.6616476e+01 -1.1144028e+02 -6.9654787e+01 -7.9413888e+01 -1.0433176e+02 -1.0744971e+02 -1.1408635e+02 -6.9431184e+01 -8.6215277e+01 -1.2067732e+02 -1.2543764e+02 -1.2762566e+02 -1.3391069e+02 -1.4637742e+02 1.8000000e+02 -1.1170657e+02 -1.1960885e+02 -1.3364463e+02 -1.4104708e+02 -1.4581619e+02 -1.5683771e+02 1.8000000e+02 -1.2889243e+02 -1.3736791e+02 -1.4746039e+02 -1.5479273e+02 -1.6591948e+02 -1.4940375e+02 -1.5510218e+02 -1.6235024e+02 -1.7054614e+02 1.8000000e+02 -1.6556632e+02 -1.7241621e+02 1.8000000e+02 1.7054614e+02 1.6591948e+02 1.5683771e+02 1.4637742e+02 1.0929177e+02 1.8000000e+02 1.7241621e+02 1.6235024e+02 1.5479273e+02 1.4581619e+02 1.3391069e+02 1.1123182e+02 1.6556632e+02 1.5510218e+02 1.4746039e+02 1.4104708e+02 1.2762566e+02 
1.1144028e+02 1.4940375e+02 1.3736791e+02 1.3364463e+02 1.2543764e+02 1.1408635e+02 1.2889243e+02 1.1960885e+02 1.2067732e+02 1.0744971e+02 9.6616476e+01 8.7022947e+01 7.5431834e+01 3.8133998e+01 1.1170657e+02 1.0433176e+02 8.9750790e+01 7.6922594e+01 6.0468013e+01 3.6815100e+01 8.6215277e+01 7.9413888e+01 8.3258549e+01 6.6490962e+01 5.3144352e+01 3.6759195e+01 6.9431184e+01 6.9654787e+01 6.1131219e+01 5.7179011e+01 4.6178909e+01 3.6524362e+01 5.3521492e+01 2.9379528e+01 -2.9379528e+01 -5.3521492e+01 2 | 5.9922820e-01 4.8855253e-01 3.9041997e-01 3.1698474e-01 2.2653740e-01 1.4307050e-01 7.3410787e-02 5.4732752e-01 4.6285863e-01 3.9053301e-01 3.2024325e-01 2.2653740e-01 1.5618491e-01 5.3142251e-01 4.5362720e-01 3.9506715e-01 5.7453604e-01 3.9053301e-01 3.1698474e-01 2.6160576e-01 5.3142251e-01 4.6285863e-01 3.9041997e-01 3.3208048e-01 5.4732752e-01 4.8855253e-01 3.8996261e-01 3.1263428e-01 2.2774343e-01 1.4772838e-01 7.5536205e-02 5.9922820e-01 5.1490639e-01 3.9953030e-01 3.0135425e-01 2.2599662e-01 1.5766893e-01 6.3413851e-01 5.3861906e-01 4.1015635e-01 3.1391888e-01 2.6014064e-01 7.0597474e-01 6.5034519e-01 5.2650286e-01 4.0397092e-01 3.3601109e-01 7.8543916e-01 7.4376018e-01 5.0936931e-01 3.9520726e-01 3.1143044e-01 2.2119107e-01 1.4754097e-01 7.5944155e-02 7.2391529e-01 6.1979607e-01 4.9802123e-01 3.8986307e-01 2.9880775e-01 2.2326320e-01 2.5512195e-01 7.1847315e-01 6.1067114e-01 4.9520376e-01 3.8735638e-01 3.0796634e-01 6.9596342e-01 6.0323681e-01 4.9156715e-01 3.8353311e-01 3.2828767e-01 6.8647567e-01 5.9529709e-01 4.8275277e-01 3.8353311e-01 3.0796634e-01 2.2326320e-01 1.4754097e-01 7.5536205e-02 6.7878336e-01 5.9529709e-01 4.9156715e-01 3.8735638e-01 2.9880775e-01 2.2119107e-01 1.5766893e-01 6.8647567e-01 6.0323681e-01 4.9520376e-01 3.8986307e-01 3.1143044e-01 2.6014064e-01 6.9596342e-01 6.1067114e-01 4.9802123e-01 3.9520726e-01 3.3601109e-01 7.1847315e-01 6.1979607e-01 5.0936931e-01 4.0397092e-01 3.1391888e-01 2.2599662e-01 1.4772838e-01 7.3410787e-02 7.2391529e-01 5.2650286e-01 4.1015635e-01 3.0135425e-01 2.2774343e-01 1.5618491e-01 7.4376018e-01 6.5034519e-01 5.3861906e-01 3.9953030e-01 3.1263428e-01 2.6160576e-01 7.8543916e-01 7.0597474e-01 6.3413851e-01 5.1490639e-01 3.8996261e-01 3.3208048e-01 6.6716702e-01 7.2531599e-01 7.2531599e-01 6.6716702e-01 3 | -------------------------------------------------------------------------------- /ptsa/stats/nonparam.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | # global imports 11 | import numpy as np 12 | from scipy.stats import ttest_ind, ttest_1samp, norm 13 | import sys 14 | 15 | def gen_perms(dat, group_var, nperms): 16 | """ 17 | Generate permutations within a group variable, but across conditions. 18 | 19 | There is no need to sort your data as this method will shuffle the 20 | indices properly. 
21 | 22 | """ 23 | # grab the unique groups 24 | ugrp = np.unique(dat[group_var]) 25 | 26 | # save indices for each unique group 27 | grpind = {u:np.nonzero(dat[group_var]==u)[0] for u in ugrp} 28 | 29 | # set the base permutation indices for each unique group 30 | p_ind = {u:np.arange(len(grpind[u])) for u in ugrp} 31 | 32 | # start with actual data 33 | perms = [np.arange(len(dat))] 34 | 35 | # loop and shuffle for each perm 36 | for p in xrange(nperms): 37 | # set the starting indices 38 | ind = np.arange(len(dat)) 39 | 40 | # loop over each group 41 | for u in ugrp: 42 | # permute the indices for that group 43 | perm = np.random.permutation(p_ind[u]) 44 | 45 | # insert the permuted group indices into the base index 46 | np.put(ind,grpind[u],grpind[u][perm]) 47 | 48 | # append the shuffled perm to the list of permutations 49 | perms.append(ind) 50 | 51 | # turn the final perms into an array 52 | perms = np.array(perms) 53 | return perms 54 | 55 | 56 | 57 | def ttest_ind_z_one_sided(X,Y): 58 | # do the test 59 | t,p = ttest_ind(X,Y) 60 | 61 | # convert the pvals to one-sided tests based on the t 62 | p = (p/2.)+np.finfo(p.dtype).eps 63 | p[t>0] = 1-p[t>0] 64 | 65 | # convert the p to a z 66 | z = norm.ppf(p) 67 | 68 | return z 69 | 70 | 71 | def permutation_test(X, Y=None, parametric=True, iterations=1000): 72 | """ 73 | Perform a permutation test on paired or non-paired data. 74 | 75 | Observations must be on the first axis. 76 | """ 77 | # see if paired or not and concat data 78 | if Y is None: 79 | paired = True 80 | data = X 81 | nX = len(X) 82 | else: 83 | paired = False 84 | data = np.r_[X,Y] 85 | nX = len(X) 86 | nY = len(Y) 87 | 88 | # currently no non-parametric 89 | if not parametric: 90 | raise NotImplementedError("Currently only parametric stats are supported.") 91 | 92 | # perform stats 93 | z_boot = [] 94 | if paired: 95 | # paired stat 96 | raise NotImplementedError("Currently only non-paired stats are supported.") 97 | # first on actual data 98 | #t,p = ttest_1samp(data) 99 | else: 100 | # non-paired 101 | # first on actual data 102 | z = ttest_ind_z_one_sided(data[:nX],data[nX:]) 103 | 104 | # now on random shuffles 105 | sys.stdout.write('%d: '%iterations) 106 | for i in xrange(iterations): 107 | # shuffle it 108 | sys.stdout.write('%d '%i) 109 | sys.stdout.flush() 110 | np.random.shuffle(data) 111 | z_boot.append(ttest_ind_z_one_sided(data[:nX],data[nX:])) 112 | sys.stdout.write('\n') 113 | sys.stdout.flush() 114 | 115 | # convert z_boot to array 116 | z_boot = np.asarray(z_boot) 117 | 118 | # return those z values 119 | return z, z_boot 120 | -------------------------------------------------------------------------------- /examples/testLocs.dat: -------------------------------------------------------------------------------- 1 | 3.8241827e+01 3.3888206e+01 2.6304955e+01 1.8445913e+01 1.4506407e+01 0.0000000e+00 -3.9057101e+01 2.3656025e+01 1.8856055e+01 1.3994913e+01 0.0000000e+00 -1.4506407e+01 -3.6148575e+01 1.0592588e+01 6.4845245e+00 0.0000000e+00 0.0000000e+00 -6.4845245e+00 -1.3994913e+01 -1.8445913e+01 -3.3348566e+01 -1.0592588e+01 -1.8856055e+01 -2.6304955e+01 -3.4234344e+01 -2.3656025e+01 -3.3888206e+01 -4.2719008e+01 -5.3020925e+01 -5.9786194e+01 -7.5958075e+01 -1.1813245e+02 -3.8241827e+01 -4.7997295e+01 -6.1855338e+01 -7.2089465e+01 -8.7423024e+01 -1.1115585e+02 -5.5602412e+01 -6.9707597e+01 -7.9906235e+01 -9.1252163e+01 -1.1147459e+02 -6.5249879e+01 -7.7780619e+01 -8.8226249e+01 -1.0092497e+02 -1.1104064e+02 -8.3051033e+01 -1.1140607e+02 -1.1933948e+02 
-1.2726362e+02 -1.3353332e+02 -1.4911757e+02 1.8000000e+02 -1.2022233e+02 -1.2024175e+02 -1.2880516e+02 -1.3654079e+02 -1.4505509e+02 -1.5702729e+02 1.8000000e+02 -1.3293845e+02 -1.3636909e+02 -1.4687303e+02 -1.5491284e+02 -1.6292934e+02 1.8000000e+02 -1.4987398e+02 -1.5601667e+02 -1.6275088e+02 -1.6793193e+02 1.8000000e+02 -1.6553625e+02 -1.6703238e+02 1.8000000e+02 1.6793193e+02 1.6292934e+02 1.5702729e+02 1.4911757e+02 1.1813245e+02 1.8000000e+02 1.6703238e+02 1.6275088e+02 1.5491284e+02 1.4505509e+02 1.3353332e+02 1.1115585e+02 1.6553625e+02 1.5601667e+02 1.4687303e+02 1.3654079e+02 1.2726362e+02 1.1147459e+02 1.4987398e+02 1.3636909e+02 1.2880516e+02 1.1933948e+02 1.1104064e+02 1.3293845e+02 1.2024175e+02 1.1140607e+02 1.0092497e+02 9.1252163e+01 8.7423024e+01 7.5958075e+01 3.9057101e+01 1.2022233e+02 8.8226249e+01 7.9906235e+01 7.2089465e+01 5.9786194e+01 3.6148575e+01 8.3051033e+01 7.7780619e+01 6.9707597e+01 6.1855338e+01 5.3020925e+01 3.3348566e+01 6.5249879e+01 5.5602412e+01 4.7997295e+01 4.2719008e+01 3.4234344e+01 4.7268792e+01 2.4970025e+01 -2.4970025e+01 -4.7268792e+01 -0.0000000e+00 2 | 5.7435555e-01 4.9211244e-01 4.1054770e-01 3.2178968e-01 2.3895219e-01 1.4198533e-01 8.2493070e-02 5.5329363e-01 4.7702561e-01 3.9148601e-01 3.0904334e-01 2.3895219e-01 1.7013032e-01 5.3636357e-01 4.6312362e-01 3.9400020e-01 5.8014881e-01 4.6312362e-01 3.9148601e-01 3.2178968e-01 2.6367691e-01 5.3636357e-01 4.7702561e-01 4.1054770e-01 3.4964389e-01 5.5329363e-01 4.9211244e-01 4.2583549e-01 3.4473021e-01 2.4517683e-01 1.5524645e-01 9.5851512e-02 5.7435555e-01 5.1224224e-01 4.2206600e-01 3.4627907e-01 2.5844969e-01 1.9592503e-01 6.2724444e-01 5.3625084e-01 4.4802495e-01 3.8089340e-01 2.9590146e-01 7.2583061e-01 6.4143205e-01 5.4563936e-01 4.5766663e-01 3.9773419e-01 7.4288762e-01 5.5534027e-01 4.7832833e-01 3.8447455e-01 2.8655722e-01 1.8228612e-01 1.1740025e-01 7.4541467e-01 6.3296406e-01 5.6050470e-01 4.7202527e-01 3.7322426e-01 2.8383293e-01 2.1310630e-01 7.3277406e-01 6.2251579e-01 5.5416077e-01 4.6633612e-01 3.7536174e-01 2.8985751e-01 7.1178026e-01 6.1282201e-01 5.4332190e-01 4.6577728e-01 4.0135547e-01 7.0192122e-01 6.2854804e-01 5.3094877e-01 4.6577728e-01 3.7536174e-01 2.8383293e-01 1.8228612e-01 9.5851512e-02 6.9093251e-01 6.2854804e-01 5.4332190e-01 4.6633612e-01 3.7322426e-01 2.8655722e-01 1.9592503e-01 7.0192122e-01 6.1282201e-01 5.5416077e-01 4.7202527e-01 3.8447455e-01 2.9590146e-01 7.1178026e-01 6.2251579e-01 5.6050470e-01 4.7832833e-01 3.9773419e-01 7.3277406e-01 6.3296406e-01 5.5534027e-01 4.5766663e-01 3.8089340e-01 2.5844969e-01 1.5524645e-01 8.2493070e-02 7.4541467e-01 5.4563936e-01 4.4802495e-01 3.4627907e-01 2.4517683e-01 1.7013032e-01 7.4288762e-01 6.4143205e-01 5.3625084e-01 4.2206600e-01 3.4473021e-01 2.6367691e-01 7.2583061e-01 6.2724444e-01 5.1224224e-01 4.2583549e-01 3.4964389e-01 6.9491142e-01 7.5332566e-01 7.5332566e-01 6.9491142e-01 0.0000000e+00 3 | -------------------------------------------------------------------------------- /ptsa/plotting/GSN129.dat: -------------------------------------------------------------------------------- 1 | 3.8241827e+01 3.3888206e+01 2.6304955e+01 1.8445913e+01 1.4506407e+01 0.0000000e+00 -3.9057101e+01 2.3656025e+01 1.8856055e+01 1.3994913e+01 0.0000000e+00 -1.4506407e+01 -3.6148575e+01 1.0592588e+01 6.4845245e+00 0.0000000e+00 0.0000000e+00 -6.4845245e+00 -1.3994913e+01 -1.8445913e+01 -3.3348566e+01 -1.0592588e+01 -1.8856055e+01 -2.6304955e+01 -3.4234344e+01 -2.3656025e+01 -3.3888206e+01 -4.2719008e+01 -5.3020925e+01 
-5.9786194e+01 -7.5958075e+01 -1.1813245e+02 -3.8241827e+01 -4.7997295e+01 -6.1855338e+01 -7.2089465e+01 -8.7423024e+01 -1.1115585e+02 -5.5602412e+01 -6.9707597e+01 -7.9906235e+01 -9.1252163e+01 -1.1147459e+02 -6.5249879e+01 -7.7780619e+01 -8.8226249e+01 -1.0092497e+02 -1.1104064e+02 -8.3051033e+01 -1.1140607e+02 -1.1933948e+02 -1.2726362e+02 -1.3353332e+02 -1.4911757e+02 1.8000000e+02 -1.2022233e+02 -1.2024175e+02 -1.2880516e+02 -1.3654079e+02 -1.4505509e+02 -1.5702729e+02 1.8000000e+02 -1.3293845e+02 -1.3636909e+02 -1.4687303e+02 -1.5491284e+02 -1.6292934e+02 1.8000000e+02 -1.4987398e+02 -1.5601667e+02 -1.6275088e+02 -1.6793193e+02 1.8000000e+02 -1.6553625e+02 -1.6703238e+02 1.8000000e+02 1.6793193e+02 1.6292934e+02 1.5702729e+02 1.4911757e+02 1.1813245e+02 1.8000000e+02 1.6703238e+02 1.6275088e+02 1.5491284e+02 1.4505509e+02 1.3353332e+02 1.1115585e+02 1.6553625e+02 1.5601667e+02 1.4687303e+02 1.3654079e+02 1.2726362e+02 1.1147459e+02 1.4987398e+02 1.3636909e+02 1.2880516e+02 1.1933948e+02 1.1104064e+02 1.3293845e+02 1.2024175e+02 1.1140607e+02 1.0092497e+02 9.1252163e+01 8.7423024e+01 7.5958075e+01 3.9057101e+01 1.2022233e+02 8.8226249e+01 7.9906235e+01 7.2089465e+01 5.9786194e+01 3.6148575e+01 8.3051033e+01 7.7780619e+01 6.9707597e+01 6.1855338e+01 5.3020925e+01 3.3348566e+01 6.5249879e+01 5.5602412e+01 4.7997295e+01 4.2719008e+01 3.4234344e+01 4.7268792e+01 2.4970025e+01 -2.4970025e+01 -4.7268792e+01 -0.0000000e+00 2 | 5.7435555e-01 4.9211244e-01 4.1054770e-01 3.2178968e-01 2.3895219e-01 1.4198533e-01 8.2493070e-02 5.5329363e-01 4.7702561e-01 3.9148601e-01 3.0904334e-01 2.3895219e-01 1.7013032e-01 5.3636357e-01 4.6312362e-01 3.9400020e-01 5.8014881e-01 4.6312362e-01 3.9148601e-01 3.2178968e-01 2.6367691e-01 5.3636357e-01 4.7702561e-01 4.1054770e-01 3.4964389e-01 5.5329363e-01 4.9211244e-01 4.2583549e-01 3.4473021e-01 2.4517683e-01 1.5524645e-01 9.5851512e-02 5.7435555e-01 5.1224224e-01 4.2206600e-01 3.4627907e-01 2.5844969e-01 1.9592503e-01 6.2724444e-01 5.3625084e-01 4.4802495e-01 3.8089340e-01 2.9590146e-01 7.2583061e-01 6.4143205e-01 5.4563936e-01 4.5766663e-01 3.9773419e-01 7.4288762e-01 5.5534027e-01 4.7832833e-01 3.8447455e-01 2.8655722e-01 1.8228612e-01 1.1740025e-01 7.4541467e-01 6.3296406e-01 5.6050470e-01 4.7202527e-01 3.7322426e-01 2.8383293e-01 2.1310630e-01 7.3277406e-01 6.2251579e-01 5.5416077e-01 4.6633612e-01 3.7536174e-01 2.8985751e-01 7.1178026e-01 6.1282201e-01 5.4332190e-01 4.6577728e-01 4.0135547e-01 7.0192122e-01 6.2854804e-01 5.3094877e-01 4.6577728e-01 3.7536174e-01 2.8383293e-01 1.8228612e-01 9.5851512e-02 6.9093251e-01 6.2854804e-01 5.4332190e-01 4.6633612e-01 3.7322426e-01 2.8655722e-01 1.9592503e-01 7.0192122e-01 6.1282201e-01 5.5416077e-01 4.7202527e-01 3.8447455e-01 2.9590146e-01 7.1178026e-01 6.2251579e-01 5.6050470e-01 4.7832833e-01 3.9773419e-01 7.3277406e-01 6.3296406e-01 5.5534027e-01 4.5766663e-01 3.8089340e-01 2.5844969e-01 1.5524645e-01 8.2493070e-02 7.4541467e-01 5.4563936e-01 4.4802495e-01 3.4627907e-01 2.4517683e-01 1.7013032e-01 7.4288762e-01 6.4143205e-01 5.3625084e-01 4.2206600e-01 3.4473021e-01 2.6367691e-01 7.2583061e-01 6.2724444e-01 5.1224224e-01 4.2583549e-01 3.4964389e-01 6.9491142e-01 7.5332566e-01 7.5332566e-01 6.9491142e-01 0.0000000e+00 3 | -------------------------------------------------------------------------------- /ptsa/data/datawrapper.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 
et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | import numpy as np 10 | class DataWrapper(object): 11 | """ 12 | Base class to provide interface to timeseries data. 13 | """ 14 | def _load_data(self,channel,eventOffsets,dur_samp,offset_samp): 15 | raise NotImplementedError 16 | 17 | def get_event_data(self,channel,eventOffsets, 18 | dur,offset,buf, 19 | resampledRate=None, 20 | filtFreq=None,filtType='stop',filtOrder=4, 21 | keepBuffer=False): 22 | """ 23 | Return a TimeSeries containing data for the specified channel 24 | in the form [events,duration]. 25 | 26 | Parameters 27 | ---------- 28 | channel: Channel to load data from 29 | eventOffsets: Array of event offsets (in samples) into the data, 30 | specifying each event time 31 | dur: Duration in seconds of the data to return. 32 | offset: Amount in seconds to offset that data around the event. 33 | buf: Extra buffer in seconds to add when doing filtering to avoid edge effects. 34 | resampledRate: New samplerate to resample the data to after loading. 35 | filtFreq: Frequency specification for filter (depends on the filter type). 36 | filtType: Type of filter to run on the data. 37 | filtOrder: Order of the filter. 38 | keepBuffer: Whether to keep the buffer when returning the data. 39 | """ 40 | 41 | # set event durations from rate 42 | # get the samplesize in seconds 43 | samplesize = 1./self.samplerate 44 | # get the number of buffer samples 45 | buf_samp = int(np.ceil(buf/samplesize)) 46 | # calculate the offset in samples that contains the desired offset 47 | offset_samp = int(np.ceil((np.abs(offset)-samplesize*.5)/samplesize)*np.sign(offset)) 48 | 49 | # finally get the duration necessary to cover the desired span 50 | dur_samp = int(np.ceil((dur+offset - samplesize*.5)/samplesize)) - offset_samp + 1 51 | 52 | # add in the buffer 53 | dur_samp += 2*buf_samp 54 | offset_samp -= buf_samp 55 | 56 | # load the timeseries (this must be implemented by subclasses) 57 | eventdata = self._load_data(channel,eventOffsets,dur_samp,offset_samp) 58 | 59 | # calc the time range 60 | sampStart = offset_samp*samplesize 61 | sampEnd = sampStart + (dur_samp-1)*samplesize 62 | timeRange = np.linspace(sampStart,sampEnd,dur_samp) 63 | 64 | # make it a timeseries (assumes ptsa's Dim and TimeSeries classes are in scope) 65 | # if isinstance(eventInfo,TsEvents): 66 | # dims = [Dim('event', eventInfo.data, 'event'), 67 | # Dim('time',timeRange)] 68 | # else: 69 | # dims = [Dim('eventOffsets', eventOffsets, 'samples'), 70 | # Dim('time',timeRange)] 71 | dims = [Dim(eventOffsets,'eventOffsets'), 72 | Dim(timeRange,'time')] 73 | eventdata = TimeSeries(np.asarray(eventdata), 74 | dims, 75 | tdim='time', 76 | samplerate=self.samplerate) 77 | 78 | 79 | 80 | 81 | # filter if desired 82 | if not(filtFreq is None): 83 | # filter that data 84 | eventdata = eventdata.filter(filtFreq,filtType=filtType,order=filtOrder) 85 | 86 | # resample if desired 87 | if not(resampledRate is None) and \ 88 | not(resampledRate == eventdata.samplerate): 89 | # resample the data 90 | eventdata = eventdata.resampled(resampledRate) 91 | 92 | # remove the buffer and set the time range 93 | if buf > 0 and not(keepBuffer): 94 | # remove the buffer 95 | eventdata = eventdata.removeBuf(buf) 96 | 97 | # return the timeseries 98 | return eventdata 99 | -------------------------------------------------------------------------------- 
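The sample-window arithmetic in get_event_data above is the easiest part to misread, so here is a small worked sketch of what those lines compute for one hypothetical setting (the numbers below are illustrative only and are not taken from the package):

    import numpy as np

    samplerate = 200.                  # Hz (hypothetical)
    dur, offset, buf = 1.0, -0.5, 1.0  # seconds: a 1 s window starting 0.5 s before each event

    samplesize = 1./samplerate                       # 0.005 s per sample
    buf_samp = int(np.ceil(buf/samplesize))          # 200 buffer samples on each side
    offset_samp = int(np.ceil((np.abs(offset)-samplesize*.5)/samplesize)*np.sign(offset))
    # offset_samp == -100, i.e. start 0.5 s before the event
    dur_samp = int(np.ceil((dur+offset - samplesize*.5)/samplesize)) - offset_samp + 1
    # dur_samp == 201 samples spanning -0.5 s to +0.5 s inclusive
    dur_samp += 2*buf_samp        # 601 samples once the buffer is added
    offset_samp -= buf_samp       # reading now starts 300 samples (1.5 s) before the event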
/docs/sphinxexts/numpydoc.py: -------------------------------------------------------------------------------- 1 | """ 2 | ======== 3 | numpydoc 4 | ======== 5 | 6 | Sphinx extension that handles docstrings in the Numpy standard format. [1] 7 | 8 | It will: 9 | 10 | - Convert Parameters etc. sections to field lists. 11 | - Convert See Also section to a See also entry. 12 | - Renumber references. 13 | - Extract the signature from the docstring, if it can't be determined otherwise. 14 | 15 | .. [1] http://projects.scipy.org/numpy/wiki/CodingStyleGuidelines#docstring-standard 16 | 17 | """ 18 | 19 | import os, re, pydoc 20 | from docscrape_sphinx import get_doc_object, SphinxDocString 21 | import inspect 22 | 23 | def mangle_docstrings(app, what, name, obj, options, lines, 24 | reference_offset=[0]): 25 | if what == 'module': 26 | # Strip top title 27 | title_re = re.compile(r'^\s*[#*=]{4,}\n[a-z0-9 -]+\n[#*=]{4,}\s*', 28 | re.I|re.S) 29 | lines[:] = title_re.sub('', "\n".join(lines)).split("\n") 30 | else: 31 | doc = get_doc_object(obj, what, "\n".join(lines)) 32 | lines[:] = str(doc).split("\n") 33 | 34 | if app.config.numpydoc_edit_link and hasattr(obj, '__name__') and \ 35 | obj.__name__: 36 | if hasattr(obj, '__module__'): 37 | v = dict(full_name="%s.%s" % (obj.__module__, obj.__name__)) 38 | else: 39 | v = dict(full_name=obj.__name__) 40 | lines += ['', '.. htmlonly::', ''] 41 | lines += [' %s' % x for x in 42 | (app.config.numpydoc_edit_link % v).split("\n")] 43 | 44 | # replace reference numbers so that there are no duplicates 45 | references = [] 46 | for l in lines: 47 | l = l.strip() 48 | if l.startswith('.. ['): 49 | try: 50 | references.append(int(l[len('.. ['):l.index(']')])) 51 | except ValueError: 52 | print "WARNING: invalid reference in %s docstring" % name 53 | 54 | # Start renaming from the biggest number, otherwise we may 55 | # overwrite references. 56 | references.sort() 57 | if references: 58 | for i, line in enumerate(lines): 59 | for r in references: 60 | new_r = reference_offset[0] + r 61 | lines[i] = lines[i].replace('[%d]_' % r, 62 | '[%d]_' % new_r) 63 | lines[i] = lines[i].replace('.. [%d]' % r, 64 | '.. 
[%d]' % new_r) 65 | 66 | reference_offset[0] += len(references) 67 | 68 | def mangle_signature(app, what, name, obj, options, sig, retann): 69 | # Do not try to inspect classes that don't define `__init__` 70 | if (inspect.isclass(obj) and 71 | 'initializes x; see ' in pydoc.getdoc(obj.__init__)): 72 | return '', '' 73 | 74 | if not (callable(obj) or hasattr(obj, '__argspec_is_invalid_')): return 75 | if not hasattr(obj, '__doc__'): return 76 | 77 | doc = SphinxDocString(pydoc.getdoc(obj)) 78 | if doc['Signature']: 79 | sig = re.sub("^[^(]*", "", doc['Signature']) 80 | return sig, '' 81 | 82 | def initialize(app): 83 | try: 84 | app.connect('autodoc-process-signature', mangle_signature) 85 | except: 86 | monkeypatch_sphinx_ext_autodoc() 87 | 88 | def setup(app, get_doc_object_=get_doc_object): 89 | global get_doc_object 90 | get_doc_object = get_doc_object_ 91 | 92 | app.connect('autodoc-process-docstring', mangle_docstrings) 93 | app.connect('builder-inited', initialize) 94 | app.add_config_value('numpydoc_edit_link', None, True) 95 | 96 | #------------------------------------------------------------------------------ 97 | # Monkeypatch sphinx.ext.autodoc to accept argspecless autodocs (Sphinx < 0.5) 98 | #------------------------------------------------------------------------------ 99 | 100 | def monkeypatch_sphinx_ext_autodoc(): 101 | global _original_format_signature 102 | import sphinx.ext.autodoc 103 | 104 | if sphinx.ext.autodoc.format_signature is our_format_signature: 105 | return 106 | 107 | print "[numpydoc] Monkeypatching sphinx.ext.autodoc ..." 108 | _original_format_signature = sphinx.ext.autodoc.format_signature 109 | sphinx.ext.autodoc.format_signature = our_format_signature 110 | 111 | def our_format_signature(what, obj): 112 | r = mangle_signature(None, what, None, obj, None, None, None) 113 | if r is not None: 114 | return r[0] 115 | else: 116 | return _original_format_signature(what, obj) 117 | -------------------------------------------------------------------------------- /docs/devel/gitwash/patching.rst: -------------------------------------------------------------------------------- 1 | ================ 2 | Making a patch 3 | ================ 4 | 5 | You've discovered a bug or something else you want to change 6 | in `ptsa`_ .. |emdash| excellent! 7 | 8 | You've worked out a way to fix it |emdash| even better! 9 | 10 | You want to tell us about it |emdash| best of all! 11 | 12 | The easiest way is to make a *patch* or set of patches. Here 13 | we explain how. Making a patch is the simplest and quickest, 14 | but if you're going to be doing anything more than simple 15 | quick things, please consider following the 16 | :ref:`git-development` model instead. 17 | 18 | .. 
_making-patches: 19 | 20 | Making patches 21 | ============== 22 | 23 | Overview 24 | -------- 25 | 26 | :: 27 | 28 | # tell git who you are 29 | git config --global user.email you@yourdomain.example.com 30 | git config --global user.name "Your Name Comes Here" 31 | # get the repository if you don't have it 32 | git clone git://github.com/compmem/ptsa.git 33 | # make a branch for your patching 34 | cd ptsa 35 | git branch the-fix-im-thinking-of 36 | git checkout the-fix-im-thinking-of 37 | # hack, hack, hack 38 | # Tell git about any new files you've made 39 | git add somewhere/tests/test_my_bug.py 40 | # commit work in progress as you go 41 | git commit -am 'BF - added tests for Funny bug' 42 | # hack hack, hack 43 | git commit -am 'BF - added fix for Funny bug' 44 | # make the patch files 45 | git format-patch -M -C master 46 | 47 | Then, send the generated patch files to the `ptsa 48 | mailing list`_ |emdash| where we will thank you warmly. 49 | 50 | In detail 51 | --------- 52 | 53 | #. Tell git who you are so it can label the commits you've 54 | made:: 55 | 56 | git config --global user.email you@yourdomain.example.com 57 | git config --global user.name "Your Name Comes Here" 58 | 59 | #. If you don't already have one, clone a copy of the 60 | `ptsa`_ repository:: 61 | 62 | git clone git://github.com/compmem/ptsa.git 63 | cd ptsa 64 | 65 | #. Make a 'feature branch'. This will be where you work on 66 | your bug fix. It's nice and safe and leaves you with 67 | access to an unmodified copy of the code in the main 68 | branch:: 69 | 70 | git branch the-fix-im-thinking-of 71 | git checkout the-fix-im-thinking-of 72 | 73 | #. Do some edits, and commit them as you go:: 74 | 75 | # hack, hack, hack 76 | # Tell git about any new files you've made 77 | git add somewhere/tests/test_my_bug.py 78 | # commit work in progress as you go 79 | git commit -am 'BF - added tests for Funny bug' 80 | # hack hack, hack 81 | git commit -am 'BF - added fix for Funny bug' 82 | 83 | Note the ``-am`` options to ``commit``. The ``m`` flag just 84 | signals that you're going to type a message on the command 85 | line. The ``a`` flag |emdash| you can just take on faith |emdash| 86 | or see `why the -a flag?`_. 87 | 88 | #. When you have finished, check you have committed all your 89 | changes:: 90 | 91 | git status 92 | 93 | #. Finally, make your commits into patches. You want all the 94 | commits since you branched from the ``master`` branch:: 95 | 96 | git format-patch -M -C master 97 | 98 | You will now have several files named for the commits:: 99 | 100 | 0001-BF-added-tests-for-Funny-bug.patch 101 | 0002-BF-added-fix-for-Funny-bug.patch 102 | 103 | Send these files to the `ptsa mailing list`_. 104 | 105 | When you are done, to switch back to the main copy of the 106 | code, just return to the ``master`` branch:: 107 | 108 | git checkout master 109 | 110 | Moving from patching to development 111 | =================================== 112 | 113 | If you find you have done some patches, and you have one or 114 | more feature branches, you will probably want to switch to 115 | development mode. You can do this with the repository you 116 | have. 117 | 118 | Fork the `ptsa`_ repository on github |emdash| :ref:`forking`. 
119 | Then:: 120 | 121 | # checkout and refresh master branch from main repo 122 | git checkout master 123 | git pull origin master 124 | # rename pointer to main repository to 'upstream' 125 | git remote rename origin upstream 126 | # point your repo to default read / write to your fork on github 127 | git remote add origin git@github.com:your-user-name/ptsa.git 128 | # push up any branches you've made and want to keep 129 | git push origin the-fix-im-thinking-of 130 | 131 | Then you can, if you want, follow the 132 | :ref:`development-workflow`. 133 | 134 | .. include:: links.inc 135 | -------------------------------------------------------------------------------- /docs/index.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst -*- 2 | .. ex: set sts=4 ts=4 sw=4 et tw=79: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 4 | # 5 | # See COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 9 | 10 | **ptsa** (pronounced pizza) is a Python_ module for performing time series 11 | analysis. Although it is specifically designed with neural data in 12 | mind (EEG, MEG, fMRI, etc. ...), the code should be applicable to almost 13 | any type of time series. 14 | 15 | .. _Python: http://www.python.org 16 | 17 | **ptsa** stands for **P**\ ython\ **T**\ ime **S**\ eries **A**\ nalysis. 18 | 19 | 20 | .. News 21 | .. ==== 22 | 23 | .. None yet... 24 | 25 | Documentation 26 | ============= 27 | 28 | * :ref:`Main data structures in ptsa `: The documentation 29 | for AttrArray, Dim, and DimArray -- powerful data structures based 30 | on NumPy ndarrays. 31 | 32 | * :ref:`Analysis of time series data `: The documentation 33 | of TimeSeries, a subclass of DimArray for storing and analyzing data 34 | with a time dimension. 35 | 36 | .. * `Installation Instructions`: to come... 37 | * :ref:`Developer Guidelines ` (information for people 38 | contributing code) 39 | 40 | .. * `API Reference`_ (comprehensive and up-to-date information about the details 41 | .. of the implementation) 42 | * :ref:`genindex` (access by keywords) 43 | * :ref:`search` (online and offline full-text search) 44 | 45 | .. _API Reference: api/index.html 46 | 47 | .. * `PTSA Manual (PDF)`_ (all documentation except for developer guidelines 48 | .. and API reference) 49 | .. * `Developer Guidelines (PDF)`_ 50 | 51 | .. _Main data structures in PTSA: PTSA-Manual.pdf 52 | .. _PTSA Manual (PDF): PTSA-Manual.pdf 53 | .. _Developer Guidelines (PDF): PTSA-DevGuide.pdf 54 | 55 | 56 | License 57 | ======= 58 | 59 | PTSA is free-software and covered by the `GPLv3 License`_. 60 | This applies to all source code, documentation, examples and snippets inside 61 | the source distribution (including this website). 62 | 63 | .. Please see the 64 | .. :ref:`appendix of the manual ` for the copyright statement and the 65 | .. full text of the license. 66 | 67 | .. _GPLv3 License: http://www.gnu.org/licenses/gpl-3.0.html 68 | .. .. _appendix of the manual: manual.html#license 69 | 70 | 71 | 72 | .. Download 73 | .. ======== 74 | 75 | .. Binary packages 76 | .. --------------- 77 | 78 | 79 | 80 | .. Source code 81 | .. ----------- 82 | 83 | .. Source code tarballs of PTSA releases are available from the `download 84 | .. area`_. Alternatively, one can also download a tarball of the latest 85 | .. development snapshot_ (i.e. 
the current state of the *master* branch of the 86 | .. PTSA source code repository). 87 | 88 | .. To get access to both the full PTSA history and the latest 89 | .. development code, the PTSA Git_ repository is publicly available. To view the 90 | .. repository, please point your webbrowser to gitweb: 91 | .. http://tbd 92 | 93 | .. To clone (aka checkout) the PTSA repository simply do: 94 | 95 | .. :: 96 | 97 | .. git clone git://tbd 98 | 99 | .. After a short while you will have a `ptsa` directory below your current 100 | .. working directory, that contains the PTSA repository. 101 | 102 | .. More detailed instructions on :ref:`installation requirements ` 103 | .. and on how to :ref:`build PTSA from source ` are provided 104 | .. in the manual. 105 | 106 | 107 | .. .. _download area: http://tbd 108 | .. .. _Git: http://git.or.cz/ 109 | .. .. _snapshot: http://tbd 110 | 111 | 112 | .. Support 113 | .. ======= 114 | 115 | .. If you have problems installing the software or questions about usage, 116 | .. documentation or something else related to PTSA, you can post to the PTSA 117 | .. mailing list: 118 | 119 | .. :Mailing list: tbd@tbd [subscription_, 120 | .. archive_] 121 | 122 | .. All users should subscribe to the mailing list. PTSA is still a young project 123 | .. that is under heavy development. Significant modifications (hopefully 124 | .. improvements) are very likely to happen frequently. The mailing list is the 125 | .. preferred way to announce such changes. The mailing list archive can also be 126 | .. searched using the *mailing list archive search* located in the sidebar of the 127 | .. PTSA home page. 128 | 129 | .. .. _subscription: http://tbd 130 | .. .. _archive: http://tbd 131 | 132 | 133 | 134 | .. Publications 135 | .. ============ 136 | 137 | .. .. .. include:: publications.txt 138 | 139 | 140 | .. Authors & Contributors 141 | .. ====================== 142 | 143 | .. .. .. include:: authors.txt 144 | 145 | 146 | .. Similar or Related Projects 147 | .. =========================== 148 | -------------------------------------------------------------------------------- /docs/sphinxexts/docscrape_sphinx.py: -------------------------------------------------------------------------------- 1 | import re, inspect, textwrap, pydoc 2 | from docscrape import NumpyDocString, FunctionDoc, ClassDoc 3 | 4 | class SphinxDocString(NumpyDocString): 5 | # string conversion routines 6 | def _str_header(self, name, symbol='`'): 7 | return ['.. 
rubric:: ' + name, ''] 8 | 9 | def _str_field_list(self, name): 10 | return [':' + name + ':'] 11 | 12 | def _str_indent(self, doc, indent=4): 13 | out = [] 14 | for line in doc: 15 | out += [' '*indent + line] 16 | return out 17 | 18 | def _str_signature(self): 19 | return [''] 20 | if self['Signature']: 21 | return ['``%s``' % self['Signature']] + [''] 22 | else: 23 | return [''] 24 | 25 | def _str_summary(self): 26 | return self['Summary'] + [''] 27 | 28 | def _str_extended_summary(self): 29 | return self['Extended Summary'] + [''] 30 | 31 | def _str_param_list(self, name): 32 | out = [] 33 | if self[name]: 34 | out += self._str_field_list(name) 35 | out += [''] 36 | for param,param_type,desc in self[name]: 37 | out += self._str_indent(['**%s** : %s' % (param.strip(), 38 | param_type)]) 39 | out += [''] 40 | out += self._str_indent(desc,8) 41 | out += [''] 42 | return out 43 | 44 | def _str_section(self, name): 45 | out = [] 46 | if self[name]: 47 | out += self._str_header(name) 48 | out += [''] 49 | content = textwrap.dedent("\n".join(self[name])).split("\n") 50 | out += content 51 | out += [''] 52 | return out 53 | 54 | def _str_see_also(self, func_role): 55 | out = [] 56 | if self['See Also']: 57 | see_also = super(SphinxDocString, self)._str_see_also(func_role) 58 | out = ['.. seealso::', ''] 59 | out += self._str_indent(see_also[2:]) 60 | return out 61 | 62 | def _str_warnings(self): 63 | out = [] 64 | if self['Warnings']: 65 | out = ['.. warning::', ''] 66 | out += self._str_indent(self['Warnings']) 67 | return out 68 | 69 | def _str_index(self): 70 | idx = self['index'] 71 | out = [] 72 | if len(idx) == 0: 73 | return out 74 | 75 | out += ['.. index:: %s' % idx.get('default','')] 76 | for section, references in idx.iteritems(): 77 | if section == 'default': 78 | continue 79 | elif section == 'refguide': 80 | out += [' single: %s' % (', '.join(references))] 81 | else: 82 | out += [' %s: %s' % (section, ','.join(references))] 83 | return out 84 | 85 | def _str_references(self): 86 | out = [] 87 | if self['References']: 88 | out += self._str_header('References') 89 | if isinstance(self['References'], str): 90 | self['References'] = [self['References']] 91 | out.extend(self['References']) 92 | out += [''] 93 | return out 94 | 95 | def __str__(self, indent=0, func_role="obj"): 96 | out = [] 97 | out += self._str_signature() 98 | out += self._str_index() + [''] 99 | out += self._str_summary() 100 | out += self._str_extended_summary() 101 | for param_list in ('Parameters', 'Attributes', 'Methods', 102 | 'Returns','Raises'): 103 | out += self._str_param_list(param_list) 104 | out += self._str_warnings() 105 | out += self._str_see_also(func_role) 106 | out += self._str_section('Notes') 107 | out += self._str_references() 108 | out += self._str_section('Examples') 109 | out = self._str_indent(out,indent) 110 | return '\n'.join(out) 111 | 112 | class SphinxFunctionDoc(SphinxDocString, FunctionDoc): 113 | pass 114 | 115 | class SphinxClassDoc(SphinxDocString, ClassDoc): 116 | pass 117 | 118 | def get_doc_object(obj, what=None, doc=None): 119 | if what is None: 120 | if inspect.isclass(obj): 121 | what = 'class' 122 | elif inspect.ismodule(obj): 123 | what = 'module' 124 | elif callable(obj): 125 | what = 'function' 126 | else: 127 | what = 'object' 128 | if what == 'class': 129 | return SphinxClassDoc(obj, '', func_doc=SphinxFunctionDoc, doc=doc) 130 | elif what in ('function', 'method'): 131 | return SphinxFunctionDoc(obj, '', doc=doc) 132 | else: 133 | if doc is None: 134 | doc = 
pydoc.getdoc(obj) 135 | return SphinxDocString(doc) 136 | 137 | -------------------------------------------------------------------------------- /docs/api/epydoc.conf: -------------------------------------------------------------------------------- 1 | [epydoc] # Epydoc section marker (required by ConfigParser) 2 | 3 | # The list of objects to document. Objects can be named using 4 | # dotted names, module filenames, or package directory names. 5 | # Alases for this option include "objects" and "values". 6 | modules: ptsa 7 | 8 | # The type of output that should be generated. Should be one 9 | # of: html, text, latex, dvi, ps, pdf. 10 | output: html 11 | 12 | # The path to the output directory. May be relative or absolute. 13 | target: build/html/api 14 | 15 | # An integer indicating how verbose epydoc should be. The default 16 | # value is 0; negative values will supress warnings and errors; 17 | # positive values will give more verbose output. 18 | verbosity: 0 19 | 20 | # A boolean value indicating that Epydoc should show a tracaback 21 | # in case of unexpected error. By default don't show tracebacks 22 | debug: 0 23 | 24 | # If True, don't try to use colors or cursor control when doing 25 | # textual output. The default False assumes a rich text prompt 26 | simple-term: 0 27 | 28 | 29 | ### Generation options 30 | 31 | # The default markup language for docstrings, for modules that do 32 | # not define __docformat__. Defaults to epytext. 33 | docformat: restructuredtext 34 | 35 | # Whether or not parsing should be used to examine objects. 36 | parse: yes 37 | 38 | # Whether or not introspection should be used to examine objects. 39 | introspect: no 40 | 41 | # Don't examine in any way the modules whose dotted name match this 42 | # regular expression pattern. 43 | #exclude 44 | 45 | # Don't perform introspection on the modules whose dotted name match this 46 | # regular expression pattern. 47 | #exclude-introspect 48 | 49 | # Don't perform parsing on the modules whose dotted name match this 50 | # regular expression pattern. 51 | #exclude-parse 52 | 53 | # The format for showing inheritance objects. 54 | # It should be one of: 'grouped', 'listed', 'included'. 55 | inheritance: listed 56 | 57 | # Whether or not to inclue private variables. (Even if included, 58 | # private variables will be hidden by default.) 59 | private: yes 60 | 61 | # Whether or not to list each module's imports. 62 | imports: yes 63 | 64 | # Whether or not to include syntax highlighted source code in 65 | # the output (HTML only). 66 | sourcecode: yes 67 | 68 | # Whether or not to includea a page with Epydoc log, containing 69 | # effective option at the time of generation and the reported logs. 70 | include-log: no 71 | 72 | 73 | ### Output options 74 | 75 | # The documented project's name. 76 | name: ptsa: Python Time Series Analysis 77 | 78 | # The CSS stylesheet for HTML output. Can be the name of a builtin 79 | # stylesheet, or the name of a file. 80 | css: white 81 | 82 | # The documented project's URL. 83 | url: http://ptsa.sourceforge.net 84 | 85 | # HTML code for the project link in the navigation bar. If left 86 | # unspecified, the project link will be generated based on the 87 | # project's name and URL. 88 | #link: My Cool Project 89 | 90 | # The "top" page for the documentation. Can be a URL, the name 91 | # of a module or class, or one of the special names "trees.html", 92 | # "indices.html", or "help.html" 93 | #top: os.path 94 | 95 | # An alternative help file. 
The named file should contain the 96 | # body of an HTML file; navigation bars will be added to it. 97 | #help: my_helpfile.html 98 | 99 | # Whether or not to include a frames-based table of contents. 100 | frames: yes 101 | 102 | # Whether each class should be listed in its own section when 103 | # generating LaTeX or PDF output. 104 | separate-classes: no 105 | 106 | 107 | ### API linking options 108 | 109 | # Define a new API document. A new interpreted text role 110 | # will be created 111 | #external-api: epydoc 112 | 113 | # Use the records in this file to resolve objects in the API named NAME. 114 | #external-api-file: epydoc:api-objects.txt 115 | 116 | # Use this URL prefix to configure the string returned for external API. 117 | #external-api-root: epydoc:http://epydoc.sourceforge.net/api 118 | 119 | 120 | ### Graph options 121 | 122 | # The list of graph types that should be automatically included 123 | # in the output. Graphs are generated using the Graphviz "dot" 124 | # executable. Graph types include: "classtree", "callgraph", 125 | # "umlclass". Use "all" to include all graph types 126 | graph: all 127 | 128 | # The path to the Graphviz "dot" executable, used to generate 129 | # graphs. 130 | dotpath: /usr/bin/dot 131 | 132 | # The name of one or more pstat files (generated by the profile 133 | # or hotshot module). These are used to generate call graphs. 134 | pstat: build/main.pstats 135 | 136 | # Specify the font used to generate Graphviz graphs. 137 | # (e.g., helvetica or times). 138 | graph-font: Helvetica 139 | 140 | # Specify the font size used to generate Graphviz graphs. 141 | graph-font-size: 10 142 | 143 | 144 | ### Return value options 145 | 146 | # The condition upon which Epydoc should exit with a non-zero 147 | # exit status. Possible values are error, warning, docstring_warning 148 | #fail-on: error 149 | 150 | -------------------------------------------------------------------------------- /ptsa/contributed.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | import numpy as N 11 | from scipy import unwrap 12 | import sys 13 | 14 | #from filter import decimate 15 | #from helper import reshapeTo2D,reshapeFrom2D 16 | from ptsa.data import TimeSeries,Dim,Dims,DimData 17 | from ptsa import wavelet 18 | import scipy.stats as stats 19 | 20 | 21 | 22 | import pdb 23 | 24 | def tsZtransPow(freqs,tseries,zTrans=True,log=True,width=5,resample=None, 25 | keepBuffer=False,verbose=False,to_return='both',freqDimName='freq'): 26 | """ 27 | Calculate z-transformed power (and optionally phase) on a 28 | TimeSeries, returning new TimeSeries instances. 29 | """ 30 | if (to_return != 'both') and (to_return != 'pow'): 31 | raise ValueError("to_return must be \'pow\'or \'both\' to specify\ 32 | whether power only, or power and phase are returned. Only power is\ 33 | z-tranformed; if only phase and/or untransformed power is of interest,\ 34 | the function tsPhasePow() should be called directly. 
Invalid value for\ 35 | to_return: %s" % to_return) 36 | 37 | # Get the power (and optionally phase) for tseries: 38 | if to_return == 'both': 39 | phaseAll,powerAll = wavelet.tsPhasePow(freqs=freqs,tseries=tseries,width=width, 40 | resample=resample,keepBuffer=keepBuffer, 41 | verbose=verbose,to_return=to_return, 42 | freqDimName=freqDimName) 43 | else: 44 | powerAll = wavelet.tsPhasePow(freqs=freqs,tseries=tseries,width=width, 45 | resample=resample,keepBuffer=keepBuffer, 46 | verbose=verbose,to_return=to_return, 47 | freqDimName=freqDimName) 48 | 49 | if log: # Ensure power is positive and log10 transform: 50 | powerAll.data[powerAll.data<=0] = N.finfo(powerAll.data.dtype).eps 51 | powerAll.data = N.log10(powerAll.data) 52 | 53 | # Get zmean and zstd (DimData objects with a frequency dimension each): 54 | if isinstance(zTrans,tuple): # zmean and zstd are passed as zTrans 55 | if ((len(zTrans) != 2) or (not isinstance(zTrans[0],DimData)) or 56 | (not isinstance(zTrans[1],DimData)) or (zTrans[0].ndim!=1) or 57 | (zTrans[1].ndim!=1) or (zTrans[0].dims.names[0]!=freqDimName) or 58 | (zTrans[1].dims.names[0]!=freqDimName) or 59 | (zTrans[0][freqDimName]!=powerAll[freqDimName]).any() or 60 | (zTrans[1][freqDimName]!=powerAll[freqDimName]).any()): 61 | raise ValueError("The ztrans tuple needs to conform to the\ 62 | following format: (zmean,zstd). Where zmean and zstd are both\ 63 | instances of DimData each with a single frequency dimension.\ 64 | The name of the dimension must be as specified in freqDimName and\ 65 | the same frequency values as those in tseries must be used.\ 66 | Invalid value: %s" % str(zTrans)) 67 | elif zTrans[1].data.min() <= 0: 68 | raise ValueError("The zstd must be postive: zTrans[1].data.min() =\ 69 | %f" % zTrans[1].data.min()) 70 | zmean = zTrans[0] 71 | zstd = zTrans[1] 72 | else: # zmean and zstd must be calculated 73 | if isinstance(zTrans,TimeSeries): 74 | # Get the power for the provided baseline time series: 75 | zpow = wavelet.tsPhasePow(freqs=freqs,tseries=zTrans,width=width, 76 | resample=resample,keepBuffer=False,verbose=verbose, 77 | to_return='pow',freqDimName=freqDimName) 78 | if log: 79 | zpow.data[zpow.data<=0] = N.finfo(zpow.data.dtype).eps 80 | zpow.data = N.log10(zpow.data) 81 | else: 82 | # Copy the power for the entire time series: 83 | zpow = powerAll.copy() 84 | zpow.removeBuffer() 85 | # Now calculate zmean and zstd from zpow: 86 | # (using stats.std will calculate the unbiased std) 87 | if log: 88 | zmean = zpow.margin(freqDimName,stats.mean,unit="mean log10 power") 89 | zstd = zpow.margin(freqDimName,stats.std,unit="std of log10 power") 90 | else: 91 | zmean = zpow.margin(freqDimName,stats.mean,unit="mean power") 92 | zstd = zpow.margin(freqDimName,stats.std,unit="std of power") 93 | 94 | # For the transformation {zmean,zstd}.data need to have a compatible shape. 
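    # (powerAll.data spans every dimension of the time series -- e.g.
    #  (freq, event, time) -- while zmean/zstd carry only a frequency
    #  dimension, so their data must be reshaped before broadcasting.)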
95 | # Calculate the dimensions with which to reshape (all 1 except for the 96 | # frequency dimension): 97 | reshapedims = N.ones(len(powerAll.shape)) 98 | reshapedims[powerAll.dim(freqDimName)] = -1 99 | 100 | # z transform using reshapedims to make the arrays compatible: 101 | powerAll.data = powerAll.data - zmean.data.reshape(reshapedims) 102 | powerAll.data = powerAll.data / zstd.data.reshape(reshapedims) 103 | 104 | if to_return == 'both': 105 | return phaseAll,powerAll,(zmean,zstd) 106 | else: 107 | return powerAll,(zmean,zstd) 108 | 109 | -------------------------------------------------------------------------------- /docs/devel/gitwash/configure_git.rst: -------------------------------------------------------------------------------- 1 | .. _configure-git: 2 | 3 | =============== 4 | Configure git 5 | =============== 6 | 7 | .. _git-config-basic: 8 | 9 | Overview 10 | ======== 11 | 12 | Your personal git configurations are saved in the ``.gitconfig`` file in 13 | your home directory. 14 | 15 | Here is an example ``.gitconfig`` file:: 16 | 17 | [user] 18 | name = Your Name 19 | email = you@yourdomain.example.com 20 | 21 | [alias] 22 | ci = commit -a 23 | co = checkout 24 | st = status 25 | stat = status 26 | br = branch 27 | wdiff = diff --color-words 28 | 29 | [core] 30 | editor = vim 31 | 32 | [merge] 33 | summary = true 34 | 35 | You can edit this file directly or you can use the ``git config --global`` 36 | command:: 37 | 38 | git config --global user.name "Your Name" 39 | git config --global user.email you@yourdomain.example.com 40 | git config --global alias.ci "commit -a" 41 | git config --global alias.co checkout 42 | git config --global alias.st "status -a" 43 | git config --global alias.stat "status -a" 44 | git config --global alias.br branch 45 | git config --global alias.wdiff "diff --color-words" 46 | git config --global core.editor vim 47 | git config --global merge.summary true 48 | 49 | To set up on another computer, you can copy your ``~/.gitconfig`` file, 50 | or run the commands above. 51 | 52 | In detail 53 | ========= 54 | 55 | user.name and user.email 56 | ------------------------ 57 | 58 | It is good practice to tell git_ who you are, for labeling any changes 59 | you make to the code. The simplest way to do this is from the command 60 | line:: 61 | 62 | git config --global user.name "Your Name" 63 | git config --global user.email you@yourdomain.example.com 64 | 65 | This will write the settings into your git configuration file, which 66 | should now contain a user section with your name and email:: 67 | 68 | [user] 69 | name = Your Name 70 | email = you@yourdomain.example.com 71 | 72 | Of course you'll need to replace ``Your Name`` and ``you@yourdomain.example.com`` 73 | with your actual name and email address. 74 | 75 | Aliases 76 | ------- 77 | 78 | You might well benefit from some aliases to common commands. 79 | 80 | For example, you might well want to be able to shorten ``git checkout`` 81 | to ``git co``. 
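With that alias in place, typing ``git co master`` does the same thing as
``git checkout master``.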
Or you may want to alias ``git diff --color-words`` 82 | (which gives a nicely formatted output of the diff) to ``git wdiff`` 83 | 84 | The following ``git config --global`` commands:: 85 | 86 | git config --global alias.ci "commit -a" 87 | git config --global alias.co checkout 88 | git config --global alias.st "status -a" 89 | git config --global alias.stat "status -a" 90 | git config --global alias.br branch 91 | git config --global alias.wdiff "diff --color-words" 92 | 93 | will create an ``alias`` section in your ``.gitconfig`` file with contents 94 | like this:: 95 | 96 | [alias] 97 | ci = commit -a 98 | co = checkout 99 | st = status -a 100 | stat = status -a 101 | br = branch 102 | wdiff = diff --color-words 103 | 104 | Editor 105 | ------ 106 | 107 | You may also want to make sure that your editor of choice is used :: 108 | 109 | git config --global core.editor vim 110 | 111 | Merging 112 | ------- 113 | 114 | To enforce summaries when doing merges (``~/.gitconfig`` file again):: 115 | 116 | [merge] 117 | log = true 118 | 119 | Or from the command line:: 120 | 121 | git config --global merge.log true 122 | 123 | .. _fancy-log: 124 | 125 | Fancy log output 126 | ---------------- 127 | 128 | This is a very nice alias to get a fancy log output; it should go in the 129 | ``alias`` section of your ``.gitconfig`` file:: 130 | 131 | lg = log --graph --pretty=format:'%Cred%h%Creset -%C(yellow)%d%Creset %s %Cgreen(%cr) %C(bold blue)[%an]%Creset' --abbrev-commit --date=relative 132 | 133 | You use the alias with:: 134 | 135 | git lg 136 | 137 | and it gives graph / text output something like this (but with color!):: 138 | 139 | * 6d8e1ee - (HEAD, origin/my-fancy-feature, my-fancy-feature) NF - a fancy file (45 minutes ago) [Matthew Brett] 140 | * d304a73 - (origin/placeholder, placeholder) Merge pull request #48 from hhuuggoo/master (2 weeks ago) [Jonathan Terhorst] 141 | |\ 142 | | * 4aff2a8 - fixed bug 35, and added a test in test_bugfixes (2 weeks ago) [Hugo] 143 | |/ 144 | * a7ff2e5 - Added notes on discussion/proposal made during Data Array Summit. (2 weeks ago) [Corran Webster] 145 | * 68f6752 - Initial implimentation of AxisIndexer - uses 'index_by' which needs to be changed to a call on an Axes object - this is all very sketchy right now. (2 weeks ago) [Corr 146 | * 376adbd - Merge pull request #46 from terhorst/master (2 weeks ago) [Jonathan Terhorst] 147 | |\ 148 | | * b605216 - updated joshu example to current api (3 weeks ago) [Jonathan Terhorst] 149 | | * 2e991e8 - add testing for outer ufunc (3 weeks ago) [Jonathan Terhorst] 150 | | * 7beda5a - prevent axis from throwing an exception if testing equality with non-axis object (3 weeks ago) [Jonathan Terhorst] 151 | | * 65af65e - convert unit testing code to assertions (3 weeks ago) [Jonathan Terhorst] 152 | | * 956fbab - Merge remote-tracking branch 'upstream/master' (3 weeks ago) [Jonathan Terhorst] 153 | | |\ 154 | | |/ 155 | 156 | Thanks to Yury V. Zaytsev for posting it. 157 | 158 | .. include:: links.inc 159 | -------------------------------------------------------------------------------- /ptsa/_arraytools.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | """ 4 | Functions for acting on a axis of an array. 
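These axis-slicing and signal-extension helpers (axis_slice, axis_reverse,
odd_ext, even_ext, const_ext) support the filtfilt implementation pulled in
from the development version of SciPy.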
5 | """ 6 | 7 | # From dev version of SciPy for pulling in the new filtfilt 8 | 9 | import numpy as np 10 | 11 | 12 | def axis_slice(a, start=None, stop=None, step=None, axis=-1): 13 | """Take a slice along axis 'axis' from 'a'. 14 | 15 | Parameters 16 | ---------- 17 | a : numpy.ndarray 18 | The array to be sliced. 19 | start, stop, step : int or None 20 | The slice parameters. 21 | axis : int 22 | The axis of `a` to be sliced. 23 | 24 | Examples 25 | -------- 26 | >>> a = array([[1, 2, 3], [4, 5, 6], [7, 8, 9]]) 27 | >>> axis_slice(a, start=0, stop=1, axis=1) 28 | array([[1], 29 | [4], 30 | [7]]) 31 | >>> axis_slice(a, start=1, axis=0) 32 | array([[4, 5, 6], 33 | [7, 8, 9]]) 34 | 35 | Notes 36 | ----- 37 | The keyword arguments start, stop and step are used by calling 38 | slice(start, stop, step). This implies axis_slice() does not 39 | handle its arguments the exacty the same as indexing. To select 40 | a single index k, for example, use 41 | axis_slice(a, start=k, stop=k+1) 42 | In this case, the length of the axis 'axis' in the result will 43 | be 1; the trivial dimension is not removed. (Use numpy.squeeze() 44 | to remove trivial axes.) 45 | """ 46 | a_slice = [slice(None)] * a.ndim 47 | a_slice[axis] = slice(start, stop, step) 48 | b = a[a_slice] 49 | return b 50 | 51 | 52 | def axis_reverse(a, axis=-1): 53 | """Reverse the 1-d slices of `a` along axis `axis`. 54 | 55 | Returns axis_slice(a, step=-1, axis=axis). 56 | """ 57 | return axis_slice(a, step=-1, axis=axis) 58 | 59 | 60 | def odd_ext(x, n, axis=-1): 61 | """Generate a new ndarray by making an odd extension of x along an axis. 62 | 63 | Parameters 64 | ---------- 65 | x : ndarray 66 | The array to be extended. 67 | n : int 68 | The number of elements by which to extend x at each end of the axis. 69 | axis : int 70 | The axis along which to extend x. Default is -1. 71 | 72 | Examples 73 | -------- 74 | >>> a = array([[1.0,2.0,3.0,4.0,5.0], [0.0, 1.0, 4.0, 9.0, 16.0]]) 75 | >>> _odd_ext(a, 2) 76 | array([[-1., 0., 1., 2., 3., 4., 5., 6., 7.], 77 | [-4., -1, 0., 1., 4., 9., 16., 23., 28.]]) 78 | """ 79 | if n < 1: 80 | return x 81 | if n > x.shape[axis] - 1: 82 | raise ValueError(("The extension length n (%d) is too big. " + 83 | "It must not exceed x.shape[axis]-1, which is %d.") 84 | % (n, x.shape[axis] - 1)) 85 | left_end = axis_slice(x, start=0, stop=1, axis=axis) 86 | left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis) 87 | right_end = axis_slice(x, start=-1, axis=axis) 88 | right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis) 89 | ext = np.concatenate((2 * left_end - left_ext, 90 | x, 91 | 2 * right_end - right_ext), 92 | axis=axis) 93 | return ext 94 | 95 | 96 | def even_ext(x, n, axis=-1): 97 | """Create an ndarray that is an even extension of x along an axis. 98 | 99 | Parameters 100 | ---------- 101 | x : ndarray 102 | The array to be extended. 103 | n : int 104 | The number of elements by which to extend x at each end of the axis. 105 | axis : int 106 | The axis along which to extend x. Default is -1. 107 | 108 | Examples 109 | -------- 110 | >>> a = array([[1.0,2.0,3.0,4.0,5.0], [0.0, 1.0, 4.0, 9.0, 16.0]]) 111 | >>> _even_ext(a, 2) 112 | array([[ 3., 2., 1., 2., 3., 4., 5., 4., 3.], 113 | [ 4., 1., 0., 1., 4., 9., 16., 9., 4.]]) 114 | """ 115 | if n < 1: 116 | return x 117 | if n > x.shape[axis] - 1: 118 | raise ValueError(("The extension length n (%d) is too big. 
" + 119 | "It must not exceed x.shape[axis]-1, which is %d.") 120 | % (n, x.shape[axis] - 1)) 121 | left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis) 122 | right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis) 123 | ext = np.concatenate((left_ext, 124 | x, 125 | right_ext), 126 | axis=axis) 127 | return ext 128 | 129 | 130 | def const_ext(x, n, axis=-1): 131 | """Create an ndarray that is a constant extension of x along an axis. 132 | 133 | The extension repeats the values at the first and last element of 134 | the axis. 135 | 136 | Parameters 137 | ---------- 138 | x : ndarray 139 | The array to be extended. 140 | n : int 141 | The number of elements by which to extend x at each end of the axis. 142 | axis : int 143 | The axis along which to extend x. Default is -1. 144 | 145 | Examples 146 | -------- 147 | >>> a = array([[1.0,2.0,3.0,4.0,5.0], [0.0, 1.0, 4.0, 9.0, 16.0]]) 148 | >>> _const_ext(a, 2) 149 | array([[ 1., 1., 1., 2., 3., 4., 5., 5., 5.], 150 | [ 0., 0., 0., 1., 4., 9., 16., 16., 16.]]) 151 | """ 152 | if n < 1: 153 | return x 154 | left_end = axis_slice(x, start=0, stop=1, axis=axis) 155 | ones_shape = [1] * x.ndim 156 | ones_shape[axis] = n 157 | ones = np.ones(ones_shape, dtype=x.dtype) 158 | left_ext = ones * left_end 159 | right_end = axis_slice(x, start=-1, axis=axis) 160 | right_ext = ones * right_end 161 | ext = np.concatenate((left_ext, 162 | x, 163 | right_ext), 164 | axis=axis) 165 | return ext 166 | -------------------------------------------------------------------------------- /ptsa/plotting/HCGSN128.sfp: -------------------------------------------------------------------------------- 1 | FidNz 0 9.071585155 -2.359754454 2 | FidT9 -6.711765 0.040402876 -3.251600355 3 | FidT10 6.711765 0.040402876 -3.251600355 4 | E1 5.787677636 5.520863216 -2.577468644 5 | E2 5.291804727 6.709097557 0.307434896 6 | E3 3.864122447 7.63424051 3.067770143 7 | E4 2.868837559 7.145708546 4.989564557 8 | E5 1.479340453 5.68662139 6.812878187 9 | E6 0 3.806770224 7.891304964 10 | E7 -1.223800252 1.558864431 8.44043914 11 | E8 4.221901505 7.998817387 -1.354789681 12 | E9 2.695405558 8.884820317 1.088308144 13 | E10 1.830882336 8.708839134 3.18709115 14 | E11 0 7.96264703 5.044718001 15 | E12 -1.479340453 5.68662139 6.812878187 16 | E13 -2.435870762 3.254307219 7.608766206 17 | E14 1.270447661 9.479016328 -0.947183306 18 | E15 0 9.087440894 1.333345013 19 | E16 0 9.076490798 3.105438474 20 | E17 0 9.271139705 -2.211516434 21 | E18 -1.830882336 8.708839134 3.18709115 22 | E19 -2.868837559 7.145708546 4.989564557 23 | E20 -3.825797111 5.121648995 5.942844877 24 | E21 -1.270447661 9.479016328 -0.947183306 25 | E22 -2.695405558 8.884820317 1.088308144 26 | E23 -3.864122447 7.63424051 3.067770143 27 | E24 -4.459387187 6.021159964 4.365321482 28 | E25 -4.221901505 7.998817387 -1.354789681 29 | E26 -5.291804727 6.709097557 0.307434896 30 | E27 -5.682547954 5.453384344 2.836565436 31 | E28 -5.546670402 4.157847823 4.627615703 32 | E29 -4.762196763 2.697832099 6.297663028 33 | E30 -3.695490968 0.960411022 7.627828134 34 | E31 -1.955187826 -0.684381878 8.564858511 35 | E32 -5.787677636 5.520863216 -2.577468644 36 | E33 -6.399087198 4.127248875 -0.356852241 37 | E34 -6.823959684 2.968422112 2.430080351 38 | E35 -6.414469893 1.490027747 4.741794544 39 | E36 -5.47913021 0.284948655 6.38332782 40 | E37 -3.909902609 -1.519049882 7.764134929 41 | E38 -6.550732888 3.611543152 -3.353155926 42 | E39 -7.191620108 0.850096251 -0.882936903 43 | E40 -7.391919265 0.032151584 
2.143634599 44 | E41 -6.905051715 -0.800953972 4.600056501 45 | E42 -5.956055073 -2.338984312 6.00361353 46 | E43 -6.518995129 2.417299399 -5.253637073 47 | E44 -6.840717711 1.278489412 -3.5553823 48 | E45 -7.304625099 -1.866238006 -0.629182006 49 | E46 -7.312517928 -2.298574078 2.385298838 50 | E47 -6.737313764 -3.011819533 4.178390203 51 | E48 -5.934584124 2.22697797 -7.934360742 52 | E49 -6.298127313 0.41663451 -6.069156425 53 | E50 -6.78248072 -4.023512045 -0.232191092 54 | E51 -6.558030032 -4.667036048 2.749989597 55 | E52 -5.831241498 -4.494821698 4.955347697 56 | E53 -4.193518856 -4.037020083 6.982920038 57 | E54 -2.270752074 -3.414835627 8.204556551 58 | E55 0 -2.138343513 8.791875902 59 | E56 -6.174969392 -2.458138877 -5.637380998 60 | E57 -6.580438308 -3.739554155 -2.991084431 61 | E58 -6.034746843 -5.755782196 0.051843011 62 | E59 -5.204501802 -6.437833018 2.984444293 63 | E60 -4.116929504 -6.061561438 5.365757296 64 | E61 -2.344914884 -5.481057427 7.057748614 65 | E62 0 -6.676694032 6.465208258 66 | E63 -5.333266171 -4.302240169 -5.613509789 67 | E64 -5.404091392 -5.870302681 -2.891640039 68 | E65 -4.645302298 -7.280552408 0.130139701 69 | E66 -3.608293164 -7.665487704 3.129931648 70 | E67 -1.844644417 -7.354417376 5.224001733 71 | E68 -3.784983913 -6.401014415 -5.260040689 72 | E69 -3.528848027 -7.603010836 -2.818037873 73 | E70 -2.738838019 -8.607966849 0.239368223 74 | E71 -1.404967401 -8.437486994 3.277284901 75 | E72 0 -7.829896826 4.687622229 76 | E73 -1.929652202 -7.497197868 -5.136777648 77 | E74 -1.125731192 -8.455208629 -2.632832329 78 | E75 0 -8.996686498 0.487952047 79 | E76 1.404967401 -8.437486994 3.277284901 80 | E77 1.844644417 -7.354417376 5.224001733 81 | E78 2.344914884 -5.481057427 7.057748614 82 | E79 2.270752074 -3.414835627 8.204556551 83 | E80 1.955187826 -0.684381878 8.564858511 84 | E81 0 -7.85891896 -4.945387489 85 | E82 1.125731192 -8.455208629 -2.632832329 86 | E83 2.738838019 -8.607966849 0.239368223 87 | E84 3.608293164 -7.665487704 3.129931648 88 | E85 4.116929504 -6.061561438 5.365757296 89 | E86 4.193518856 -4.037020083 6.982920038 90 | E87 3.909902609 -1.519049882 7.764134929 91 | E88 1.929652202 -7.497197868 -5.136777648 92 | E89 3.528848027 -7.603010836 -2.818037873 93 | E90 4.645302298 -7.280552408 0.130139701 94 | E91 5.204501802 -6.437833018 2.984444293 95 | E92 5.831241498 -4.494821698 4.955347697 96 | E93 5.956055073 -2.338984312 6.00361353 97 | E94 3.784983913 -6.401014415 -5.260040689 98 | E95 5.404091392 -5.870302681 -2.891640039 99 | E96 6.034746843 -5.755782196 0.051843011 100 | E97 6.558030032 -4.667036048 2.749989597 101 | E98 6.737313764 -3.011819533 4.178390203 102 | E99 5.333266171 -4.302240169 -5.613509789 103 | E100 6.580438308 -3.739554155 -2.991084431 104 | E101 6.78248072 -4.023512045 -0.232191092 105 | E102 7.312517928 -2.298574078 2.385298838 106 | E103 6.905051715 -0.800953972 4.600056501 107 | E104 5.47913021 0.284948655 6.38332782 108 | E105 3.695490968 0.960411022 7.627828134 109 | E106 1.223800252 1.558864431 8.44043914 110 | E107 6.174969392 -2.458138877 -5.637380998 111 | E108 7.304625099 -1.866238006 -0.629182006 112 | E109 7.391919265 0.032151584 2.143634599 113 | E110 6.414469893 1.490027747 4.741794544 114 | E111 4.762196763 2.697832099 6.297663028 115 | E112 2.435870762 3.254307219 7.608766206 116 | E113 6.298127313 0.41663451 -6.069156425 117 | E114 6.840717711 1.278489412 -3.5553823 118 | E115 7.191620108 0.850096251 -0.882936903 119 | E116 6.823959684 2.968422112 2.430080351 120 | E117 5.546670402 
4.157847823 4.627615703 121 | E118 3.825797111 5.121648995 5.942844877 122 | E119 5.934584124 2.22697797 -7.934360742 123 | E120 6.518995129 2.417299399 -5.253637073 124 | E121 6.550732888 3.611543152 -3.353155926 125 | E122 6.399087198 4.127248875 -0.356852241 126 | E123 5.682547954 5.453384344 2.836565436 127 | E124 4.459387187 6.021159964 4.365321482 128 | E125 6.118458137 4.523870113 -4.409174427 129 | E126 3.743504949 6.649204911 -6.530243068 130 | E127 -3.743504949 6.649204911 -6.530243068 131 | E128 -6.118458137 4.523870113 -4.409174427 132 | 133 | -------------------------------------------------------------------------------- /ptsa/data/tests/test_timeseries.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | import numpy as np 11 | import re 12 | from numpy.testing import TestCase 13 | 14 | from ptsa.data import Dim,DimArray,TimeSeries 15 | from ptsa import filt 16 | 17 | # from numpy.testing import NumpyTest, TestCase 18 | 19 | # class test_Template(TestCase): 20 | 21 | # def setUp(self): pass 22 | # #print "TestCase initialization..." 23 | 24 | # def test_foo(self): pass 25 | # #print "testing foo" 26 | 27 | # def test_bar(self): pass 28 | # #print "testing bar" 29 | 30 | 31 | # if __name__ == '__main__': 32 | # NumpyTest.main() 33 | 34 | # I don't know why I can't just include this 35 | #from testdata import TestData 36 | 37 | class TestData: 38 | def __init__(self): 39 | # create 10 Hz sine waves at 200 and 50 Hz 4000ms long 40 | numSecs = 4. 41 | numPoints = int(numSecs*200.) 42 | Hz = 10 43 | d200_10 = np.sin(np.arange(numPoints,dtype=np.float)*2*np.pi* 44 | Hz*numSecs/numPoints) 45 | Hz = 5 46 | d200_5 = np.sin(np.arange(numPoints,dtype=np.float)*2*np.pi* 47 | Hz*numSecs/numPoints) 48 | self.dat200 = np.array([d200_10,d200_5]) 49 | # calc the time range 50 | offset = -200 51 | duration = numPoints 52 | samplesize = 1./200. 53 | sampStart = offset*samplesize 54 | sampEnd = sampStart + (duration-1)*samplesize 55 | timeRange = np.linspace(sampStart,sampEnd,duration) 56 | self.dims200 = [Dim(np.arange(self.dat200.shape[0]),'channel'), 57 | Dim(timeRange,'time',unit='ms')] 58 | 59 | numSecs = 4. 60 | numPoints = int(numSecs*50.) 61 | Hz = 10 62 | d50_10 = np.sin(np.arange(numPoints,dtype=np.float)*2*np.pi* 63 | Hz*numSecs/numPoints) 64 | Hz = 5 65 | d50_5 = np.sin(np.arange(numPoints,dtype=np.float)*2*np.pi* 66 | Hz*numSecs/numPoints) 67 | self.dat50 = np.array([d50_10,d50_5]) 68 | # calc the time range in MS 69 | offset = -50 70 | duration = numPoints 71 | samplesize = 1000./50. 
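        # (samplesize here is in ms -- 20 ms per sample at 50 Hz -- whereas
        #  the 200 Hz block above expressed it in seconds; test_resample
        #  compensates by multiplying the resampled times by 1000)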
72 | sampStart = offset*samplesize 73 | sampEnd = sampStart + (duration-1)*samplesize 74 | timeRange = np.linspace(sampStart,sampEnd,duration) 75 | self.dims50 = [Dim(np.arange(self.dat50.shape[0]),'channel'), 76 | Dim(timeRange,'time',unit='ms')] 77 | 78 | # test TimeSeries 79 | class test_TimeSeries(TestCase): 80 | def setUp(self): 81 | td = TestData() 82 | self.dat200 = td.dat200 83 | self.dims200 = td.dims200 84 | self.dat50 = td.dat50 85 | self.dims50 = td.dims50 86 | def test_init(self): 87 | # init a TimeSeries with all combos of options and verify that 88 | # the attributes are correct 89 | 90 | # fewest params 91 | ts = TimeSeries(self.dat200,'time',200,dims = self.dims200) 92 | np.testing.assert_equal(ts[:], self.dat200[:]) 93 | self.assertEquals(ts.shape, self.dat200.shape) 94 | self.assertEquals(ts.taxis, len(self.dat200.shape)-1) 95 | self.assertEquals(ts.samplerate,200) 96 | self.assertRaises(ValueError,TimeSeries,self.dat200, 97 | 'bla',200,dims=self.dims200) 98 | self.assertRaises(ValueError,TimeSeries,self.dat200, 99 | 'time',-200,dims=self.dims200) 100 | 101 | 102 | def test_remove_buffer(self): 103 | buf = 200 104 | numsamp = 4*200 105 | ts = TimeSeries(self.dat200,'time',200, dims=self.dims200) 106 | ts_nobuff = ts.remove_buffer(1) 107 | self.assertEquals(ts_nobuff.shape[ts_nobuff.taxis],numsamp-2*buf) 108 | self.assertEquals(len(ts_nobuff['time']),numsamp-2*buf) 109 | ts_nobuff = ts.remove_buffer((1,1)) 110 | self.assertEquals(ts_nobuff.shape[ts_nobuff.taxis],numsamp-2*buf) 111 | self.assertEquals(len(ts_nobuff['time']),numsamp-2*buf) 112 | # make sure that negative durations throw exception 113 | self.assertRaises(ValueError,ts.remove_buffer,-1) 114 | 115 | def tst_setattr(self): 116 | ts = TimeSeries(self.dat200,'time',200,dims=self.dims200) 117 | self.assertRaises(ValueError,ts.__setattr__,'tdim','bla') 118 | self.assertRaises(ValueError,ts.__setattr__,'samplerate',-1) 119 | 120 | def test_filter(self): 121 | samplerate = 200 122 | filtType='stop' 123 | freqRange = [10,20] 124 | order = 4 125 | ts = TimeSeries(self.dat200,'time',samplerate,dims=self.dims200) 126 | ts_filt = ts.filtered(freqRange, filtType, order) 127 | test = filt.buttfilt(self.dat200,freqRange,samplerate,filtType, 128 | order,axis=ts.taxis) 129 | np.testing.assert_array_almost_equal(ts_filt[:],test[:],decimal=6) 130 | 131 | def test_resample(self): 132 | ts200 = TimeSeries(self.dat200,'time',200,dims=self.dims200) 133 | ts50 = TimeSeries( 134 | self.dat50,'time',50,dims=self.dims50).remove_buffer(1.0) 135 | ts50_200 = ts200.resampled(50).remove_buffer(1.0) 136 | np.testing.assert_equal(ts50_200.shape[:],ts50.shape[:]) 137 | #print type(ts200['time']) 138 | #print type(ts50['time']) 139 | np.testing.assert_array_almost_equal( 140 | ts50_200['time']*1000,ts50['time'],decimal=6) 141 | np.testing.assert_array_almost_equal(ts50_200[:],ts50[:],decimal=6) 142 | 143 | def test_remove_tdim(self): 144 | ts200 = TimeSeries(self.dat200,'time',200,dims=self.dims200) 145 | self.assertTrue(isinstance(ts200.mean('time'),DimArray)) 146 | 147 | 148 | -------------------------------------------------------------------------------- /docs/devguide.rst: -------------------------------------------------------------------------------- 1 | .. -*- mode: rst; fill-column: 79 -*- 2 | .. ex: set sts=4 ts=4 sw=4 et tw=79: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 4 | # 5 | # See COPYING file distributed along with the PyMVPA package for the 6 | # copyright and license terms. 
7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### 9 | 10 | .. _devguide: 11 | 12 | ************************* 13 | PTSA Developer Guidelines 14 | ************************* 15 | 16 | 17 | Documentation 18 | ============= 19 | 20 | Documentation of the code and supplementary material (such as this file) 21 | should be done in reST_ (reStructuredText) light markup language. See `Demo 22 | `__ or a 23 | `Cheatsheet `__ for a 24 | quick demo. 25 | 26 | 27 | Code Documentation 28 | ------------------ 29 | PTSA follows the `NumPy/SciPy documentation guidelines`_. 30 | Example docstrings are discussed in the guidlines and available as separate 31 | files from the `SciPy`_ website: 32 | 33 | * `example.py`_ 34 | * `EXAMPLE_DOCSTRING.txt`_ 35 | 36 | .. _NumPy/SciPy documentation guidelines: http://projects.scipy.org/scipy/numpy/wiki/CodingStyleGuidelines 37 | 38 | .. _SciPy: http://scipy.org 39 | 40 | .. _example.py: http://projects.scipy.org/scipy/numpy/browser/trunk/doc/example.py 41 | 42 | .. _EXAMPLE_DOCSTRING.txt: http://projects.scipy.org/scipy/numpy/browser/trunk/doc/EXAMPLE_DOCSTRING.txt 43 | 44 | 45 | Code Formatting 46 | =============== 47 | 48 | pylint 49 | Code should be conformant with Pylint_ driven by config located at 50 | `doc/misc/pylintrc `__. It assumes camelback notation 51 | (classes start with capitals, functions with lowercase) and indentation 52 | using 4 spaces (i.e. no tabs) Variables are low-case and can have up to 2 53 | _s. To engage, use 1 of 3 methods: 54 | 55 | - place it in *~/.pylintrc* for user-wide installation 56 | - use within a call to pylint:: 57 | 58 | pylint --rcfile=$PWD/doc/misc/pylintrc 59 | 60 | - export environment variable from mvpa sources top directory:: 61 | 62 | export PYLINTRC=$PWD/doc/misc/pylintrc 63 | 64 | module docstring 65 | Each module should start with a docstring describing the module. 66 | 67 | notes 68 | Use following keywords will be caught by pylint to provide a 69 | summary of what yet to be done in the given file 70 | 71 | FIXME 72 | something which needs fixing (sooner than later) 73 | TODO 74 | future plan (i.e. later than sooner) 75 | XXX 76 | some concern/question 77 | YYY 78 | comment/answer to above mentioned XXX concern 79 | WiP 80 | Work in Progress: API and functionality might rapidly change 81 | 82 | 83 | 84 | Coding Conventions 85 | ================== 86 | 87 | __repr__ 88 | most of the classes should provide meaningful and concise summary 89 | over their identity (name + parameters + some summary over results 90 | if any) 91 | 92 | 93 | Tests 94 | ===== 95 | 96 | * Every more or less "interesting" bugfix should be accompanied by a 97 | unittest which might help to prevent it in the future refactoring 98 | * Every new feature should have a unittest 99 | 100 | 101 | 102 | Git Repository 103 | ============== 104 | 105 | Layout 106 | ------ 107 | 108 | The repository is structured by a number of branches. Each developer should 109 | prefix his/her branches with a unique string plus '/' (maybe initials or 110 | similar). Currently there are: 111 | 112 | :per: Per B. Sederberg 113 | :ctw: Christoph T. Weidemann 114 | 115 | The main release branch is called *master*. This is a merge-only branch. 116 | Features finished or updated by some developer are merged from the 117 | corresponding branch into *master*. At a certain point the current state of 118 | *master* is tagged -- a release is done. 119 | 120 | Only usable feature should end-up in *master*. 
Ideally *master* should be 121 | releasable at all times. Something must not be merged into master if *any* 122 | unit test fails. 123 | 124 | Additionally, there are packaging branches. They are labeled after the package 125 | target (e.g. *debian* for a Debian package). Releases are merged into the 126 | packaging branches, packaging get updated if necessary and the branch gets 127 | tagged when a package version is released. Maintenance (as well as backport) 128 | releases should be done under *maint/codename.flavor* (e.g. *maint/lenny*, 129 | *maint/lenny.security*, *maint/sarge.bpo*). 130 | 131 | 132 | Commits 133 | ------- 134 | 135 | Please prefix all commit summaries with one (or more) of the following labels. 136 | This should help others to easily classify the commits into meaningful 137 | categories: 138 | 139 | * *BF* : bug fix 140 | * *RF* : refactoring 141 | * *NF* : new feature 142 | * *OPT* : optimization 143 | * *BK* : breaks something and/or tests fail 144 | * *PL* : making pylint happier 145 | * *DOC*: for all kinds of documentation related commits 146 | 147 | .. _reST: http://docutils.sourceforge.net/docs/ref/rst/restructuredtext.html 148 | .. _EmacsreST: http://docutils.sourceforge.net/docs/user/emacs.html 149 | .. _Pylint: http://packages.debian.org/unstable/python/pylint 150 | 151 | 152 | Merges 153 | ------ 154 | 155 | For easy tracking of what changes were absorbed during merge, we 156 | advice to enable merge summary within git: 157 | 158 | git-config merge.summary true 159 | 160 | 161 | Changelog 162 | ========= 163 | 164 | The PTSA changelog is located in the toplevel directory of the source tree 165 | in the `Changelog` file. The content of this file should be formated as 166 | restructured text to make it easy to put it into manual appendix and on the 167 | website. 168 | 169 | This changelog should neither replicate the VCS commit log nor the 170 | distribution packaging changelogs (e.g. debian/changelog). It should be 171 | focused on the user perspective and is intended to list rather macroscopic 172 | and/or important changes to the module, like feature additions or bugfixes in 173 | the algorithms with implications to the performance or validity of results. 174 | 175 | It may list references to 3rd party bugtrackers, in case the reported bugs 176 | match the criteria listed above. 177 | 178 | Changelog entries should be tagged with the name of the developer(s) (mainly) 179 | involved in the modification -- initials are sufficient for people 180 | contributing regularly. 181 | 182 | Changelog entries should be added whenever something is ready to be merged 183 | into the master branch, not necessarily with a release already approaching. 184 | -------------------------------------------------------------------------------- /ptsa/data/align.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 
7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | 11 | # methods for aligning events and eeg data 12 | 13 | import os 14 | import csv 15 | import numpy as np 16 | from basewrapper import BaseWrapper 17 | 18 | 19 | def times_to_offsets(eeg_times, beh_times, ev_times, blen=10, tolerance=.0015): 20 | """ 21 | Fit line to EEG pulse times and behavioral pulse times and apply to event times. 22 | 23 | """ 24 | start_ind = None 25 | # get starting ind for the beh_times 26 | for i in range(len(beh_times)//2): 27 | if np.abs(np.diff(eeg_times[:blen]) - 28 | np.diff(beh_times[i:blen+i])).sum()<(tolerance*blen): 29 | start_ind = i 30 | break 31 | if start_ind is None: 32 | raise ValueError('No starting point found') 33 | 34 | # iterate, makeing sure each diff is within tolerance 35 | etimes = [] 36 | btimes = [] 37 | j = 0 38 | for i,bt in enumerate(beh_times[start_ind:]): 39 | if (i == 0) or (np.abs((bt-btimes[-1])-(eeg_times[j]-etimes[-1]))<(tolerance)): 40 | # looks good, so append 41 | etimes.append(eeg_times[j]) 42 | btimes.append(bt) 43 | # increment eeg times counter 44 | j += 1 45 | #print i, 46 | else: 47 | # no good, so say we're skipping 48 | print '.', #(np.abs((bt-btimes[-1])-(eeg_times[j]-etimes[-1]))), 49 | print 50 | # convert to arrays 51 | etimes = np.array(etimes) 52 | btimes = np.array(btimes) 53 | print "Num. matching: ", len(etimes) #,len(btimes) 54 | #plot(etimes,btimes,'o') 55 | 56 | # fit a line to convert between behavioral and eeg times 57 | A = np.vstack([btimes, np.ones(len(btimes))]).T 58 | m, c = np.linalg.lstsq(A, etimes)[0] 59 | print "Slope and Offset: ", m ,c 60 | 61 | # convert to get eoffsets 62 | eoffsets = ev_times*m + c 63 | 64 | return eoffsets 65 | 66 | 67 | def load_pyepl_eeg_pulses(logfile, event_label='UP'): 68 | """ 69 | Load and process the default eeg log file from PyEPL. This will 70 | extract only when the pulses turned on (not off), which is what is 71 | usually saved by EEG systems. 72 | """ 73 | # open and read each line 74 | reader = csv.reader(open(logfile,'rU'),dialect=csv.excel_tab) 75 | pulses = [] 76 | for row in reader: 77 | if row[2] == event_label: 78 | pulses.append(long(row[0])) 79 | return np.asarray(pulses) 80 | 81 | def find_needle_in_haystack(needle, haystack, maxdiff): 82 | """ 83 | Look for a matching subsequence in a long sequence. 84 | """ 85 | nlen = len(needle) 86 | found = False 87 | for i in range(len(haystack)-nlen): 88 | if (haystack[i:i+nlen] - needle).max() < maxdiff: 89 | found = True 90 | break 91 | if not found: 92 | i = None 93 | return i 94 | 95 | def times_to_offsets_old(eeg_times, eeg_offsets, beh_times, 96 | samplerate, window=100, thresh_ms=10): 97 | """ 98 | Fit a line to the eeg times to offsets conversion and then apply 99 | it to the provided behavioral event times. 
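    A least-squares line is fit to matching pulse windows found near the
    beginning and end of the recording, and the resulting slope/intercept
    are applied to the behavioral event times; offsets are returned in
    seconds.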
100 | """ 101 | pulse_ms = eeg_times 102 | annot_ms = eeg_offsets 103 | 104 | # pick beginning and end (needle in haystack) 105 | s_ind = None 106 | e_ind = None 107 | for i in xrange(len(annot_ms)-window): 108 | s_ind = find_needle_in_haystack(np.diff(annot_ms[i:i+window]), 109 | np.diff(pulse_ms),thresh_ms) 110 | if not s_ind is None: 111 | break 112 | if s_ind is None: 113 | raise ValueError("Unable to find a start window.") # get better error here 114 | start_annot_vals = annot_ms[i:i+window] 115 | start_pulse_vals = pulse_ms[s_ind:s_ind+window] 116 | 117 | for i in xrange(len(annot_ms)-window): 118 | e_ind = find_needle_in_haystack(np.diff(annot_ms[::-1][i:i+window]), 119 | np.diff(pulse_ms[::-1]),thresh_ms) 120 | if not e_ind is None: 121 | break 122 | if e_ind is None: 123 | raise ValueError("Unable to find a end window.") # get better error here 124 | 125 | # correct the end ind 126 | e_ind = len(pulse_ms) - e_ind - window 127 | 128 | i = len(annot_ms) - i - window 129 | end_annot_vals = annot_ms[i:i+window] 130 | end_pulse_vals = pulse_ms[e_ind:e_ind+window] 131 | 132 | # fit line with regression 133 | x = np.r_[start_pulse_vals,end_pulse_vals] 134 | y = np.r_[start_annot_vals,end_annot_vals] 135 | m,c = np.linalg.lstsq(np.vstack([x-x[0],np.ones(len(x))]).T, y)[0] 136 | c = c - x[0]*m 137 | 138 | # calc the event time in offsets 139 | #samplerate = w.samplerate 140 | #offsets = np.int64(np.round((m*beh_times + c)*samplerate/1000.)) 141 | 142 | # return seconds 143 | offsets = (m*beh_times + c)/1000. 144 | 145 | return offsets 146 | 147 | 148 | def align_pyepl(wrappedfile, eeglog, events, annot_id='S255', 149 | channel_for_sr=0, 150 | window=100, thresh_ms=10, 151 | event_time_id='event_time', eeg_event_label='UP'): 152 | """ 153 | Take an Events instance and add esrc and eoffset, aligning the 154 | events to the data in the supplied wrapped file (i.e., you must 155 | wrap your data with something like EDFWrapper or HDF5Wrapper.) 156 | This extracts the pulse information from the data's annotations 157 | and matches it up with the pyepl eeg.eeglog file passed in. 158 | 159 | It returns the updated Events. 160 | """ 161 | 162 | if(not isinstance(wrappedfile,BaseWrapper)): 163 | raise ValueError('BaseWrapper instance required!') 164 | 165 | # point to wrapper 166 | w = wrappedfile 167 | 168 | # load clean pyepl eeg log 169 | pulse_ms = load_pyepl_eeg_pulses(eeglog, event_label=eeg_event_label) 170 | 171 | # load annotations from edf 172 | annot = w.annotations 173 | 174 | # convert seconds to ms for annot_ms 175 | annot_ms = annot[annot['annotations']==annot_id]['onsets'] * 1000 176 | 177 | # get the offsets 178 | offsets = times_to_offsets(pulse_ms, annot_ms, events[event_time_id], 179 | w.samplerate, window=window, thresh_ms=thresh_ms) 180 | 181 | # add esrc and eoffset to the Events instance 182 | events = events.add_fields(esrc=np.repeat(w,len(events)), 183 | eoffset=offsets) 184 | 185 | # return the updated events 186 | return events 187 | 188 | -------------------------------------------------------------------------------- /docs/conf.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | # 3 | # PTSA documentation build configuration file, created by 4 | # sphinx-quickstart on Mon Feb 23 20:45:04 2009. 5 | # 6 | # This file is execfile()d with the current directory set to its containing dir. 
7 | # 8 | # The contents of this file are pickled, so don't put values in the namespace 9 | # that aren't pickleable (module imports are okay, they're removed automatically). 10 | # 11 | # Note that not all possible configuration values are present in this 12 | # autogenerated file. 13 | # 14 | # All configuration values have a default; values that are commented out 15 | # serve to show the default. 16 | 17 | import sys, os 18 | #import sphinx.ext.autodoc 19 | 20 | # If your extensions are in another directory, add it here. If the directory 21 | # is relative to the documentation root, use os.path.abspath to make it 22 | # absolute, like shown here. 23 | sys.path.append(os.path.abspath('.')) 24 | sys.path.append(os.path.abspath('../.')) 25 | sys.path.append(os.path.abspath('sphinxexts')) 26 | 27 | # General configuration 28 | # --------------------- 29 | 30 | # Add any Sphinx extension module names here, as strings. They can be extensions 31 | # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. 32 | extensions = ['sphinx.ext.autodoc','numpydoc'] 33 | 34 | # Add any paths that contain templates here, relative to this directory. 35 | templates_path = ['_templates'] 36 | 37 | # The suffix of source filenames. 38 | source_suffix = '.rst' 39 | 40 | # The encoding of source files. 41 | #source_encoding = 'utf-8' 42 | 43 | # The master toctree document. 44 | master_doc = 'index' 45 | 46 | # General information about the project. 47 | project = u'PTSA' 48 | copyright = u'2009-2010, Per B. Sederberg & Christoph T. Weidemann' 49 | 50 | # The version info for the project you're documenting, acts as replacement for 51 | # |version| and |release|, also used in various other places throughout the 52 | # built documents. 53 | # 54 | # The short X.Y version. 55 | version = '0.0.1' 56 | # The full version, including alpha/beta/rc tags. 57 | release = '0.0.1' 58 | 59 | # The language for content autogenerated by Sphinx. Refer to documentation 60 | # for a list of supported languages. 61 | #language = None 62 | 63 | # There are two options for replacing |today|: either, you set today to some 64 | # non-false value, then it is used: 65 | #today = '' 66 | # Else, today_fmt is used as the format for a strftime call. 67 | #today_fmt = '%B %d, %Y' 68 | 69 | # List of documents that shouldn't be included in the build. 70 | #unused_docs = [] 71 | 72 | # List of directories, relative to source directory, that shouldn't be searched 73 | # for source files. 74 | exclude_trees = ['_build'] 75 | 76 | # The reST default role (used for this markup: `text`) to use for all documents. 77 | #default_role = None 78 | 79 | # If true, '()' will be appended to :func: etc. cross-reference text. 80 | #add_function_parentheses = True 81 | 82 | # If true, the current module name will be prepended to all description 83 | # unit titles (such as .. function::). 84 | #add_module_names = True 85 | 86 | # If true, sectionauthor and moduleauthor directives will be shown in the 87 | # output. They are ignored by default. 88 | #show_authors = False 89 | 90 | # The name of the Pygments (syntax highlighting) style to use. 91 | pygments_style = 'sphinx' 92 | 93 | 94 | # Options for HTML output 95 | # ----------------------- 96 | 97 | # The style sheet to use for HTML and HTML Help pages. A file of that name 98 | # must exist either in Sphinx' static/ path, or in one of the custom paths 99 | # given in html_static_path. 100 | html_style = 'ptsa.css' 101 | 102 | # The name for this set of Sphinx documents. 
If None, it defaults to 103 | # " v documentation". 104 | #html_title = None 105 | 106 | # A shorter title for the navigation bar. Default is the same as html_title. 107 | #html_short_title = None 108 | 109 | # The name of an image file (relative to this directory) to place at the top 110 | # of the sidebar. 111 | html_logo = None #'_static/logo.png' 112 | 113 | # The name of an image file (within the static path) to use as favicon of the 114 | # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 115 | # pixels large. 116 | #html_favicon = None 117 | 118 | # Add any paths that contain custom static files (such as style sheets) here, 119 | # relative to this directory. They are copied after the builtin static files, 120 | # so a file named "default.css" will overwrite the builtin "default.css". 121 | html_static_path = ['_static'] 122 | 123 | # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, 124 | # using the given strftime format. 125 | #html_last_updated_fmt = '%b %d, %Y' 126 | 127 | # If true, SmartyPants will be used to convert quotes and dashes to 128 | # typographically correct entities. 129 | html_use_smartypants = True 130 | 131 | # Custom sidebar templates, maps document names to template names. 132 | #html_sidebars = {} 133 | 134 | # Additional templates that should be rendered to pages, maps page names to 135 | # template names. 136 | #html_additional_pages = {} 137 | 138 | # If false, no module index is generated. 139 | html_use_modindex = False 140 | 141 | # If false, no index is generated. 142 | #html_use_index = True 143 | 144 | # If true, the index is split into individual pages for each letter. 145 | #html_split_index = False 146 | 147 | # If true, the reST sources are included in the HTML build as _sources/. 148 | #html_copy_source = True 149 | 150 | # If true, an OpenSearch description file will be output, and all pages will 151 | # contain a tag referring to it. The value of this option must be the 152 | # base URL from which the finished HTML is served. 153 | #html_use_opensearch = '' 154 | 155 | # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). 156 | #html_file_suffix = '' 157 | 158 | # Output file base name for HTML help builder. 159 | htmlhelp_basename = 'PTSAdoc' 160 | 161 | 162 | # Options for LaTeX output 163 | # ------------------------ 164 | 165 | # The paper size ('letter' or 'a4'). 166 | #latex_paper_size = 'letter' 167 | 168 | # The font size ('10pt', '11pt' or '12pt'). 169 | #latex_font_size = '10pt' 170 | 171 | # Grouping the document tree into LaTeX files. List of tuples 172 | # (source start file, target name, title, author, document class [howto/manual]). 173 | latex_documents = [ 174 | ('index', 'PTSA.tex', ur'PTSA Documentation', 175 | ur'Per B. Sederberg and Christoph T. Weidemann', 'manual'), 176 | ] 177 | 178 | # The name of an image file (relative to this directory) to place at the top of 179 | # the title page. 180 | #latex_logo = None 181 | 182 | # For "manual" documents, if this is true, then toplevel headings are parts, 183 | # not chapters. 184 | #latex_use_parts = False 185 | 186 | # Additional stuff for the LaTeX preamble. 187 | #latex_preamble = '' 188 | 189 | # Documents to append as an appendix to all manuals. 190 | #latex_appendices = [] 191 | 192 | # If false, no module index is generated. 
193 | #latex_use_modindex = True
194 |
--------------------------------------------------------------------------------
/ptsa/plotting/ptsa.eps:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/ptsa.eps
--------------------------------------------------------------------------------
/ptsa/plotting/ptsa_blue.eps:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/compmem/ptsa/HEAD/ptsa/plotting/ptsa_blue.eps
--------------------------------------------------------------------------------
/ptsa/data/rawbinarydata.py:
--------------------------------------------------------------------------------
1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*-
2 | #ex: set sts=4 ts=4 sw=4 et:
3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
4 | #
5 | # See the COPYING file distributed along with the PTSA package for the
6 | # copyright and license terms.
7 | #
8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ##
9 |
10 | # local imports
11 | from datawrapper import DataWrapper
12 | from events import Events,TsEvents
13 | from timeseries import TimeSeries,Dim
14 |
15 | # global imports
16 | import numpy as np
17 | import string
18 | import struct
19 | import os
20 | from scipy.io import loadmat
21 |
22 | class RawBinaryEEG(DataWrapper):
23 |     """
24 |     Interface to data stored in binary format with a separate file for
25 |     each channel.
26 |     """
27 |     def __init__(self,dataroot,samplerate=None,format='int16',gain=1):
28 |         """Initialize the interface to the data. 
You must specify the 29 | dataroot, which is a string that contains the path to and 30 | root, up to the channel numbers, where the data are stored.""" 31 | # set up the basic params of the data 32 | self.dataroot = dataroot 33 | self.samplerate = samplerate 34 | self.format = format 35 | self.gain = gain 36 | 37 | # see if can find them from a params file in dataroot 38 | self.params = self._getParams(dataroot) 39 | 40 | # set what we can from the params 41 | if self.params.has_key('samplerate'): 42 | self.samplerate = self.params['samplerate'] 43 | if self.params.has_key('format'): 44 | self.format = self.params['format'] 45 | if self.params.has_key('dataformat'): 46 | self.format = self.params['dataformat'] 47 | if self.params.has_key('gain'): 48 | self.gain = self.params['gain'] 49 | 50 | # set the nBytes and format str 51 | if self.format == 'single': 52 | self.nBytes = 4 53 | self.fmtStr = 'f' 54 | elif self.format == 'short' or self.format == 'int16': 55 | self.nBytes = 2 56 | self.fmtStr = 'h' 57 | elif self.format == 'double': 58 | self.nBytes = 8 59 | self.fmtStr = 'd' 60 | 61 | def _getParams(self,dataroot): 62 | """Get parameters of the data from the dataroot.""" 63 | # set default params 64 | params = {'samplerate':256.03,'gain':1.} 65 | 66 | # first look for dataroot.params file 67 | paramFile = dataroot + '.params' 68 | if not os.path.isfile(paramFile): 69 | # see if it's params.txt 70 | paramFile = os.path.join(os.path.dirname(dataroot),'params.txt') 71 | if not os.path.isfile(paramFile): 72 | #raise "file not found" # fix this 73 | return params 74 | 75 | # we have a file, so open and process it 76 | for line in open(paramFile,'r').readlines(): 77 | # get the columns by splitting 78 | cols = line.strip().split() 79 | # set the params 80 | params[cols[0]] = eval(string.join(cols[1:])) 81 | 82 | # return the params dict 83 | return params 84 | 85 | 86 | def _load_timeseries(self,channel,eventOffsets,dur_samp,offset_samp): 87 | """ 88 | 89 | """ 90 | 91 | # determine the file 92 | eegfname = '%s.%03i' % (self.dataroot,channel) 93 | if os.path.isfile(eegfname): 94 | efile = open(eegfname,'rb') 95 | else: 96 | # try unpadded lead 97 | eegfname = '%s.%i' % (self.dataroot,channel) 98 | if os.path.isfile(eegfname): 99 | efile = open(eegfname,'rb') 100 | else: 101 | raise IOError('EEG file not found for channel %i and file root %s\n' 102 | % (channel,self.dataroot)) 103 | 104 | # loop over events 105 | eventdata = [] 106 | 107 | # # get the eventOffsets 108 | # if isinstance(eventInfo,TsEvents): 109 | # eventOffsets = eventInfo['eegoffset'] 110 | # else: 111 | # eventOffsets = eventInfo 112 | # eventOffsets = np.asarray(eventOffsets) 113 | # if len(eventOffsets.shape)==0: 114 | # eventOffsets = [eventOffsets] 115 | for evOffset in eventOffsets: 116 | # seek to the position in the file 117 | thetime = offset_samp+evOffset 118 | efile.seek(self.nBytes*thetime,0) 119 | 120 | # read the data 121 | data = efile.read(int(self.nBytes*dur_samp)) 122 | 123 | # convert from string to array based on the format 124 | # hard codes little endian 125 | data = np.array(struct.unpack('<'+str(len(data)/self.nBytes)+self.fmtStr,data)) 126 | 127 | # make sure we got some data 128 | if len(data) < dur_samp: 129 | raise IOError('Event with offset %d is outside the bounds of file %s.\n' 130 | % (evOffset,eegfname)) 131 | 132 | # append it to the events 133 | eventdata.append(data) 134 | 135 | # calc the time range 136 | sampStart = offset_samp*samplesize 137 | sampEnd = sampStart + 
(dur_samp-1)*samplesize 138 | timeRange = np.linspace(sampStart,sampEnd,dur_samp) 139 | 140 | # make it a timeseries 141 | if isinstance(eventInfo,TsEvents): 142 | dims = [Dim('event', eventInfo.data, 'event'), 143 | Dim('time',timeRange)] 144 | else: 145 | dims = [Dim('eventOffsets', eventOffsets, 'samples'), 146 | Dim('time',timeRange)] 147 | eventdata = TimeSeries(np.array(eventdata), 148 | dims, 149 | tdim='time', 150 | self.samplerate) 151 | 152 | # multiply by the gain 153 | eventdata *= self.gain 154 | 155 | 156 | return eventdata 157 | 158 | 159 | def createEventsFromMatFile(matfile): 160 | """Create an events data array from an events structure saved in a 161 | Matlab mat file.""" 162 | # load the mat file 163 | mat = loadmat(matfile) 164 | 165 | if 'events' not in mat.keys(): 166 | raise "\nError processing the Matlab file: %s\n" + \ 167 | "This file must contain an events structure" + \ 168 | "with the name \"events\" (case sensitive)!\n" +\ 169 | "(All other content of the file is ignored.)" % matfile 170 | 171 | # get num events 172 | numEvents = len(mat['events']) 173 | 174 | # determine the fieldnames and formats 175 | fields = mat['events'][0]._fieldnames 176 | 177 | # create list with array for each field 178 | data = [] 179 | hasEEGInfo = False 180 | for f,field in enumerate(fields): 181 | # handle special cases 182 | if field == 'eegfile': 183 | # we have eeg info 184 | hasEEGInfo = True 185 | 186 | # get unique files 187 | eegfiles = np.unique(map(lambda x: str(x.eegfile),mat['events'])) 188 | 189 | # make dictionary of data wrapers for the eeg files 190 | efile_dict = {} 191 | for eegfile in eegfiles: 192 | efile_dict[eegfile] = RawBinaryEEG(eegfile) 193 | 194 | # Handle when the eegfile field is blank 195 | efile_dict[''] = None 196 | 197 | # set the eegfile to the correct data wrapper 198 | newdat = np.array(map(lambda x: efile_dict[str(x.__getattribute__(field))], 199 | mat['events'])) 200 | 201 | # change field name to eegsrc 202 | fields[f] = 'eegsrc' 203 | else: 204 | # get the data in normal fashion 205 | newdat = np.array(map(lambda x: x.__getattribute__(field),mat['events'])) 206 | 207 | # append the data 208 | data.append(newdat) 209 | 210 | # allocate for new array 211 | newrec = np.rec.fromarrays(data,names=fields) 212 | 213 | # see if process into DataArray or Events 214 | if hasEEGInfo: 215 | newrec = TsEvents(newrec) 216 | else: 217 | newrec = Events(newrec) 218 | 219 | return newrec 220 | 221 | 222 | -------------------------------------------------------------------------------- /ptsa/data/bvwrapper.py: -------------------------------------------------------------------------------- 1 | #emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- 2 | #ex: set sts=4 ts=4 sw=4 et: 3 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 4 | # 5 | # See the COPYING file distributed along with the PTSA package for the 6 | # copyright and license terms. 7 | # 8 | ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ### ## 9 | 10 | # local imports 11 | from basewrapper import BaseWrapper 12 | 13 | # global imports 14 | import numpy as np 15 | import os.path 16 | from ConfigParser import SafeConfigParser 17 | import io 18 | 19 | class BVWrapper(BaseWrapper): 20 | """ 21 | Interface to data stored in a BrainVision Data Format. 22 | """ 23 | def __init__(self, filepath): 24 | """ 25 | Initialize the interface to the data. 
26 | 27 | Parameters 28 | ---------- 29 | filepath : string 30 | String specifiying the header filename (*.vhdr), with full 31 | path if applicable. 32 | """ 33 | # set up the basic params of the data 34 | if os.path.exists(filepath): 35 | self.filepath = filepath 36 | self.filedir = os.path.split(filepath)[0] 37 | else: 38 | raise IOError(str(filepath)+'\n does not exist!\n'+ 39 | 'Valid path to data file is needed!') 40 | 41 | # read in the info about the data from the header 42 | cp = SafeConfigParser() 43 | lines = open(filepath,'r').readlines() 44 | 45 | # must remove the first lines b/c they are not standard INI format 46 | # also remove everything after [Comment] b/c it doesn't parse either 47 | ind = None 48 | for i,line in enumerate(lines): 49 | if line.strip() == '[Comment]': 50 | ind = i 51 | break 52 | # try: 53 | # ind = lines.index('[Comment]\r\r\n') 54 | # except ValueError: 55 | # try: 56 | # ind = lines.index('[Comment]\r\n') 57 | # except ValueError: 58 | # try: 59 | # ind = lines.index('[Comment]\n') 60 | # except ValueError: 61 | # ind = None 62 | 63 | # join the lines back now that we've cleaned them up 64 | hdr_string = ''.join(lines[1:ind]) 65 | 66 | # now read it in 67 | cp.readfp(io.BytesIO(hdr_string)) 68 | 69 | # extract the info we need 70 | self._binaryformat = cp.get('Binary Infos','binaryformat') 71 | self._nchannels = int(cp.get('Common Infos','numberofchannels')) 72 | self._data_orient = cp.get('Common Infos','dataorientation') 73 | self._data_file = os.path.join(self.filedir,cp.get('Common Infos','datafile')) 74 | self._samplerate = float(10e5)/int(cp.get('Common Infos','samplinginterval')) 75 | self._markerfile = os.path.join(self.filedir,cp.get('Common Infos','markerfile')) 76 | 77 | # read in scale factors for each channel (and other info) 78 | numbers = [] 79 | names = [] 80 | scales = [] 81 | units = [] 82 | #self._channel_scale = np.ones(self._nchannels) 83 | for i in range(self._nchannels): 84 | info = cp.get('Channel Infos','Ch%d'%(i+1)).split(',') 85 | #self._channel_scale[i] = float(info[2]) 86 | numbers.append(i+1) 87 | names.append(info[0]) 88 | scales.append(float(info[2])) 89 | units.append(unicode(info[3],'utf-8')) 90 | # try and get the impedances 91 | impedances = np.ones(len(names))*-1 92 | for i,line in enumerate(lines[ind:]): 93 | if 'Impedance' in line: 94 | # found impedances, try and read them 95 | skipped = 0 96 | for l,li in enumerate(lines[ind+i+1:]): 97 | info = li.strip().split(' ') 98 | cname = info[1][:-1] 99 | if cname in names: 100 | impedances[names.index(cname)] = int(info[2]) 101 | break 102 | self._channel_info = np.rec.fromarrays([numbers,names,scales, 103 | units,impedances], 104 | names='number,name,scale,unit,impedance') 105 | 106 | # process the binary format 107 | if self._binaryformat == 'INT_16': 108 | self._samplesize = 2 109 | self._dtype = np.dtype(np.int16) 110 | elif self._binaryformat == 'IEEE_FLOAT_32': 111 | self._samplesize = 4 112 | self._dtype = np.dtype(np.float32) 113 | else: 114 | raise ValueError('Unknown binary format: %s\n' % self._binaryformat) 115 | 116 | # open the file to figure out the nsamples 117 | mm = np.memmap(self._data_file,dtype=self._dtype, 118 | mode='r') 119 | self._nsamples = mm.shape[0]/self._nchannels 120 | 121 | 122 | def _get_nchannels(self): 123 | return self._nchannels 124 | 125 | def _get_channel_info(self): 126 | return self._channel_info 127 | 128 | def _get_nsamples(self, channel=None): 129 | return self._nsamples 130 | 131 | def _get_samplerate(self, channel=None): 132 
| return self._samplerate 133 | 134 | def _get_annotations(self): 135 | # read in from annotations file (must strip off first lines) 136 | cp = SafeConfigParser() 137 | lines = open(self._markerfile,'r').readlines() 138 | cp.readfp(io.BytesIO(''.join(lines[2:]))) 139 | 140 | # get the marker info 141 | markers = cp.items('Marker Infos') 142 | 143 | # process them 144 | index = [] 145 | onsets = np.empty(len(markers)) 146 | durations = [] 147 | annots = [] 148 | 149 | # see if subtract 1 because starts at 1 instead of 0 150 | sub_one = False 151 | for i in range(len(markers)): 152 | index.append(int(markers[i][0][2:])) 153 | info = markers[i][1].split(',') 154 | annots.append(info[1]) 155 | # convert onset to seconds (subtracting 1 for actual offset) 156 | onsets[i] = (long(info[2]))/self._samplerate 157 | # save duration (for now, keep as string like in EDF) 158 | durations.append(info[3]) 159 | if sub_one == False and info[0] == 'New Segment' and long(info[2])==1: 160 | # we need to sub_one 161 | sub_one = True 162 | if sub_one: 163 | onsets -= long(1)/self._samplerate 164 | 165 | # convert to rec array 166 | annotations = np.rec.fromarrays([onsets,durations,annots], 167 | names='onsets,durations,annotations') 168 | 169 | # sort by index and return 170 | return annotations[np.argsort(index)] 171 | 172 | def _load_data(self,channels,event_offsets,dur_samp,offset_samp): 173 | """ 174 | """ 175 | # allocate for data 176 | eventdata = np.empty((len(channels),len(event_offsets),dur_samp), 177 | dtype=np.float64)*np.nan 178 | 179 | # Memmap to the file 180 | mm = np.memmap(self._data_file,dtype=self._dtype, 181 | mode='r',shape=(self._nsamples,self._nchannels)) 182 | 183 | # loop over events 184 | for e,ev_offset in enumerate(event_offsets): 185 | # set the range 186 | ssamp = offset_samp+ev_offset 187 | 188 | if (ssamp + dur_samp - 1) > self._nsamples: 189 | raise IOError('Event with offset '+str(ev_offset)+ 190 | ' is outside the bounds of the data.') 191 | 192 | # only pick the channels of interest and scale 193 | dat = np.multiply(mm[ssamp:ssamp+dur_samp,channels], 194 | self._channel_info['scale'][channels]) 195 | #self._channel_scale[channels]) 196 | eventdata[:,e,:] = dat.T 197 | 198 | return eventdata 199 | 200 | 201 | -------------------------------------------------------------------------------- /ptsa/data/edf/edf.pyx: -------------------------------------------------------------------------------- 1 | import numpy as np 2 | cimport numpy as np 3 | 4 | # set up the types 5 | dtype_f64 = np.float64 6 | ctypedef np.float64_t dtype_f64_t 7 | 8 | # handle the externs 9 | cdef extern from "edflib.h": 10 | struct edf_hdr_struct: 11 | int handle 12 | int edfsignals 13 | long long annotations_in_file 14 | 15 | # Dummy enums for variables defined by macros in the header 16 | enum: 17 | EDFLIB_TIME_DIMENSION 18 | EDFLIB_DO_NOT_READ_ANNOTATIONS 19 | EDFLIB_READ_ANNOTATIONS 20 | EDFLIB_READ_ALL_ANNOTATIONS 21 | EDFLIB_MAX_ANNOTATION_LEN 22 | 23 | struct edf_annotation_struct: 24 | long long onset 25 | char *duration 26 | char *annotation 27 | 28 | int edf_get_annotation(int handle, int n, edf_annotation_struct *annot) 29 | 30 | int edfclose_file(int handle) 31 | 32 | 33 | cdef extern from "edfwrap.h": 34 | int open_file_readonly(char *filepath, 35 | edf_hdr_struct *hdr, 36 | int read_annot) 37 | long long get_samples_in_file(edf_hdr_struct *hdr, 38 | int edfsignal) 39 | float get_samplerate(edf_hdr_struct *hdr, 40 | int edfsignal) 41 | int read_samples_from_file(edf_hdr_struct *hdr, 42 | int 
edfsignal, 43 | long long offset, 44 | int n, 45 | double *buf) 46 | 47 | def read_number_of_signals(char *filepath): 48 | """ 49 | read_number_of_signals(filepath) 50 | 51 | Read in number of signals in the EDF/BDF file. 52 | 53 | Parameters 54 | ---------- 55 | filepath : {str} 56 | The path and name of the EDF/BDF file. 57 | 58 | Returns 59 | ------- 60 | num_signals : {int} 61 | Number of signals in the EDF/BDF file. 62 | 63 | """ 64 | # get a header 65 | cdef edf_hdr_struct hdr 66 | 67 | # open the file 68 | if open_file_readonly(filepath, &hdr, EDFLIB_READ_ALL_ANNOTATIONS) < 0: 69 | print "Error opening file." 70 | return None 71 | 72 | # get the signals 73 | cdef int num_signals = hdr.edfsignals 74 | 75 | # close the file 76 | edfclose_file(hdr.handle) 77 | 78 | return num_signals 79 | 80 | def read_annotations(char *filepath): 81 | """ 82 | read_annotations(filepath) 83 | 84 | Read in all the annotations from an EDF/BDF file into a record 85 | array. Note that the onset times are converted to seconds. 86 | 87 | Parameters 88 | ---------- 89 | filepath : {str} 90 | The path and name of the EDF/BDF file. 91 | 92 | Returns 93 | ------- 94 | annotations : {np.recarray} 95 | A record array with onsets, duration, and annotations. 96 | 97 | """ 98 | # get a header 99 | cdef edf_hdr_struct hdr 100 | 101 | # open the file 102 | if open_file_readonly(filepath, &hdr, EDFLIB_READ_ALL_ANNOTATIONS) < 0: 103 | print "Error opening file." 104 | return None 105 | 106 | # allocate for an annotation 107 | cdef edf_annotation_struct annot 108 | 109 | # this could be improved 110 | cdef np.ndarray[dtype_f64_t, ndim=1] onsets = np.empty( 111 | hdr.annotations_in_file,dtype=dtype_f64) 112 | durations = [] 113 | annotations = [] 114 | 115 | # loop over annotations 116 | for i in range(hdr.annotations_in_file): 117 | if edf_get_annotation(hdr.handle, i, &annot): 118 | print "Error reading annotation %d" % (i) 119 | return None 120 | 121 | # append the annotations 122 | onsets[i] = annot.onset #/EDFLIB_TIME_DIMENSION 123 | durations.append(annot.duration) 124 | annotations.append(annot.annotation) 125 | 126 | # close the file 127 | edfclose_file(hdr.handle) 128 | 129 | # return record array of annotations 130 | return np.rec.fromarrays( 131 | [onsets/EDFLIB_TIME_DIMENSION,durations,annotations], 132 | #[onsets,durations,annotations], 133 | names='onsets,durations,annotations') 134 | 135 | def read_number_of_samples(char *filepath, int edfsignal): 136 | """ 137 | read_number_of_samples(filepath, edfsignal) 138 | 139 | Read the number of samples of a signal in an EDF/BDF file. Note 140 | that different signals can have different numbers of samples. 141 | 142 | Parameters 143 | ---------- 144 | filepath : {str} 145 | The path and name of the EDF/BDF file. 146 | edfsignal : {int} 147 | The signal whose samplerate to retrieve. 148 | 149 | Returns 150 | ------- 151 | num_samples : {long} 152 | The number of samples for that signal. 153 | 154 | """ 155 | # get a header 156 | cdef edf_hdr_struct hdr 157 | 158 | # open the file 159 | if open_file_readonly(filepath, &hdr, EDFLIB_DO_NOT_READ_ANNOTATIONS) < 0: 160 | print "Error opening file." 
161 | return None 162 | 163 | # get the number of samples 164 | cdef long long num_samples = get_samples_in_file(&hdr, 165 | edfsignal) 166 | 167 | # close the file 168 | edfclose_file(hdr.handle) 169 | 170 | return num_samples 171 | 172 | def read_samplerate(char *filepath, int edfsignal): 173 | """ 174 | read_samplerate(filepath, edfsignal) 175 | 176 | Read the samplerate for a signal in an EDF/BDF file. Note that 177 | different signals can have different samplerates. 178 | 179 | Parameters 180 | ---------- 181 | filepath : {str} 182 | The path and name of the EDF/BDF file. 183 | edfsignal : {int} 184 | The signal whose samplerate to retrieve. 185 | 186 | Returns 187 | ------- 188 | samplerate : {float} 189 | The samplerate for that signal. 190 | 191 | """ 192 | # get a header 193 | cdef edf_hdr_struct hdr 194 | 195 | # open the file 196 | if open_file_readonly(filepath, &hdr, EDFLIB_DO_NOT_READ_ANNOTATIONS) < 0: 197 | print "Error opening file." 198 | return None 199 | 200 | # get the samplerate 201 | cdef float samplerate = get_samplerate(&hdr, 202 | edfsignal) 203 | 204 | # close the file 205 | edfclose_file(hdr.handle) 206 | 207 | return samplerate 208 | 209 | def read_samples(char *filepath, int edfsignal, long offset, int n): 210 | """ 211 | read_samples(filepath, edfsignal, offset, n) 212 | 213 | Read in samples from a signal in an EDF/BDF file. 214 | 215 | Parameters 216 | ---------- 217 | filepath : {str} 218 | The path and name of the EDF/BDF file. 219 | edfsignal : {int} 220 | The signal whose samplerate to retrieve. 221 | offset : {long} 222 | Offset in samples into the file where to start reading. 223 | n : {int} 224 | Number of samples to read, starting at offset. 225 | 226 | Returns 227 | ------- 228 | samples : {np.ndarray} 229 | An ndarray of samples read from the file. 230 | 231 | """ 232 | # allocate space 233 | cdef np.ndarray[dtype_f64_t, ndim=1] buf = np.empty((n),dtype=dtype_f64) 234 | 235 | # get a header 236 | cdef edf_hdr_struct hdr 237 | 238 | # open the file 239 | if open_file_readonly(filepath, &hdr, EDFLIB_DO_NOT_READ_ANNOTATIONS) < 0: 240 | print "Error opening file." 241 | return None 242 | 243 | # read samples into buffer 244 | cdef int nread = read_samples_from_file(&hdr, 245 | edfsignal, 246 | offset, 247 | n, 248 | buf.data) 249 | 250 | if nread < 0: 251 | # we had an error, so return none 252 | print "Error reading samples. Duration may have been misspecified." 
253 | return None 254 | 255 | # close the file 256 | edfclose_file(hdr.handle) 257 | 258 | # return the buffer, truncated to the number of samples 259 | return buf[0:nread] 260 | 261 | -------------------------------------------------------------------------------- /tools/gitwash_dumper.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python 2 | ''' Checkout gitwash repo into directory and do search replace on name ''' 3 | 4 | import os 5 | from os.path import join as pjoin 6 | import shutil 7 | import sys 8 | import re 9 | import glob 10 | import fnmatch 11 | import tempfile 12 | from subprocess import call 13 | from optparse import OptionParser 14 | 15 | verbose = False 16 | 17 | 18 | def clone_repo(url, branch): 19 | cwd = os.getcwd() 20 | tmpdir = tempfile.mkdtemp() 21 | try: 22 | cmd = 'git clone %s %s' % (url, tmpdir) 23 | call(cmd, shell=True) 24 | os.chdir(tmpdir) 25 | cmd = 'git checkout %s' % branch 26 | call(cmd, shell=True) 27 | except: 28 | shutil.rmtree(tmpdir) 29 | raise 30 | finally: 31 | os.chdir(cwd) 32 | return tmpdir 33 | 34 | 35 | def cp_files(in_path, globs, out_path): 36 | try: 37 | os.makedirs(out_path) 38 | except OSError: 39 | pass 40 | out_fnames = [] 41 | for in_glob in globs: 42 | in_glob_path = pjoin(in_path, in_glob) 43 | for in_fname in glob.glob(in_glob_path): 44 | out_fname = in_fname.replace(in_path, out_path) 45 | pth, _ = os.path.split(out_fname) 46 | if not os.path.isdir(pth): 47 | os.makedirs(pth) 48 | shutil.copyfile(in_fname, out_fname) 49 | out_fnames.append(out_fname) 50 | return out_fnames 51 | 52 | 53 | def filename_search_replace(sr_pairs, filename, backup=False): 54 | ''' Search and replace for expressions in files 55 | 56 | ''' 57 | in_txt = open(filename, 'rt').read(-1) 58 | out_txt = in_txt[:] 59 | for in_exp, out_exp in sr_pairs: 60 | in_exp = re.compile(in_exp) 61 | out_txt = in_exp.sub(out_exp, out_txt) 62 | if in_txt == out_txt: 63 | return False 64 | open(filename, 'wt').write(out_txt) 65 | if backup: 66 | open(filename + '.bak', 'wt').write(in_txt) 67 | return True 68 | 69 | 70 | def copy_replace(replace_pairs, 71 | repo_path, 72 | out_path, 73 | cp_globs=('*',), 74 | rep_globs=('*',), 75 | renames = ()): 76 | out_fnames = cp_files(repo_path, cp_globs, out_path) 77 | renames = [(re.compile(in_exp), out_exp) for in_exp, out_exp in renames] 78 | fnames = [] 79 | for rep_glob in rep_globs: 80 | fnames += fnmatch.filter(out_fnames, rep_glob) 81 | if verbose: 82 | print '\n'.join(fnames) 83 | for fname in fnames: 84 | filename_search_replace(replace_pairs, fname, False) 85 | for in_exp, out_exp in renames: 86 | new_fname, n = in_exp.subn(out_exp, fname) 87 | if n: 88 | os.rename(fname, new_fname) 89 | break 90 | 91 | 92 | def make_link_targets(proj_name, 93 | user_name, 94 | repo_name, 95 | known_link_fname, 96 | out_link_fname, 97 | url=None, 98 | ml_url=None): 99 | """ Check and make link targets 100 | 101 | If url is None or ml_url is None, check if there are links present for these 102 | in `known_link_fname`. If not, raise error. The check is: 103 | 104 | Look for a target `proj_name`. 105 | Look for a target `proj_name` + ' mailing list' 106 | 107 | Also, look for a target `proj_name` + 'github'. If this exists, don't write 108 | this target into the new file below. 109 | 110 | If we are writing any of the url, ml_url, or github address, then write new 111 | file with these links, of form: 112 | 113 | .. _`proj_name` 114 | .. _`proj_name`: url 115 | .. 
_`proj_name` mailing list: url 116 | """ 117 | link_contents = open(known_link_fname, 'rt').readlines() 118 | have_url = not url is None 119 | have_ml_url = not ml_url is None 120 | have_gh_url = None 121 | for line in link_contents: 122 | if not have_url: 123 | match = re.match(r'..\s+_`%s`:\s+' % proj_name, line) 124 | if match: 125 | have_url = True 126 | if not have_ml_url: 127 | match = re.match(r'..\s+_`%s mailing list`:\s+' % proj_name, line) 128 | if match: 129 | have_ml_url = True 130 | if not have_gh_url: 131 | match = re.match(r'..\s+_`%s github`:\s+' % proj_name, line) 132 | if match: 133 | have_gh_url = True 134 | if not have_url or not have_ml_url: 135 | raise RuntimeError('Need command line or known project ' 136 | 'and / or mailing list URLs') 137 | lines = [] 138 | if not url is None: 139 | lines.append('.. _`%s`: %s\n' % (proj_name, url)) 140 | if not have_gh_url: 141 | gh_url = 'http://github.com/%s/%s\n' % (user_name, repo_name) 142 | lines.append('.. _`%s github`: %s\n' % (proj_name, gh_url)) 143 | if not ml_url is None: 144 | lines.append('.. _`%s mailing list`: %s\n' % (proj_name, ml_url)) 145 | if len(lines) == 0: 146 | # Nothing to do 147 | return 148 | # A neat little header line 149 | lines = ['.. %s\n' % proj_name] + lines 150 | out_links = open(out_link_fname, 'wt') 151 | out_links.writelines(lines) 152 | out_links.close() 153 | 154 | 155 | USAGE = ''' 156 | 157 | If not set with options, the repository name is the same as the 159 | 160 | If not set with options, the main github user is the same as the 161 | repository name.''' 162 | 163 | 164 | GITWASH_CENTRAL = 'git://github.com/matthew-brett/gitwash.git' 165 | GITWASH_BRANCH = 'master' 166 | 167 | 168 | def main(): 169 | parser = OptionParser() 170 | parser.set_usage(parser.get_usage().strip() + USAGE) 171 | parser.add_option("--repo-name", dest="repo_name", 172 | help="repository name - e.g. 
nitime", 173 | metavar="REPO_NAME") 174 | parser.add_option("--github-user", dest="main_gh_user", 175 | help="github username for main repo - e.g fperez", 176 | metavar="MAIN_GH_USER") 177 | parser.add_option("--gitwash-url", dest="gitwash_url", 178 | help="URL to gitwash repository - default %s" 179 | % GITWASH_CENTRAL, 180 | default=GITWASH_CENTRAL, 181 | metavar="GITWASH_URL") 182 | parser.add_option("--gitwash-branch", dest="gitwash_branch", 183 | help="branch in gitwash repository - default %s" 184 | % GITWASH_BRANCH, 185 | default=GITWASH_BRANCH, 186 | metavar="GITWASH_BRANCH") 187 | parser.add_option("--source-suffix", dest="source_suffix", 188 | help="suffix of ReST source files - default '.rst'", 189 | default='.rst', 190 | metavar="SOURCE_SUFFIX") 191 | parser.add_option("--project-url", dest="project_url", 192 | help="URL for project web pages", 193 | default=None, 194 | metavar="PROJECT_URL") 195 | parser.add_option("--project-ml-url", dest="project_ml_url", 196 | help="URL for project mailing list", 197 | default=None, 198 | metavar="PROJECT_ML_URL") 199 | (options, args) = parser.parse_args() 200 | if len(args) < 2: 201 | parser.print_help() 202 | sys.exit() 203 | out_path, project_name = args 204 | if options.repo_name is None: 205 | options.repo_name = project_name 206 | if options.main_gh_user is None: 207 | options.main_gh_user = options.repo_name 208 | repo_path = clone_repo(options.gitwash_url, options.gitwash_branch) 209 | try: 210 | copy_replace((('PROJECTNAME', project_name), 211 | ('REPONAME', options.repo_name), 212 | ('MAIN_GH_USER', options.main_gh_user)), 213 | repo_path, 214 | out_path, 215 | cp_globs=(pjoin('gitwash', '*'),), 216 | rep_globs=('*.rst',), 217 | renames=(('\.rst$', options.source_suffix),)) 218 | make_link_targets(project_name, 219 | options.main_gh_user, 220 | options.repo_name, 221 | pjoin(out_path, 'gitwash', 'known_projects.inc'), 222 | pjoin(out_path, 'gitwash', 'this_project.inc'), 223 | options.project_url, 224 | options.project_ml_url) 225 | finally: 226 | shutil.rmtree(repo_path) 227 | 228 | 229 | if __name__ == '__main__': 230 | main() 231 | --------------------------------------------------------------------------------