├── .gitignore ├── .travis.yml ├── CONTRIBUTING.md ├── LICENSE ├── MANIFEST.in ├── README.md ├── docs ├── .nojekyll ├── Makefile ├── doctrees │ ├── environment.pickle │ ├── gaitpy.doctree │ ├── gaitpy_functions.doctree │ ├── index.doctree │ └── modules.doctree ├── html │ ├── .buildinfo │ ├── _sources │ │ ├── gaitpy.rst.txt │ │ ├── gaitpy_functions.rst.txt │ │ ├── index.rst.txt │ │ └── modules.rst.txt │ ├── _static │ │ ├── basic.css │ │ ├── css │ │ │ ├── badge_only.css │ │ │ └── theme.css │ │ ├── doctools.js │ │ ├── documentation_options.js │ │ ├── file.png │ │ ├── fonts │ │ │ ├── Inconsolata-Bold.ttf │ │ │ ├── Inconsolata-Regular.ttf │ │ │ ├── Inconsolata.ttf │ │ │ ├── Lato-Bold.ttf │ │ │ ├── Lato-Regular.ttf │ │ │ ├── Lato │ │ │ │ ├── lato-bold.eot │ │ │ │ ├── lato-bold.ttf │ │ │ │ ├── lato-bold.woff │ │ │ │ ├── lato-bold.woff2 │ │ │ │ ├── lato-bolditalic.eot │ │ │ │ ├── lato-bolditalic.ttf │ │ │ │ ├── lato-bolditalic.woff │ │ │ │ ├── lato-bolditalic.woff2 │ │ │ │ ├── lato-italic.eot │ │ │ │ ├── lato-italic.ttf │ │ │ │ ├── lato-italic.woff │ │ │ │ ├── lato-italic.woff2 │ │ │ │ ├── lato-regular.eot │ │ │ │ ├── lato-regular.ttf │ │ │ │ ├── lato-regular.woff │ │ │ │ └── lato-regular.woff2 │ │ │ ├── RobotoSlab-Bold.ttf │ │ │ ├── RobotoSlab-Regular.ttf │ │ │ ├── RobotoSlab │ │ │ │ ├── roboto-slab-v7-bold.eot │ │ │ │ ├── roboto-slab-v7-bold.ttf │ │ │ │ ├── roboto-slab-v7-bold.woff │ │ │ │ ├── roboto-slab-v7-bold.woff2 │ │ │ │ ├── roboto-slab-v7-regular.eot │ │ │ │ ├── roboto-slab-v7-regular.ttf │ │ │ │ ├── roboto-slab-v7-regular.woff │ │ │ │ └── roboto-slab-v7-regular.woff2 │ │ │ ├── fontawesome-webfont.eot │ │ │ ├── fontawesome-webfont.svg │ │ │ ├── fontawesome-webfont.ttf │ │ │ ├── fontawesome-webfont.woff │ │ │ └── fontawesome-webfont.woff2 │ │ ├── jquery-3.4.1.js │ │ ├── jquery.js │ │ ├── js │ │ │ ├── modernizr.min.js │ │ │ └── theme.js │ │ ├── language_data.js │ │ ├── minus.png │ │ ├── plus.png │ │ ├── pygments.css │ │ ├── searchtools.js │ │ ├── underscore-1.3.1.js │ │ └── underscore.js │ ├── gaitpy.html │ ├── gaitpy_functions.html │ ├── genindex.html │ ├── index.html │ ├── modules.html │ ├── objects.inv │ ├── py-modindex.html │ ├── search.html │ └── searchindex.js ├── index.html ├── make.bat └── source │ ├── conf.py │ ├── gaitpy.rst │ ├── gaitpy_functions.rst │ ├── index.rst │ └── modules.rst ├── gaitpy ├── __init__.py ├── demo │ ├── __init__.py │ ├── demo.py │ ├── demo_classify_bouts.h5 │ ├── demo_data.csv │ ├── demo_gait_features.csv │ └── demo_plot_contacts.html ├── gait.py ├── model │ ├── feature_order.txt │ └── model.pkl ├── signal_features.py ├── tests │ ├── __init__.py │ ├── test_gait.py │ ├── test_signal_features.py │ └── test_util.py └── util.py ├── paper ├── Figure1.png ├── Figure2.png ├── Figure3.png ├── paper.bib └── paper.md ├── requirements.txt └── setup.py /.gitignore: -------------------------------------------------------------------------------- 1 | *.xml 2 | *.iml 3 | *.pyc 4 | .DS_Store 5 | /dist/ 6 | /*.egg-info 7 | .idea/ 8 | .eggs/ 9 | build/ -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "3.6" 4 | script: pytest 5 | -------------------------------------------------------------------------------- /CONTRIBUTING.md: -------------------------------------------------------------------------------- 1 | # Contributing 2 | We encourage any and all contributions to the project. 
3 | If you want to contribute, please [fork the master branch](https://github.com/matt002/GaitPy/fork) and open a pull request with your edits. 4 | We will reply to pull requests and are more than happy to discuss any problems or address any questions you may have about the project or process. 5 | 6 | ## Documentation 7 | 8 | Improving documentation is a great way to make first contributions. 9 | If you found the wording of any of the documentation unclear, or the flow of the documentation difficult to follow, 10 | please feel free to suggest improvements to make the package more user friendly. 11 | 12 | ## Code 13 | 14 | If you wish to add new features or improve existing code, we are happy to review and approve your suggested changes or 15 | updates. Please just ensure that your addition is well explained, well justified, and clearly commented so that its 16 | purpose and function are evident to the reviewer. 17 | 18 | ## Issues 19 | 20 | If you find any issues, discover any errors, or cannot make the package run, feel free to [file an issue](https://github.com/matt002/GaitPy/issues/new). 21 | When filing your issue please be as detailed and explicit as possible: include the libraries you are using and their versions, the operating system you are using, and any other information you feel is relevant. 22 | Please also include as much information as necessary to reproduce the problem you encountered. 23 | In the event that you find an issue and are able to resolve it on your own, please submit a pull request with the updated code to help improve the package for others. 24 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 Matt 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE.
22 | -------------------------------------------------------------------------------- /MANIFEST.in: -------------------------------------------------------------------------------- 1 | include README.md 2 | include requirements.txt 3 | include LICENSE 4 | include gaitpy/model/model.pkl 5 | include gaitpy/model/feature_order.txt 6 | include gaitpy/demo/demo_classify_bouts.h5 7 | include gaitpy/demo/demo_data.csv 8 | include gaitpy/demo/demo_plot_contacts.html 9 | include gaitpy/demo/demo.py 10 | include gaitpy/demo/demo_gait_features.csv -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # GaitPy 2 | Read and process raw vertical accelerometry data from a sensor on the lower back during gait; calculate clinical gait characteristics. 3 | 4 | [![status](https://joss.theoj.org/papers/a2233c9e27db0b6625dc56a3f7363875/status.svg)](https://joss.theoj.org/papers/a2233c9e27db0b6625dc56a3f7363875) 5 | 6 | [![Build Status](https://travis-ci.com/matt002/GaitPy.svg?branch=master)](https://travis-ci.com/matt002/GaitPy) 7 | 8 | ## Disclaimer 9 | This package is not maintained anymore. I would recommend using Scikit Digital Health, a python package that includes a newer version of GaitPy and various other sensor processing modules (https://github.com/PfizerRD/scikit-digital-health). 10 | 11 | ## Overview 12 | GaitPy provides python functions to read accelerometry data from a single lumbar-mounted sensor and estimate clinical 13 | characteristics of gait. 14 | 15 | - Device location: lower back/lumbar 16 | - Sensing modality: Accelerometer 17 | - Sensor data: Vertical acceleration 18 | - Minimum sampling rate: 50Hz 19 | 20 | [DOCUMENTATION](https://matt002.github.io/GaitPy/html/index.html) 21 | 22 | [COMMUNITY GUIDELINES](https://github.com/matt002/GaitPy/blob/master/CONTRIBUTING.md) 23 | 24 | ## Installation 25 | GaitPy is compatible with python v3.6 on MacOSX, Windows, and Linux and is available through [Anaconda](https://www.anaconda.com/distribution/). 26 | 27 | **Installation via Anaconda (recommended):** 28 | 29 | Once you have Anaconda installed, open a terminal window and create a new environment using the following command. 30 | ```sh 31 | conda create --name my_env python=3.6 32 | ``` 33 | Then, activate your environment using the following command for Mac and Linux. 34 | ```sh 35 | source activate my_env 36 | ``` 37 | For Windows use the following. 38 | ```sh 39 | activate my_env 40 | ``` 41 | Lastly, to install GaitPy, use the following command. 42 | ```sh 43 | conda install gaitpy 44 | ``` 45 | 46 | **Alternatively, you can install via pip:** 47 | ```sh 48 | pip install gaitpy 49 | ``` 50 | 51 | **You can also install GaitPy from source:** 52 | ```sh 53 | git clone https://github.com/matt002/gaitpy 54 | cd gaitpy 55 | python setup.py install 56 | ``` 57 | 58 | ## Basic usage 59 | GaitPy consists of the following 3 functions: 60 | 1. classify_bouts: If your data consists of gait and non-gait data, run the classify_bouts function to first 61 | classify bouts of gait. If your data is solely during gait, this function is not necessary to use. 62 | 2. extract_features: Extract initial contact (IC) and final contact (FC) events from your data and estimate 63 | various temporal and spatial gait features. 64 | 3. plot_contacts: Plot the resulting bout detections and IC/FC events alongside your raw accelerometer signal.
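Before running any of these functions, your recording has to be available either as a csv file path or as a pandas dataframe in the format described in the next paragraph. As a point of reference, here is a minimal, hypothetical sketch of loading a recording with pandas using GaitPy's default column names; the file name is an assumption and `pandas.read_csv` is just one way to produce a suitable dataframe, not a GaitPy requirement.

```python
import pandas as pd

# Hypothetical recording -- substitute the path to your own csv file.
# By default GaitPy expects a 'timestamps' column (Unix milliseconds) and a
# 'y' column (vertical acceleration in m/s^2, with a baseline near -9.8).
raw_data = pd.read_csv('my_lumbar_recording.csv')
print(raw_data[['timestamps', 'y']].head())
```

A dataframe like this (or the csv path itself) can then be passed directly to the `Gaitpy` constructor shown in the example further down.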
65 | 66 | GaitPy accepts a csv file or pandas dataframe that includes a column containing unix timestamps and a column containing 67 | vertical acceleration from a lumbar-mounted sensor. GaitPy makes three assumptions by default: 68 | 1. Timestamps and vertical acceleration columns are labeled 'timestamps' and 'y' respectively, however 69 | this can be changed using the 'ts_col_name' and 'v_acc_col_name' arguments respectively. 70 | 2. Timestamps are in Unix milliseconds and data is in meters per second squared, however this can be changed 71 | using the 'ts_units' and 'v_acc_units' arguments respectively. 72 | 3. Baseline vertical acceleration data is -9.8m/s^2 or -1g. If your baseline data is +9.8m/s^2 or +1g, set the 'flip' 73 | argument to True. 74 | 75 | Additionally, the sample rate of your device (at least 50Hz) and height of the subject must be provided. 76 | 77 | More details about the inputs and outputs of each of these functions can be found in the [documentation](https://matt002.github.io/GaitPy/html/index.html) and Czech et al. 2019.[![status](https://joss.theoj.org/papers/a2233c9e27db0b6625dc56a3f7363875/status.svg)](https://joss.theoj.org/papers/a2233c9e27db0b6625dc56a3f7363875) 78 | 79 | ```python 80 | from gaitpy.gait import Gaitpy 81 | 82 | raw_data = 'raw-data-path or pandas dataframe' 83 | sample_rate = 128 # hertz 84 | subject_height = 170 # centimeters 85 | 86 | #### Create an instance of Gaitpy #### 87 | gaitpy = Gaitpy(raw_data, # Raw data consisting of vertical acceleration from lumbar location and unix timestamps 88 | sample_rate, # Sample rate of raw data (in Hertz) 89 | v_acc_col_name='y', # Vertical acceleration column name 90 | ts_col_name='timestamps', # Timestamp column name 91 | v_acc_units='m/s^2', # Units of vertical acceleration 92 | ts_units='ms', # Units of timestamps 93 | flip=False) # If baseline data is at +1g or +9.8m/s^2, set flip=True 94 | 95 | #### Classify bouts of gait - Optional (use if your data consists of gait and non-gait periods) #### 96 | gait_bouts = gaitpy.classify_bouts(result_file='/my/folder/classified_gait.h5') # File to save results to (None by default) 97 | 98 | #### Extract gait characteristics #### 99 | gait_features = gaitpy.extract_features(subject_height, # Subject height 100 | subject_height_units='centimeter', # Units of subject height 101 | result_file='/my/folder/gait_features.csv', # File to save results to (None by default) 102 | classified_gait=gait_bouts) # Pandas Dataframe or .h5 file results of classify_bouts function (None by default) 103 | 104 | #### Plot results of gait feature extraction #### 105 | gaitpy.plot_contacts(gait_features, # Pandas Dataframe or .csv file results of extract_features function 106 | result_file='/my/folder/plot_contacts.html', # File to save results to (None by default) 107 | show_plot=True) # Specify whether to display plot upon completion (True by default) 108 | 109 | ``` 110 | 111 | ## Running the demo 112 | 113 | The demo file provided lets you test whether GaitPy outputs the expected results on your system. 114 | 115 | You may run the demo directly from a terminal window: 116 | 117 | ```sh 118 | cd gaitpy/demo 119 | python demo.py 120 | ``` 121 | 122 | You may also run the demo via a python interpreter.
In a terminal window, start python by typing: 123 | 124 | ```sh 125 | python 126 | ``` 127 | 128 | In the interpreter window you can then import and run the demo with the following two commands: 129 | 130 | ```python 131 | from gaitpy.demo import demo 132 | demo.run_demo() 133 | ``` 134 | 135 | The demo script will prompt you to type in a results directory. Following the run, results will be saved in the provided 136 | results directory (less than 250kB of data will be saved). Running the demo should take less than a minute, though this 137 | may vary depending on your machine. 138 | 139 | ## Contributing to the project 140 | Please help contribute to the project! See the [CONTRIBUTING.md](https://github.com/matt002/GaitPy/blob/master/CONTRIBUTING.md) file for details. 141 | 142 | ## Acknowledgements 143 | The Digital Medicine & Translational Imaging group at Pfizer, Inc. supported the development of this package. 144 | 145 | ## Author 146 | Matthew Czech 147 | 148 | ## License 149 | GaitPy is under the MIT license 150 | -------------------------------------------------------------------------------- /docs/.nojekyll: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/.nojekyll -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line, and also 5 | # from the environment for the first two. 6 | SPHINXOPTS ?= 7 | SPHINXBUILD ?= sphinx-build 8 | SOURCEDIR = source 9 | BUILDDIR = ../docs 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 21 | -------------------------------------------------------------------------------- /docs/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/doctrees/environment.pickle -------------------------------------------------------------------------------- /docs/doctrees/gaitpy.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/doctrees/gaitpy.doctree -------------------------------------------------------------------------------- /docs/doctrees/gaitpy_functions.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/doctrees/gaitpy_functions.doctree -------------------------------------------------------------------------------- /docs/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/doctrees/index.doctree -------------------------------------------------------------------------------- /docs/doctrees/modules.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/doctrees/modules.doctree -------------------------------------------------------------------------------- /docs/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 3 | config: 0e76b30061cf036e087c372b6f9f1733 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /docs/html/_sources/gaitpy.rst.txt: -------------------------------------------------------------------------------- 1 | GaitPy 2 | ----------------------- 3 | 4 | .. automodule:: gaitpy.gait.Gaitpy 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | .. toctree:: 10 | :maxdepth: 4 11 | 12 | gaitpy_functions 13 | -------------------------------------------------------------------------------- /docs/html/_sources/gaitpy_functions.rst.txt: -------------------------------------------------------------------------------- 1 | classify_bouts 2 | ----------------------- 3 | 4 | .. automodule:: gaitpy.gait.Gaitpy.classify_bouts 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | extract_features 10 | ----------------------- 11 | 12 | .. automodule:: gaitpy.gait.Gaitpy.extract_features 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | plot_contacts 18 | ----------------------- 19 | 20 | .. automodule:: gaitpy.gait.Gaitpy.plot_contacts 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | -------------------------------------------------------------------------------- /docs/html/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | .. GaitPy documentation master file, created by 2 | sphinx-quickstart on Mon Oct 14 13:09:53 2019. 
3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to GaitPy's documentation! 7 | ================================== 8 | 9 | GaitPy provides python functions to read accelerometry data from a single lumbar-mounted sensor and estimate clinical 10 | characteristics of gait. 11 | 12 | The source code is available on Github: `github.com/matt002/GaitPy <https://github.com/matt002/GaitPy>`_ 13 | 14 | - Device location: lower back/lumbar 15 | - Sensing modality: Accelerometer 16 | - Sensor data: Vertical acceleration 17 | - Minimum sampling rate: 50Hz 18 | 19 | Installation 20 | ------------ 21 | GaitPy is compatible with python v3.6 on MacOSX, Windows, and Linux. 22 | 23 | Installation via pip: 24 | 25 | .. code:: 26 | 27 | pip install gaitpy 28 | 29 | You can also install it from source: 30 | 31 | .. code:: 32 | 33 | git clone https://github.com/matt002/gaitpy 34 | cd gaitpy 35 | python setup.py install 36 | 37 | Basic usage 38 | ----------- 39 | Gaitpy consists of the following 3 functions: 40 | 41 | 1. classify_bouts: If your data consists of gait and non-gait data, run the classify_bouts function to first classify bouts of gait. If your data is solely during gait, this function is not necessary to use. 42 | 2. extract_features: Extract initial contact (IC) and final contact (FC) events from your data and estimate various temporal and spatial gait features. 43 | 3. plot_contacts: Plot the resulting bout detections and IC/FC events alongside your raw accelerometer signal. 44 | 45 | Gaitpy accepts a csv file or pandas dataframe that includes a column containing unix timestamps and a column containing 46 | vertical acceleration from a lumbar-mounted sensor. Gaitpy makes three assumptions by default: 47 | 48 | 1. Timestamps and vertical acceleration columns are labeled 'timestamps' and 'y' respectively, however this can be changed using the 'ts_col_name' and 'v_acc_col_name' arguments respectively. 49 | 2. Timestamps are in Unix milliseconds and data is in meters per second squared, however this can be changed using the 'ts_units' and 'v_acc_units' arguments respectively. 50 | 3. Baseline vertical acceleration data is -9.8m/s^2 or -1g. If your baseline data is +9.8m/s^2 or +1g, set the 'flip' argument to True. 51 | 52 | Additionally, the sample rate of your device (at least 50Hz) and height of the subject must be provided. 53 | 54 | More details about the inputs and outputs of each of these functions can be found in `Czech et al. 2019 (in preparation) `_. 55 | 56 | ..
code:: 57 | 58 | from gaitpy.gait import Gaitpy 59 | 60 | raw_data = 'raw-data-path or pandas dataframe' 61 | sample_rate = 128 # hertz 62 | subject_height = 170 # centimeters 63 | 64 | #### Create an instance of Gaitpy #### 65 | gaitpy = Gaitpy(raw_data, # Raw data consisting of vertical acceleration from lumbar location and unix timestamps 66 | sample_rate, # Sample rate of raw data (in Hertz) 67 | v_acc_col_name='y', # Vertical acceleration column name 68 | ts_col_name='timestamps', # Timestamp column name 69 | v_acc_units='m/s^2', # Units of vertical acceleration 70 | ts_units='ms', # Units of timestamps 71 | flip=False) # If baseline data is at +1g or +9.8m/s^2, set flip=True 72 | 73 | #### Classify bouts of gait - Optional (use if your data consists of gait and non-gait periods) #### 74 | gait_bouts = gaitpy.classify_bouts(result_file='/my/folder/classified_gait.h5') # File to save results to (None by default) 75 | 76 | #### Extract gait characteristics #### 77 | gait_features = gaitpy.extract_features(subject_height, # Subject height 78 | subject_height_units='centimeter', # Units of subject height 79 | result_file='/my/folder/gait_features.csv', # File to save results to (None by default) 80 | classified_gait=gait_bouts) # Pandas Dataframe or .h5 file results of classify_bouts function (None by default) 81 | 82 | #### Plot results of gait feature extraction #### 83 | gaitpy.plot_contacts(gait_features, # Pandas Dataframe or .csv file results of extract_features function 84 | result_file='/my/folder/plot_contacts.html', # File to save results to (None by default) 85 | show_plot=True) # Specify whether to display plot upon completion (True by default) 86 | 87 | Running the demo 88 | ---------------- 89 | The demo file provided lets you test whether GaitPy outputs the expected results on your system. 90 | 91 | You may run the demo directly from a terminal window: 92 | 93 | .. code:: 94 | 95 | cd gaitpy/demo 96 | python demo.py 97 | 98 | You may also run the demo via a python interpreter. In a terminal window, start python by typing: 99 | 100 | .. code:: 101 | 102 | python 103 | 104 | 105 | In the interpreter window you can then import and run the demo with the following two commands: 106 | 107 | .. code:: 108 | 109 | from gaitpy.demo import demo 110 | demo.run_demo() 111 | 112 | The demo script will prompt you to type in a results directory. Following the run, results will be saved in the provided 113 | results directory (less than 250kB of data will be saved). Running the demo should take less than a minute, though this 114 | may vary depending on your machine. 115 | 116 | 117 | .. toctree:: 118 | :maxdepth: 4 119 | :caption: Contents: 120 | 121 | modules 122 | 123 | Acknowledgements 124 | ---------------- 125 | The Digital Medicine & Translational Imaging group at Pfizer, Inc. supported the development of this package. 126 | 127 | License 128 | ------- 129 | Gaitpy is under the MIT license 130 | 131 | 132 | 133 | -------------------------------------------------------------------------------- /docs/html/_sources/modules.rst.txt: -------------------------------------------------------------------------------- 1 | Modules 2 | --------- 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | gaitpy 8 | -------------------------------------------------------------------------------- /docs/html/_static/basic.css: -------------------------------------------------------------------------------- 1 | /* 2 | * basic.css 3 | * ~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- basic theme.
6 | * 7 | * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /* -- main layout ----------------------------------------------------------- */ 13 | 14 | div.clearer { 15 | clear: both; 16 | } 17 | 18 | /* -- relbar ---------------------------------------------------------------- */ 19 | 20 | div.related { 21 | width: 100%; 22 | font-size: 90%; 23 | } 24 | 25 | div.related h3 { 26 | display: none; 27 | } 28 | 29 | div.related ul { 30 | margin: 0; 31 | padding: 0 0 0 10px; 32 | list-style: none; 33 | } 34 | 35 | div.related li { 36 | display: inline; 37 | } 38 | 39 | div.related li.right { 40 | float: right; 41 | margin-right: 5px; 42 | } 43 | 44 | /* -- sidebar --------------------------------------------------------------- */ 45 | 46 | div.sphinxsidebarwrapper { 47 | padding: 10px 5px 0 10px; 48 | } 49 | 50 | div.sphinxsidebar { 51 | float: left; 52 | width: 230px; 53 | margin-left: -100%; 54 | font-size: 90%; 55 | word-wrap: break-word; 56 | overflow-wrap : break-word; 57 | } 58 | 59 | div.sphinxsidebar ul { 60 | list-style: none; 61 | } 62 | 63 | div.sphinxsidebar ul ul, 64 | div.sphinxsidebar ul.want-points { 65 | margin-left: 20px; 66 | list-style: square; 67 | } 68 | 69 | div.sphinxsidebar ul ul { 70 | margin-top: 0; 71 | margin-bottom: 0; 72 | } 73 | 74 | div.sphinxsidebar form { 75 | margin-top: 10px; 76 | } 77 | 78 | div.sphinxsidebar input { 79 | border: 1px solid #98dbcc; 80 | font-family: sans-serif; 81 | font-size: 1em; 82 | } 83 | 84 | div.sphinxsidebar #searchbox form.search { 85 | overflow: hidden; 86 | } 87 | 88 | div.sphinxsidebar #searchbox input[type="text"] { 89 | float: left; 90 | width: 80%; 91 | padding: 0.25em; 92 | box-sizing: border-box; 93 | } 94 | 95 | div.sphinxsidebar #searchbox input[type="submit"] { 96 | float: left; 97 | width: 20%; 98 | border-left: none; 99 | padding: 0.25em; 100 | box-sizing: border-box; 101 | } 102 | 103 | 104 | img { 105 | border: 0; 106 | max-width: 100%; 107 | } 108 | 109 | /* -- search page ----------------------------------------------------------- */ 110 | 111 | ul.search { 112 | margin: 10px 0 0 20px; 113 | padding: 0; 114 | } 115 | 116 | ul.search li { 117 | padding: 5px 0 5px 20px; 118 | background-image: url(file.png); 119 | background-repeat: no-repeat; 120 | background-position: 0 7px; 121 | } 122 | 123 | ul.search li a { 124 | font-weight: bold; 125 | } 126 | 127 | ul.search li div.context { 128 | color: #888; 129 | margin: 2px 0 0 30px; 130 | text-align: left; 131 | } 132 | 133 | ul.keywordmatches li.goodmatch a { 134 | font-weight: bold; 135 | } 136 | 137 | /* -- index page ------------------------------------------------------------ */ 138 | 139 | table.contentstable { 140 | width: 90%; 141 | margin-left: auto; 142 | margin-right: auto; 143 | } 144 | 145 | table.contentstable p.biglink { 146 | line-height: 150%; 147 | } 148 | 149 | a.biglink { 150 | font-size: 1.3em; 151 | } 152 | 153 | span.linkdescr { 154 | font-style: italic; 155 | padding-top: 5px; 156 | font-size: 90%; 157 | } 158 | 159 | /* -- general index --------------------------------------------------------- */ 160 | 161 | table.indextable { 162 | width: 100%; 163 | } 164 | 165 | table.indextable td { 166 | text-align: left; 167 | vertical-align: top; 168 | } 169 | 170 | table.indextable ul { 171 | margin-top: 0; 172 | margin-bottom: 0; 173 | list-style-type: none; 174 | } 175 | 176 | table.indextable > tbody > tr > td > ul { 177 | padding-left: 0em; 178 | } 179 | 180 | 
table.indextable tr.pcap { 181 | height: 10px; 182 | } 183 | 184 | table.indextable tr.cap { 185 | margin-top: 10px; 186 | background-color: #f2f2f2; 187 | } 188 | 189 | img.toggler { 190 | margin-right: 3px; 191 | margin-top: 3px; 192 | cursor: pointer; 193 | } 194 | 195 | div.modindex-jumpbox { 196 | border-top: 1px solid #ddd; 197 | border-bottom: 1px solid #ddd; 198 | margin: 1em 0 1em 0; 199 | padding: 0.4em; 200 | } 201 | 202 | div.genindex-jumpbox { 203 | border-top: 1px solid #ddd; 204 | border-bottom: 1px solid #ddd; 205 | margin: 1em 0 1em 0; 206 | padding: 0.4em; 207 | } 208 | 209 | /* -- domain module index --------------------------------------------------- */ 210 | 211 | table.modindextable td { 212 | padding: 2px; 213 | border-collapse: collapse; 214 | } 215 | 216 | /* -- general body styles --------------------------------------------------- */ 217 | 218 | div.body { 219 | min-width: 450px; 220 | max-width: 800px; 221 | } 222 | 223 | div.body p, div.body dd, div.body li, div.body blockquote { 224 | -moz-hyphens: auto; 225 | -ms-hyphens: auto; 226 | -webkit-hyphens: auto; 227 | hyphens: auto; 228 | } 229 | 230 | a.headerlink { 231 | visibility: hidden; 232 | } 233 | 234 | a.brackets:before, 235 | span.brackets > a:before{ 236 | content: "["; 237 | } 238 | 239 | a.brackets:after, 240 | span.brackets > a:after { 241 | content: "]"; 242 | } 243 | 244 | h1:hover > a.headerlink, 245 | h2:hover > a.headerlink, 246 | h3:hover > a.headerlink, 247 | h4:hover > a.headerlink, 248 | h5:hover > a.headerlink, 249 | h6:hover > a.headerlink, 250 | dt:hover > a.headerlink, 251 | caption:hover > a.headerlink, 252 | p.caption:hover > a.headerlink, 253 | div.code-block-caption:hover > a.headerlink { 254 | visibility: visible; 255 | } 256 | 257 | div.body p.caption { 258 | text-align: inherit; 259 | } 260 | 261 | div.body td { 262 | text-align: left; 263 | } 264 | 265 | .first { 266 | margin-top: 0 !important; 267 | } 268 | 269 | p.rubric { 270 | margin-top: 30px; 271 | font-weight: bold; 272 | } 273 | 274 | img.align-left, .figure.align-left, object.align-left { 275 | clear: left; 276 | float: left; 277 | margin-right: 1em; 278 | } 279 | 280 | img.align-right, .figure.align-right, object.align-right { 281 | clear: right; 282 | float: right; 283 | margin-left: 1em; 284 | } 285 | 286 | img.align-center, .figure.align-center, object.align-center { 287 | display: block; 288 | margin-left: auto; 289 | margin-right: auto; 290 | } 291 | 292 | img.align-default, .figure.align-default { 293 | display: block; 294 | margin-left: auto; 295 | margin-right: auto; 296 | } 297 | 298 | .align-left { 299 | text-align: left; 300 | } 301 | 302 | .align-center { 303 | text-align: center; 304 | } 305 | 306 | .align-default { 307 | text-align: center; 308 | } 309 | 310 | .align-right { 311 | text-align: right; 312 | } 313 | 314 | /* -- sidebars -------------------------------------------------------------- */ 315 | 316 | div.sidebar { 317 | margin: 0 0 0.5em 1em; 318 | border: 1px solid #ddb; 319 | padding: 7px 7px 0 7px; 320 | background-color: #ffe; 321 | width: 40%; 322 | float: right; 323 | } 324 | 325 | p.sidebar-title { 326 | font-weight: bold; 327 | } 328 | 329 | /* -- topics ---------------------------------------------------------------- */ 330 | 331 | div.topic { 332 | border: 1px solid #ccc; 333 | padding: 7px 7px 0 7px; 334 | margin: 10px 0 10px 0; 335 | } 336 | 337 | p.topic-title { 338 | font-size: 1.1em; 339 | font-weight: bold; 340 | margin-top: 10px; 341 | } 342 | 343 | /* -- admonitions 
----------------------------------------------------------- */ 344 | 345 | div.admonition { 346 | margin-top: 10px; 347 | margin-bottom: 10px; 348 | padding: 7px; 349 | } 350 | 351 | div.admonition dt { 352 | font-weight: bold; 353 | } 354 | 355 | div.admonition dl { 356 | margin-bottom: 0; 357 | } 358 | 359 | p.admonition-title { 360 | margin: 0px 10px 5px 0px; 361 | font-weight: bold; 362 | } 363 | 364 | div.body p.centered { 365 | text-align: center; 366 | margin-top: 25px; 367 | } 368 | 369 | /* -- tables ---------------------------------------------------------------- */ 370 | 371 | table.docutils { 372 | border: 0; 373 | border-collapse: collapse; 374 | } 375 | 376 | table.align-center { 377 | margin-left: auto; 378 | margin-right: auto; 379 | } 380 | 381 | table.align-default { 382 | margin-left: auto; 383 | margin-right: auto; 384 | } 385 | 386 | table caption span.caption-number { 387 | font-style: italic; 388 | } 389 | 390 | table caption span.caption-text { 391 | } 392 | 393 | table.docutils td, table.docutils th { 394 | padding: 1px 8px 1px 5px; 395 | border-top: 0; 396 | border-left: 0; 397 | border-right: 0; 398 | border-bottom: 1px solid #aaa; 399 | } 400 | 401 | table.footnote td, table.footnote th { 402 | border: 0 !important; 403 | } 404 | 405 | th { 406 | text-align: left; 407 | padding-right: 5px; 408 | } 409 | 410 | table.citation { 411 | border-left: solid 1px gray; 412 | margin-left: 1px; 413 | } 414 | 415 | table.citation td { 416 | border-bottom: none; 417 | } 418 | 419 | th > p:first-child, 420 | td > p:first-child { 421 | margin-top: 0px; 422 | } 423 | 424 | th > p:last-child, 425 | td > p:last-child { 426 | margin-bottom: 0px; 427 | } 428 | 429 | /* -- figures --------------------------------------------------------------- */ 430 | 431 | div.figure { 432 | margin: 0.5em; 433 | padding: 0.5em; 434 | } 435 | 436 | div.figure p.caption { 437 | padding: 0.3em; 438 | } 439 | 440 | div.figure p.caption span.caption-number { 441 | font-style: italic; 442 | } 443 | 444 | div.figure p.caption span.caption-text { 445 | } 446 | 447 | /* -- field list styles ----------------------------------------------------- */ 448 | 449 | table.field-list td, table.field-list th { 450 | border: 0 !important; 451 | } 452 | 453 | .field-list ul { 454 | margin: 0; 455 | padding-left: 1em; 456 | } 457 | 458 | .field-list p { 459 | margin: 0; 460 | } 461 | 462 | .field-name { 463 | -moz-hyphens: manual; 464 | -ms-hyphens: manual; 465 | -webkit-hyphens: manual; 466 | hyphens: manual; 467 | } 468 | 469 | /* -- hlist styles ---------------------------------------------------------- */ 470 | 471 | table.hlist td { 472 | vertical-align: top; 473 | } 474 | 475 | 476 | /* -- other body styles ----------------------------------------------------- */ 477 | 478 | ol.arabic { 479 | list-style: decimal; 480 | } 481 | 482 | ol.loweralpha { 483 | list-style: lower-alpha; 484 | } 485 | 486 | ol.upperalpha { 487 | list-style: upper-alpha; 488 | } 489 | 490 | ol.lowerroman { 491 | list-style: lower-roman; 492 | } 493 | 494 | ol.upperroman { 495 | list-style: upper-roman; 496 | } 497 | 498 | li > p:first-child { 499 | margin-top: 0px; 500 | } 501 | 502 | li > p:last-child { 503 | margin-bottom: 0px; 504 | } 505 | 506 | dl.footnote > dt, 507 | dl.citation > dt { 508 | float: left; 509 | } 510 | 511 | dl.footnote > dd, 512 | dl.citation > dd { 513 | margin-bottom: 0em; 514 | } 515 | 516 | dl.footnote > dd:after, 517 | dl.citation > dd:after { 518 | content: ""; 519 | clear: both; 520 | } 521 | 522 | 
dl.field-list { 523 | display: grid; 524 | grid-template-columns: fit-content(30%) auto; 525 | } 526 | 527 | dl.field-list > dt { 528 | font-weight: bold; 529 | word-break: break-word; 530 | padding-left: 0.5em; 531 | padding-right: 5px; 532 | } 533 | 534 | dl.field-list > dt:after { 535 | content: ":"; 536 | } 537 | 538 | dl.field-list > dd { 539 | padding-left: 0.5em; 540 | margin-top: 0em; 541 | margin-left: 0em; 542 | margin-bottom: 0em; 543 | } 544 | 545 | dl { 546 | margin-bottom: 15px; 547 | } 548 | 549 | dd > p:first-child { 550 | margin-top: 0px; 551 | } 552 | 553 | dd ul, dd table { 554 | margin-bottom: 10px; 555 | } 556 | 557 | dd { 558 | margin-top: 3px; 559 | margin-bottom: 10px; 560 | margin-left: 30px; 561 | } 562 | 563 | dt:target, span.highlighted { 564 | background-color: #fbe54e; 565 | } 566 | 567 | rect.highlighted { 568 | fill: #fbe54e; 569 | } 570 | 571 | dl.glossary dt { 572 | font-weight: bold; 573 | font-size: 1.1em; 574 | } 575 | 576 | .optional { 577 | font-size: 1.3em; 578 | } 579 | 580 | .sig-paren { 581 | font-size: larger; 582 | } 583 | 584 | .versionmodified { 585 | font-style: italic; 586 | } 587 | 588 | .system-message { 589 | background-color: #fda; 590 | padding: 5px; 591 | border: 3px solid red; 592 | } 593 | 594 | .footnote:target { 595 | background-color: #ffa; 596 | } 597 | 598 | .line-block { 599 | display: block; 600 | margin-top: 1em; 601 | margin-bottom: 1em; 602 | } 603 | 604 | .line-block .line-block { 605 | margin-top: 0; 606 | margin-bottom: 0; 607 | margin-left: 1.5em; 608 | } 609 | 610 | .guilabel, .menuselection { 611 | font-family: sans-serif; 612 | } 613 | 614 | .accelerator { 615 | text-decoration: underline; 616 | } 617 | 618 | .classifier { 619 | font-style: oblique; 620 | } 621 | 622 | .classifier:before { 623 | font-style: normal; 624 | margin: 0.5em; 625 | content: ":"; 626 | } 627 | 628 | abbr, acronym { 629 | border-bottom: dotted 1px; 630 | cursor: help; 631 | } 632 | 633 | /* -- code displays --------------------------------------------------------- */ 634 | 635 | pre { 636 | overflow: auto; 637 | overflow-y: hidden; /* fixes display issues on Chrome browsers */ 638 | } 639 | 640 | span.pre { 641 | -moz-hyphens: none; 642 | -ms-hyphens: none; 643 | -webkit-hyphens: none; 644 | hyphens: none; 645 | } 646 | 647 | td.linenos pre { 648 | padding: 5px 0px; 649 | border: 0; 650 | background-color: transparent; 651 | color: #aaa; 652 | } 653 | 654 | table.highlighttable { 655 | margin-left: 0.5em; 656 | } 657 | 658 | table.highlighttable td { 659 | padding: 0 0.5em 0 0.5em; 660 | } 661 | 662 | div.code-block-caption { 663 | padding: 2px 5px; 664 | font-size: small; 665 | } 666 | 667 | div.code-block-caption code { 668 | background-color: transparent; 669 | } 670 | 671 | div.code-block-caption + div > div.highlight > pre { 672 | margin-top: 0; 673 | } 674 | 675 | div.code-block-caption span.caption-number { 676 | padding: 0.1em 0.3em; 677 | font-style: italic; 678 | } 679 | 680 | div.code-block-caption span.caption-text { 681 | } 682 | 683 | div.literal-block-wrapper { 684 | padding: 1em 1em 0; 685 | } 686 | 687 | div.literal-block-wrapper div.highlight { 688 | margin: 0; 689 | } 690 | 691 | code.descname { 692 | background-color: transparent; 693 | font-weight: bold; 694 | font-size: 1.2em; 695 | } 696 | 697 | code.descclassname { 698 | background-color: transparent; 699 | } 700 | 701 | code.xref, a code { 702 | background-color: transparent; 703 | font-weight: bold; 704 | } 705 | 706 | h1 code, h2 code, h3 code, h4 code, h5 code, h6 
code { 707 | background-color: transparent; 708 | } 709 | 710 | .viewcode-link { 711 | float: right; 712 | } 713 | 714 | .viewcode-back { 715 | float: right; 716 | font-family: sans-serif; 717 | } 718 | 719 | div.viewcode-block:target { 720 | margin: -1px -10px; 721 | padding: 0 10px; 722 | } 723 | 724 | /* -- math display ---------------------------------------------------------- */ 725 | 726 | img.math { 727 | vertical-align: middle; 728 | } 729 | 730 | div.body div.math p { 731 | text-align: center; 732 | } 733 | 734 | span.eqno { 735 | float: right; 736 | } 737 | 738 | span.eqno a.headerlink { 739 | position: relative; 740 | left: 0px; 741 | z-index: 1; 742 | } 743 | 744 | div.math:hover a.headerlink { 745 | visibility: visible; 746 | } 747 | 748 | /* -- printout stylesheet --------------------------------------------------- */ 749 | 750 | @media print { 751 | div.document, 752 | div.documentwrapper, 753 | div.bodywrapper { 754 | margin: 0 !important; 755 | width: 100%; 756 | } 757 | 758 | div.sphinxsidebar, 759 | div.related, 760 | div.footer, 761 | #top-link { 762 | display: none; 763 | } 764 | } -------------------------------------------------------------------------------- /docs/html/_static/css/badge_only.css: -------------------------------------------------------------------------------- 1 | .fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../fonts/fontawesome-webfont.eot");src:url("../fonts/fontawesome-webfont.eot?#iefix") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff") format("woff"),url("../fonts/fontawesome-webfont.ttf") format("truetype"),url("../fonts/fontawesome-webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions .rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions 
.rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} 2 | -------------------------------------------------------------------------------- /docs/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | */ 33 | jQuery.urldecode = function(x) { 34 | return decodeURIComponent(x).replace(/\+/g, ' '); 35 | }; 36 | 37 | /** 38 | * small helper function to urlencode strings 39 | */ 40 | jQuery.urlencode = encodeURIComponent; 41 | 42 | /** 43 | * This function returns the parsed url parameters of the 44 | * current request. Multiple values per key are supported, 45 | * it will always return arrays of strings for the value parts. 46 | */ 47 | jQuery.getQueryParameters = function(s) { 48 | if (typeof s === 'undefined') 49 | s = document.location.search; 50 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 51 | var result = {}; 52 | for (var i = 0; i < parts.length; i++) { 53 | var tmp = parts[i].split('=', 2); 54 | var key = jQuery.urldecode(tmp[0]); 55 | var value = jQuery.urldecode(tmp[1]); 56 | if (key in result) 57 | result[key].push(value); 58 | else 59 | result[key] = [value]; 60 | } 61 | return result; 62 | }; 63 | 64 | /** 65 | * highlight a given string on a jquery object by wrapping it in 66 | * span elements with the given class name. 
67 | */ 68 | jQuery.fn.highlightText = function(text, className) { 69 | function highlight(node, addItems) { 70 | if (node.nodeType === 3) { 71 | var val = node.nodeValue; 72 | var pos = val.toLowerCase().indexOf(text); 73 | if (pos >= 0 && 74 | !jQuery(node.parentNode).hasClass(className) && 75 | !jQuery(node.parentNode).hasClass("nohighlight")) { 76 | var span; 77 | var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg"); 78 | if (isInSVG) { 79 | span = document.createElementNS("http://www.w3.org/2000/svg", "tspan"); 80 | } else { 81 | span = document.createElement("span"); 82 | span.className = className; 83 | } 84 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 85 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 86 | document.createTextNode(val.substr(pos + text.length)), 87 | node.nextSibling)); 88 | node.nodeValue = val.substr(0, pos); 89 | if (isInSVG) { 90 | var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect"); 91 | var bbox = node.parentElement.getBBox(); 92 | rect.x.baseVal.value = bbox.x; 93 | rect.y.baseVal.value = bbox.y; 94 | rect.width.baseVal.value = bbox.width; 95 | rect.height.baseVal.value = bbox.height; 96 | rect.setAttribute('class', className); 97 | addItems.push({ 98 | "parent": node.parentNode, 99 | "target": rect}); 100 | } 101 | } 102 | } 103 | else if (!jQuery(node).is("button, select, textarea")) { 104 | jQuery.each(node.childNodes, function() { 105 | highlight(this, addItems); 106 | }); 107 | } 108 | } 109 | var addItems = []; 110 | var result = this.each(function() { 111 | highlight(this, addItems); 112 | }); 113 | for (var i = 0; i < addItems.length; ++i) { 114 | jQuery(addItems[i].parent).before(addItems[i].target); 115 | } 116 | return result; 117 | }; 118 | 119 | /* 120 | * backward compatibility for jQuery.browser 121 | * This will be supported until firefox bug is fixed. 122 | */ 123 | if (!jQuery.browser) { 124 | jQuery.uaMatch = function(ua) { 125 | ua = ua.toLowerCase(); 126 | 127 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 128 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 129 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 130 | /(msie) ([\w.]+)/.exec(ua) || 131 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 132 | []; 133 | 134 | return { 135 | browser: match[ 1 ] || "", 136 | version: match[ 2 ] || "0" 137 | }; 138 | }; 139 | jQuery.browser = {}; 140 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 141 | } 142 | 143 | /** 144 | * Small JavaScript module for the documentation. 145 | */ 146 | var Documentation = { 147 | 148 | init : function() { 149 | this.fixFirefoxAnchorBug(); 150 | this.highlightSearchWords(); 151 | this.initIndexTable(); 152 | if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) { 153 | this.initOnKeyListeners(); 154 | } 155 | }, 156 | 157 | /** 158 | * i18n support 159 | */ 160 | TRANSLATIONS : {}, 161 | PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; }, 162 | LOCALE : 'unknown', 163 | 164 | // gettext and ngettext don't access this so that the functions 165 | // can safely bound to a different name (_ = Documentation.gettext) 166 | gettext : function(string) { 167 | var translated = Documentation.TRANSLATIONS[string]; 168 | if (typeof translated === 'undefined') 169 | return string; 170 | return (typeof translated === 'string') ? 
translated : translated[0]; 171 | }, 172 | 173 | ngettext : function(singular, plural, n) { 174 | var translated = Documentation.TRANSLATIONS[singular]; 175 | if (typeof translated === 'undefined') 176 | return (n == 1) ? singular : plural; 177 | return translated[Documentation.PLURALEXPR(n)]; 178 | }, 179 | 180 | addTranslations : function(catalog) { 181 | for (var key in catalog.messages) 182 | this.TRANSLATIONS[key] = catalog.messages[key]; 183 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 184 | this.LOCALE = catalog.locale; 185 | }, 186 | 187 | /** 188 | * add context elements like header anchor links 189 | */ 190 | addContextElements : function() { 191 | $('div[id] > :header:first').each(function() { 192 | $('\u00B6'). 193 | attr('href', '#' + this.id). 194 | attr('title', _('Permalink to this headline')). 195 | appendTo(this); 196 | }); 197 | $('dt[id]').each(function() { 198 | $('\u00B6'). 199 | attr('href', '#' + this.id). 200 | attr('title', _('Permalink to this definition')). 201 | appendTo(this); 202 | }); 203 | }, 204 | 205 | /** 206 | * workaround a firefox stupidity 207 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 208 | */ 209 | fixFirefoxAnchorBug : function() { 210 | if (document.location.hash && $.browser.mozilla) 211 | window.setTimeout(function() { 212 | document.location.href += ''; 213 | }, 10); 214 | }, 215 | 216 | /** 217 | * highlight the search words provided in the url in the text 218 | */ 219 | highlightSearchWords : function() { 220 | var params = $.getQueryParameters(); 221 | var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : []; 222 | if (terms.length) { 223 | var body = $('div.body'); 224 | if (!body.length) { 225 | body = $('body'); 226 | } 227 | window.setTimeout(function() { 228 | $.each(terms, function() { 229 | body.highlightText(this.toLowerCase(), 'highlighted'); 230 | }); 231 | }, 10); 232 | $('') 234 | .appendTo($('#searchbox')); 235 | } 236 | }, 237 | 238 | /** 239 | * init the domain index toggle buttons 240 | */ 241 | initIndexTable : function() { 242 | var togglers = $('img.toggler').click(function() { 243 | var src = $(this).attr('src'); 244 | var idnum = $(this).attr('id').substr(7); 245 | $('tr.cg-' + idnum).toggle(); 246 | if (src.substr(-9) === 'minus.png') 247 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 248 | else 249 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 250 | }).css('display', ''); 251 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 252 | togglers.click(); 253 | } 254 | }, 255 | 256 | /** 257 | * helper function to hide the search marks again 258 | */ 259 | hideSearchWords : function() { 260 | $('#searchbox .highlight-link').fadeOut(300); 261 | $('span.highlighted').removeClass('highlighted'); 262 | }, 263 | 264 | /** 265 | * make the url absolute 266 | */ 267 | makeURL : function(relativeURL) { 268 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 269 | }, 270 | 271 | /** 272 | * get the current relative url 273 | */ 274 | getCurrentURL : function() { 275 | var path = document.location.pathname; 276 | var parts = path.split(/\//); 277 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 278 | if (this === '..') 279 | parts.pop(); 280 | }); 281 | var url = parts.join('/'); 282 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 283 | }, 284 | 285 | initOnKeyListeners: function() { 286 | $(document).keyup(function(event) { 287 | var activeElementType = document.activeElement.tagName; 288 
| // don't navigate when in search box or textarea 289 | if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') { 290 | switch (event.keyCode) { 291 | case 37: // left 292 | var prevHref = $('link[rel="prev"]').prop('href'); 293 | if (prevHref) { 294 | window.location.href = prevHref; 295 | return false; 296 | } 297 | case 39: // right 298 | var nextHref = $('link[rel="next"]').prop('href'); 299 | if (nextHref) { 300 | window.location.href = nextHref; 301 | return false; 302 | } 303 | } 304 | } 305 | }); 306 | } 307 | }; 308 | 309 | // quick alias for translations 310 | _ = Documentation.gettext; 311 | 312 | $(document).ready(function() { 313 | Documentation.init(); 314 | }); 315 | -------------------------------------------------------------------------------- /docs/html/_static/documentation_options.js: -------------------------------------------------------------------------------- 1 | var DOCUMENTATION_OPTIONS = { 2 | URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'), 3 | VERSION: '1.5', 4 | LANGUAGE: 'None', 5 | COLLAPSE_INDEX: false, 6 | FILE_SUFFIX: '.html', 7 | HAS_SOURCE: true, 8 | SOURCELINK_SUFFIX: '.txt', 9 | NAVIGATION_WITH_KEYS: false 10 | }; -------------------------------------------------------------------------------- /docs/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/file.png -------------------------------------------------------------------------------- /docs/html/_static/fonts/Inconsolata-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Inconsolata-Bold.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Inconsolata-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Inconsolata-Regular.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Inconsolata.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Inconsolata.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato-Bold.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato-Regular.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bold.eot: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bold.eot -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bold.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bold.woff -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bolditalic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bolditalic.eot -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bolditalic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bolditalic.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bolditalic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bolditalic.woff -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-bolditalic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-bolditalic.woff2 -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-italic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-italic.eot -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-italic.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-italic.woff: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-italic.woff -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-italic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-italic.woff2 -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-regular.eot -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-regular.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-regular.woff -------------------------------------------------------------------------------- /docs/html/_static/fonts/Lato/lato-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/Lato/lato-regular.woff2 -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab-Bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab-Bold.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab-Regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab-Regular.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff -------------------------------------------------------------------------------- /docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 -------------------------------------------------------------------------------- /docs/html/_static/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/html/_static/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/html/_static/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/html/_static/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- 
/docs/html/_static/js/modernizr.min.js: -------------------------------------------------------------------------------- 1 | /* Modernizr 2.6.2 (Custom Build) | MIT & BSD 2 | * Build: http://modernizr.com/download/#-fontface-backgroundsize-borderimage-borderradius-boxshadow-flexbox-hsla-multiplebgs-opacity-rgba-textshadow-cssanimations-csscolumns-generatedcontent-cssgradients-cssreflections-csstransforms-csstransforms3d-csstransitions-applicationcache-canvas-canvastext-draganddrop-hashchange-history-audio-video-indexeddb-input-inputtypes-localstorage-postmessage-sessionstorage-websockets-websqldatabase-webworkers-geolocation-inlinesvg-smil-svg-svgclippaths-touch-webgl-shiv-mq-cssclasses-addtest-prefixed-teststyles-testprop-testallprops-hasevent-prefixes-domprefixes-load 3 | */ 4 | ;window.Modernizr=function(a,b,c){function D(a){j.cssText=a}function E(a,b){return D(n.join(a+";")+(b||""))}function F(a,b){return typeof a===b}function G(a,b){return!!~(""+a).indexOf(b)}function H(a,b){for(var d in a){var e=a[d];if(!G(e,"-")&&j[e]!==c)return b=="pfx"?e:!0}return!1}function I(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:F(f,"function")?f.bind(d||b):f}return!1}function J(a,b,c){var d=a.charAt(0).toUpperCase()+a.slice(1),e=(a+" "+p.join(d+" ")+d).split(" ");return F(b,"string")||F(b,"undefined")?H(e,b):(e=(a+" "+q.join(d+" ")+d).split(" "),I(e,b,c))}function K(){e.input=function(c){for(var d=0,e=c.length;d',a,""].join(""),l.id=h,(m?l:n).innerHTML+=f,n.appendChild(l),m||(n.style.background="",n.style.overflow="hidden",k=g.style.overflow,g.style.overflow="hidden",g.appendChild(n)),i=c(l,a),m?l.parentNode.removeChild(l):(n.parentNode.removeChild(n),g.style.overflow=k),!!i},z=function(b){var c=a.matchMedia||a.msMatchMedia;if(c)return c(b).matches;var d;return y("@media "+b+" { #"+h+" { position: absolute; } }",function(b){d=(a.getComputedStyle?getComputedStyle(b,null):b.currentStyle)["position"]=="absolute"}),d},A=function(){function d(d,e){e=e||b.createElement(a[d]||"div"),d="on"+d;var f=d in e;return f||(e.setAttribute||(e=b.createElement("div")),e.setAttribute&&e.removeAttribute&&(e.setAttribute(d,""),f=F(e[d],"function"),F(e[d],"undefined")||(e[d]=c),e.removeAttribute(d))),e=null,f}var a={select:"input",change:"input",submit:"form",reset:"form",error:"img",load:"img",abort:"img"};return d}(),B={}.hasOwnProperty,C;!F(B,"undefined")&&!F(B.call,"undefined")?C=function(a,b){return B.call(a,b)}:C=function(a,b){return b in a&&F(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=w.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(w.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(w.call(arguments)))};return e}),s.flexbox=function(){return J("flexWrap")},s.canvas=function(){var a=b.createElement("canvas");return!!a.getContext&&!!a.getContext("2d")},s.canvastext=function(){return!!e.canvas&&!!F(b.createElement("canvas").getContext("2d").fillText,"function")},s.webgl=function(){return!!a.WebGLRenderingContext},s.touch=function(){var c;return"ontouchstart"in a||a.DocumentTouch&&b instanceof DocumentTouch?c=!0:y(["@media (",n.join("touch-enabled),("),h,")","{#modernizr{top:9px;position:absolute}}"].join(""),function(a){c=a.offsetTop===9}),c},s.geolocation=function(){return"geolocation"in 
navigator},s.postmessage=function(){return!!a.postMessage},s.websqldatabase=function(){return!!a.openDatabase},s.indexedDB=function(){return!!J("indexedDB",a)},s.hashchange=function(){return A("hashchange",a)&&(b.documentMode===c||b.documentMode>7)},s.history=function(){return!!a.history&&!!history.pushState},s.draganddrop=function(){var a=b.createElement("div");return"draggable"in a||"ondragstart"in a&&"ondrop"in a},s.websockets=function(){return"WebSocket"in a||"MozWebSocket"in a},s.rgba=function(){return D("background-color:rgba(150,255,150,.5)"),G(j.backgroundColor,"rgba")},s.hsla=function(){return D("background-color:hsla(120,40%,100%,.5)"),G(j.backgroundColor,"rgba")||G(j.backgroundColor,"hsla")},s.multiplebgs=function(){return D("background:url(https://),url(https://),red url(https://)"),/(url\s*\(.*?){3}/.test(j.background)},s.backgroundsize=function(){return J("backgroundSize")},s.borderimage=function(){return J("borderImage")},s.borderradius=function(){return J("borderRadius")},s.boxshadow=function(){return J("boxShadow")},s.textshadow=function(){return b.createElement("div").style.textShadow===""},s.opacity=function(){return E("opacity:.55"),/^0.55$/.test(j.opacity)},s.cssanimations=function(){return J("animationName")},s.csscolumns=function(){return J("columnCount")},s.cssgradients=function(){var a="background-image:",b="gradient(linear,left top,right bottom,from(#9f9),to(white));",c="linear-gradient(left top,#9f9, white);";return D((a+"-webkit- ".split(" ").join(b+a)+n.join(c+a)).slice(0,-a.length)),G(j.backgroundImage,"gradient")},s.cssreflections=function(){return J("boxReflect")},s.csstransforms=function(){return!!J("transform")},s.csstransforms3d=function(){var a=!!J("perspective");return a&&"webkitPerspective"in g.style&&y("@media (transform-3d),(-webkit-transform-3d){#modernizr{left:9px;position:absolute;height:3px;}}",function(b,c){a=b.offsetLeft===9&&b.offsetHeight===3}),a},s.csstransitions=function(){return J("transition")},s.fontface=function(){var a;return y('@font-face {font-family:"font";src:url("https://")}',function(c,d){var e=b.getElementById("smodernizr"),f=e.sheet||e.styleSheet,g=f?f.cssRules&&f.cssRules[0]?f.cssRules[0].cssText:f.cssText||"":"";a=/src/i.test(g)&&g.indexOf(d.split(" ")[0])===0}),a},s.generatedcontent=function(){var a;return y(["#",h,"{font:0/0 a}#",h,':after{content:"',l,'";visibility:hidden;font:3px/1 a}'].join(""),function(b){a=b.offsetHeight>=3}),a},s.video=function(){var a=b.createElement("video"),c=!1;try{if(c=!!a.canPlayType)c=new Boolean(c),c.ogg=a.canPlayType('video/ogg; codecs="theora"').replace(/^no$/,""),c.h264=a.canPlayType('video/mp4; codecs="avc1.42E01E"').replace(/^no$/,""),c.webm=a.canPlayType('video/webm; codecs="vp8, vorbis"').replace(/^no$/,"")}catch(d){}return c},s.audio=function(){var a=b.createElement("audio"),c=!1;try{if(c=!!a.canPlayType)c=new Boolean(c),c.ogg=a.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),c.mp3=a.canPlayType("audio/mpeg;").replace(/^no$/,""),c.wav=a.canPlayType('audio/wav; codecs="1"').replace(/^no$/,""),c.m4a=(a.canPlayType("audio/x-m4a;")||a.canPlayType("audio/aac;")).replace(/^no$/,"")}catch(d){}return c},s.localstorage=function(){try{return localStorage.setItem(h,h),localStorage.removeItem(h),!0}catch(a){return!1}},s.sessionstorage=function(){try{return 
sessionStorage.setItem(h,h),sessionStorage.removeItem(h),!0}catch(a){return!1}},s.webworkers=function(){return!!a.Worker},s.applicationcache=function(){return!!a.applicationCache},s.svg=function(){return!!b.createElementNS&&!!b.createElementNS(r.svg,"svg").createSVGRect},s.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="",(a.firstChild&&a.firstChild.namespaceURI)==r.svg},s.smil=function(){return!!b.createElementNS&&/SVGAnimate/.test(m.call(b.createElementNS(r.svg,"animate")))},s.svgclippaths=function(){return!!b.createElementNS&&/SVGClipPath/.test(m.call(b.createElementNS(r.svg,"clipPath")))};for(var L in s)C(s,L)&&(x=L.toLowerCase(),e[x]=s[L](),v.push((e[x]?"":"no-")+x));return e.input||K(),e.addTest=function(a,b){if(typeof a=="object")for(var d in a)C(a,d)&&e.addTest(d,a[d]);else{a=a.toLowerCase();if(e[a]!==c)return e;b=typeof b=="function"?b():b,typeof f!="undefined"&&f&&(g.className+=" "+(b?"":"no-")+a),e[a]=b}return e},D(""),i=k=null,function(a,b){function k(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function l(){var a=r.elements;return typeof a=="string"?a.split(" "):a}function m(a){var b=i[a[g]];return b||(b={},h++,a[g]=h,i[h]=b),b}function n(a,c,f){c||(c=b);if(j)return c.createElement(a);f||(f=m(c));var g;return f.cache[a]?g=f.cache[a].cloneNode():e.test(a)?g=(f.cache[a]=f.createElem(a)).cloneNode():g=f.createElem(a),g.canHaveChildren&&!d.test(a)?f.frag.appendChild(g):g}function o(a,c){a||(a=b);if(j)return a.createDocumentFragment();c=c||m(a);var d=c.frag.cloneNode(),e=0,f=l(),g=f.length;for(;e",f="hidden"in a,j=a.childNodes.length==1||function(){b.createElement("a");var a=b.createDocumentFragment();return typeof a.cloneNode=="undefined"||typeof a.createDocumentFragment=="undefined"||typeof a.createElement=="undefined"}()}catch(c){f=!0,j=!0}})();var r={elements:c.elements||"abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video",shivCSS:c.shivCSS!==!1,supportsUnknownElements:j,shivMethods:c.shivMethods!==!1,type:"default",shivDocument:q,createElement:n,createDocumentFragment:o};a.html5=r,q(b)}(this,b),e._version=d,e._prefixes=n,e._domPrefixes=q,e._cssomPrefixes=p,e.mq=z,e.hasEvent=A,e.testProp=function(a){return H([a])},e.testAllProps=J,e.testStyles=y,e.prefixed=function(a,b,c){return b?J(a,b,c):J(a,"pfx")},g.className=g.className.replace(/(^|\s)no-js(\s|$)/,"$1$2")+(f?" 
js "+v.join(" "):""),e}(this,this.document),function(a,b,c){function d(a){return"[object Function]"==o.call(a)}function e(a){return"string"==typeof a}function f(){}function g(a){return!a||"loaded"==a||"complete"==a||"uninitialized"==a}function h(){var a=p.shift();q=1,a?a.t?m(function(){("c"==a.t?B.injectCss:B.injectJs)(a.s,0,a.a,a.x,a.e,1)},0):(a(),h()):q=0}function i(a,c,d,e,f,i,j){function k(b){if(!o&&g(l.readyState)&&(u.r=o=1,!q&&h(),l.onload=l.onreadystatechange=null,b)){"img"!=a&&m(function(){t.removeChild(l)},50);for(var d in y[c])y[c].hasOwnProperty(d)&&y[c][d].onload()}}var j=j||B.errorTimeout,l=b.createElement(a),o=0,r=0,u={t:d,s:c,e:f,a:i,x:j};1===y[c]&&(r=1,y[c]=[]),"object"==a?l.data=c:(l.src=c,l.type=a),l.width=l.height="0",l.onerror=l.onload=l.onreadystatechange=function(){k.call(this,r)},p.splice(e,0,u),"img"!=a&&(r||2===y[c]?(t.insertBefore(l,s?null:n),m(k,j)):y[c].push(l))}function j(a,b,c,d,f){return q=0,b=b||"j",e(a)?i("c"==b?v:u,a,b,this.i++,c,d,f):(p.splice(this.i++,0,a),1==p.length&&h()),this}function k(){var a=B;return a.loader={load:j,i:0},a}var l=b.documentElement,m=a.setTimeout,n=b.getElementsByTagName("script")[0],o={}.toString,p=[],q=0,r="MozAppearance"in l.style,s=r&&!!b.createRange().compareNode,t=s?l:n.parentNode,l=a.opera&&"[object Opera]"==o.call(a.opera),l=!!b.attachEvent&&!l,u=r?"object":l?"script":"img",v=l?"script":u,w=Array.isArray||function(a){return"[object Array]"==o.call(a)},x=[],y={},z={timeout:function(a,b){return b.length&&(a.timeout=b[0]),a}},A,B;B=function(a){function b(a){var a=a.split("!"),b=x.length,c=a.pop(),d=a.length,c={url:c,origUrl:c,prefixes:a},e,f,g;for(f=0;f"),i("table.docutils.footnote").wrap("
"),i("table.docutils.citation").wrap("
"),i(".wy-menu-vertical ul").not(".simple").siblings("a").each(function(){var e=i(this);expand=i(''),expand.on("click",function(n){return t.toggleCurrent(e),n.stopPropagation(),!1}),e.prepend(expand)})},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),i=e.find('[href="'+n+'"]');if(0===i.length){var t=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(i=e.find('[href="#'+t.attr("id")+'"]')).length&&(i=e.find('[href="#"]'))}0this.docHeight||(this.navBar.scrollTop(i),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",function(){this.linkScroll=!1})},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current"),e.siblings().find("li.current").removeClass("current"),e.find("> ul li.current").removeClass("current"),e.toggleClass("current")}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:e.exports.ThemeNav,StickyNav:e.exports.ThemeNav}),function(){for(var r=0,n=["ms","moz","webkit","o"],e=0;e0 62 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1 63 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1 64 | var s_v = "^(" + C + ")?" + v; // vowel in stem 65 | 66 | this.stemWord = function (w) { 67 | var stem; 68 | var suffix; 69 | var firstch; 70 | var origword = w; 71 | 72 | if (w.length < 3) 73 | return w; 74 | 75 | var re; 76 | var re2; 77 | var re3; 78 | var re4; 79 | 80 | firstch = w.substr(0,1); 81 | if (firstch == "y") 82 | w = firstch.toUpperCase() + w.substr(1); 83 | 84 | // Step 1a 85 | re = /^(.+?)(ss|i)es$/; 86 | re2 = /^(.+?)([^s])s$/; 87 | 88 | if (re.test(w)) 89 | w = w.replace(re,"$1$2"); 90 | else if (re2.test(w)) 91 | w = w.replace(re2,"$1$2"); 92 | 93 | // Step 1b 94 | re = /^(.+?)eed$/; 95 | re2 = /^(.+?)(ed|ing)$/; 96 | if (re.test(w)) { 97 | var fp = re.exec(w); 98 | re = new RegExp(mgr0); 99 | if (re.test(fp[1])) { 100 | re = /.$/; 101 | w = w.replace(re,""); 102 | } 103 | } 104 | else if (re2.test(w)) { 105 | var fp = re2.exec(w); 106 | stem = fp[1]; 107 | re2 = new RegExp(s_v); 108 | if (re2.test(stem)) { 109 | w = stem; 110 | re2 = /(at|bl|iz)$/; 111 | re3 = new RegExp("([^aeiouylsz])\\1$"); 112 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 113 | if (re2.test(w)) 114 | w = w + "e"; 115 | else if (re3.test(w)) { 116 | re = /.$/; 117 | w = w.replace(re,""); 118 | } 119 | else if (re4.test(w)) 120 | w = w + "e"; 121 | } 122 | } 123 | 124 | // Step 1c 125 | re = /^(.+?)y$/; 126 | if (re.test(w)) { 127 | var fp = re.exec(w); 128 | stem = fp[1]; 129 | re = new RegExp(s_v); 130 | if (re.test(stem)) 131 | w = stem + "i"; 132 | } 133 | 134 | // Step 2 135 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/; 136 | if (re.test(w)) { 137 | var fp = re.exec(w); 138 | stem = fp[1]; 139 | suffix = fp[2]; 140 | re = new RegExp(mgr0); 141 | if (re.test(stem)) 142 | w = stem + step2list[suffix]; 143 | } 144 | 145 | // Step 3 146 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/; 147 | if (re.test(w)) { 148 | var fp = re.exec(w); 149 | stem = fp[1]; 150 | suffix = fp[2]; 151 | re = new RegExp(mgr0); 152 | if (re.test(stem)) 153 | w = stem + step3list[suffix]; 154 | } 155 | 156 | // Step 4 157 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/; 158 | re2 = 
/^(.+?)(s|t)(ion)$/; 159 | if (re.test(w)) { 160 | var fp = re.exec(w); 161 | stem = fp[1]; 162 | re = new RegExp(mgr1); 163 | if (re.test(stem)) 164 | w = stem; 165 | } 166 | else if (re2.test(w)) { 167 | var fp = re2.exec(w); 168 | stem = fp[1] + fp[2]; 169 | re2 = new RegExp(mgr1); 170 | if (re2.test(stem)) 171 | w = stem; 172 | } 173 | 174 | // Step 5 175 | re = /^(.+?)e$/; 176 | if (re.test(w)) { 177 | var fp = re.exec(w); 178 | stem = fp[1]; 179 | re = new RegExp(mgr1); 180 | re2 = new RegExp(meq1); 181 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$"); 182 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem)))) 183 | w = stem; 184 | } 185 | re = /ll$/; 186 | re2 = new RegExp(mgr1); 187 | if (re.test(w) && re2.test(w)) { 188 | re = /.$/; 189 | w = w.replace(re,""); 190 | } 191 | 192 | // and turn initial Y back to y 193 | if (firstch == "y") 194 | w = firstch.toLowerCase() + w.substr(1); 195 | return w; 196 | } 197 | } 198 | 199 | 200 | 201 | 202 | 203 | var splitChars = (function() { 204 | var result = {}; 205 | var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648, 206 | 1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702, 207 | 2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971, 208 | 2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345, 209 | 3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761, 210 | 3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823, 211 | 4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125, 212 | 8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695, 213 | 11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587, 214 | 43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141]; 215 | var i, j, start, end; 216 | for (i = 0; i < singles.length; i++) { 217 | result[singles[i]] = true; 218 | } 219 | var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709], 220 | [722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161], 221 | [1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568], 222 | [1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807], 223 | [1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047], 224 | [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383], 225 | [2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450], 226 | [2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547], 227 | [2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673], 228 | [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820], 229 | [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946], 230 | [2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023], 231 | [3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173], 232 | [3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332], 233 | [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481], 234 | [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718], 235 | [3723, 3724], [3726, 3731], [3752, 3753], [3764, 3772], [3774, 3775], [3783, 3791], 236 | [3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], 
[3980, 4095], 237 | [4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205], 238 | [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687], 239 | [4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968], 240 | [4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869], 241 | [5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102], 242 | [6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271], 243 | [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592], 244 | [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822], 245 | [6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167], 246 | [7204, 7231], [7242, 7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959], 247 | [7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143], 248 | [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318], 249 | [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483], 250 | [8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101], 251 | [10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567], 252 | [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292], 253 | [12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444], 254 | [12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783], 255 | [12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311], 256 | [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511], 257 | [42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774], 258 | [42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071], 259 | [43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263], 260 | [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519], 261 | [43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647], 262 | [43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967], 263 | [44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295], 264 | [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274], 265 | [64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007], 266 | [65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381], 267 | [65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]]; 268 | for (i = 0; i < ranges.length; i++) { 269 | start = ranges[i][0]; 270 | end = ranges[i][1]; 271 | for (j = start; j <= end; j++) { 272 | result[j] = true; 273 | } 274 | } 275 | return result; 276 | })(); 277 | 278 | function splitQuery(query) { 279 | var result = []; 280 | var start = -1; 281 | for (var i = 0; i < query.length; i++) { 282 | if (splitChars[query.charCodeAt(i)]) { 283 | if (start !== -1) { 284 | result.push(query.slice(start, i)); 285 | start = -1; 286 | } 287 | } else if (start === -1) { 288 | start = i; 289 | } 290 | } 291 | if (start !== -1) { 292 | result.push(query.slice(start)); 293 | } 294 | return result; 295 | } 296 | 297 | 298 | -------------------------------------------------------------------------------- /docs/html/_static/minus.png: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/minus.png -------------------------------------------------------------------------------- /docs/html/_static/plus.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/_static/plus.png -------------------------------------------------------------------------------- /docs/html/_static/pygments.css: -------------------------------------------------------------------------------- 1 | .highlight .hll { background-color: #ffffcc } 2 | .highlight { background: #f8f8f8; } 3 | .highlight .c { color: #408080; font-style: italic } /* Comment */ 4 | .highlight .err { border: 1px solid #FF0000 } /* Error */ 5 | .highlight .k { color: #008000; font-weight: bold } /* Keyword */ 6 | .highlight .o { color: #666666 } /* Operator */ 7 | .highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */ 8 | .highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */ 9 | .highlight .cp { color: #BC7A00 } /* Comment.Preproc */ 10 | .highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */ 11 | .highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */ 12 | .highlight .cs { color: #408080; font-style: italic } /* Comment.Special */ 13 | .highlight .gd { color: #A00000 } /* Generic.Deleted */ 14 | .highlight .ge { font-style: italic } /* Generic.Emph */ 15 | .highlight .gr { color: #FF0000 } /* Generic.Error */ 16 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */ 17 | .highlight .gi { color: #00A000 } /* Generic.Inserted */ 18 | .highlight .go { color: #888888 } /* Generic.Output */ 19 | .highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */ 20 | .highlight .gs { font-weight: bold } /* Generic.Strong */ 21 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */ 22 | .highlight .gt { color: #0044DD } /* Generic.Traceback */ 23 | .highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */ 24 | .highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */ 25 | .highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */ 26 | .highlight .kp { color: #008000 } /* Keyword.Pseudo */ 27 | .highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */ 28 | .highlight .kt { color: #B00040 } /* Keyword.Type */ 29 | .highlight .m { color: #666666 } /* Literal.Number */ 30 | .highlight .s { color: #BA2121 } /* Literal.String */ 31 | .highlight .na { color: #7D9029 } /* Name.Attribute */ 32 | .highlight .nb { color: #008000 } /* Name.Builtin */ 33 | .highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */ 34 | .highlight .no { color: #880000 } /* Name.Constant */ 35 | .highlight .nd { color: #AA22FF } /* Name.Decorator */ 36 | .highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */ 37 | .highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */ 38 | .highlight .nf { color: #0000FF } /* Name.Function */ 39 | .highlight .nl { color: #A0A000 } /* Name.Label */ 40 | .highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */ 41 | .highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */ 42 | .highlight .nv { color: #19177C } /* Name.Variable */ 43 | 
.highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */ 44 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */ 45 | .highlight .mb { color: #666666 } /* Literal.Number.Bin */ 46 | .highlight .mf { color: #666666 } /* Literal.Number.Float */ 47 | .highlight .mh { color: #666666 } /* Literal.Number.Hex */ 48 | .highlight .mi { color: #666666 } /* Literal.Number.Integer */ 49 | .highlight .mo { color: #666666 } /* Literal.Number.Oct */ 50 | .highlight .sa { color: #BA2121 } /* Literal.String.Affix */ 51 | .highlight .sb { color: #BA2121 } /* Literal.String.Backtick */ 52 | .highlight .sc { color: #BA2121 } /* Literal.String.Char */ 53 | .highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */ 54 | .highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */ 55 | .highlight .s2 { color: #BA2121 } /* Literal.String.Double */ 56 | .highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */ 57 | .highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */ 58 | .highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */ 59 | .highlight .sx { color: #008000 } /* Literal.String.Other */ 60 | .highlight .sr { color: #BB6688 } /* Literal.String.Regex */ 61 | .highlight .s1 { color: #BA2121 } /* Literal.String.Single */ 62 | .highlight .ss { color: #19177C } /* Literal.String.Symbol */ 63 | .highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */ 64 | .highlight .fm { color: #0000FF } /* Name.Function.Magic */ 65 | .highlight .vc { color: #19177C } /* Name.Variable.Class */ 66 | .highlight .vg { color: #19177C } /* Name.Variable.Global */ 67 | .highlight .vi { color: #19177C } /* Name.Variable.Instance */ 68 | .highlight .vm { color: #19177C } /* Name.Variable.Magic */ 69 | .highlight .il { color: #666666 } /* Literal.Number.Integer.Long */ -------------------------------------------------------------------------------- /docs/html/_static/underscore.js: -------------------------------------------------------------------------------- 1 | // Underscore.js 1.3.1 2 | // (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc. 3 | // Underscore is freely distributable under the MIT license. 4 | // Portions of Underscore are inspired or borrowed from Prototype, 5 | // Oliver Steele's Functional, and John Resig's Micro-Templating. 
6 | // For all details and documentation: 7 | // http://documentcloud.github.com/underscore 8 | (function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source== 9 | c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c, 10 | h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each= 11 | b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e2;a== 12 | null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect= 13 | function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e= 14 | e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck= 15 | function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var 
e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;bd?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a, 17 | c,d){d||(d=b.identity);for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e=0;d--)b=[a[d].apply(this,b)];return b[0]}}; 24 | b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments, 25 | 1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)}; 26 | b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"}; 27 | b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e/g,">").replace(/"/g,""").replace(/'/g,"'").replace(/\//g,"/")};b.mixin=function(a){j(b.functions(a), 28 | function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+ 29 | u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new 
Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]= 30 | function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain= 31 | true;return this};m.prototype.value=function(){return this._wrapped}}).call(this); 32 | -------------------------------------------------------------------------------- /docs/html/gaitpy.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | GaitPy — GaitPy 1.5 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 |
46 | 47 | 101 | 102 |
103 | 104 | 105 | 111 | 112 | 113 |
114 | 115 |
116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 | 133 |
134 | 135 |
    136 | 137 |
  • Docs »
  • 138 | 139 |
  • Modules »
  • 140 | 141 |
  • GaitPy
  • 142 | 143 | 144 |
  • 145 | 146 | 147 | View page source 148 | 149 | 150 |
  • 151 | 152 |
153 | 154 | 155 |
156 |
157 |
158 |
159 | 160 |
161 |

GaitPy

162 |

Gait feature extraction and bout classification from a single accelerometer at the lumbar location. This class includes functions for:

163 |
    164 |
  • Continuous wavelet-based method of gait kinematic feature extraction.

  • 165 |
  • Machine learning-based method of bout classification.

  • 166 |
  • Visualizing results.

  • 167 |
168 |
169 |
Parameters:
170 |
data: str or pandas.core.frame.DataFrame
    171 |
  • Option 1: Pandas dataframe containing a unix timestamp column and vertical acceleration data during gait, both of type float.

  • 172 |
  • Option 2: File path of a .csv file containing a timestamp column and vertical acceleration data during gait. One column should contain unix timestamps of type float (by default GaitPy assumes the column title is ‘timestamps’ with units in milliseconds). A second column should contain vertical acceleration of type float (by default GaitPy assumes the column title is ‘y’ with units in m/s^2).

  • 173 |
174 |
175 |
sample_rate: int or float

Sampling rate of accelerometer data in Hertz.

176 |
177 |
v_acc_col_name: str

Column name of the vertical acceleration data (‘y’ by default).

178 |
179 |
ts_col_name: str

Column name of the timestamps (‘timestamps’ by default).

180 |
181 |
v_acc_units: str

Units of vertical acceleration data (‘m/s^2’ by default). Options:

182 |
    183 |
  • ‘m/s^2’ = meters per second squared

  • 184 |
  • ‘g’ = standard gravity

  • 185 |
186 |
187 |
ts_units: str

Units of timestamps (‘ms’ by default). Options:

188 |
    189 |
  • ‘s’ = seconds

  • 190 |
  • ‘ms’ = milliseconds

  • 191 |
  • ‘us’ = microseconds

  • 192 |
193 |
194 |
flip: bool

Boolean specifying whether to flip the vertical acceleration data before analysis (False by default). The algorithm 195 | assumes that baseline vertical acceleration data is at -9.8 m/s^2 or -1 g (i.e. if baseline data in the vertical 196 | direction is 1 g, set the ‘flip’ argument to True).
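For orientation, a minimal usage sketch based solely on the parameters documented above; the CSV path, 50 Hz sample rate, and column names below are placeholder assumptions rather than required values:

.. code:: python

    from gaitpy.gait import Gaitpy

    # Placeholder input: any .csv file (or pandas DataFrame) with a unix timestamp column
    # and a vertical acceleration column recorded at the lumbar location
    gaitpy = Gaitpy('my_data.csv',             # data: .csv path or pandas DataFrame
                    sample_rate=50,            # sampling rate in Hz
                    v_acc_col_name='y',        # default column name
                    ts_col_name='timestamps',  # default column name
                    v_acc_units='m/s^2',       # default units
                    ts_units='ms',             # default units
                    flip=False)                # baseline vertical acceleration already at -1 g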

197 |
198 |
199 |
200 |
201 |
202 | 207 |
208 |
209 | 210 | 211 |
212 | 213 |
214 |
215 | 216 | 224 | 225 | 226 |
227 | 228 |
229 |

230 | © Copyright 2019, Matt D. Czech 231 | 232 |

233 |
234 | Built with Sphinx using a theme provided by Read the Docs. 235 | 236 |
237 | 238 |
239 |
240 | 241 |
242 | 243 |
244 | 245 | 246 | 247 | 252 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | -------------------------------------------------------------------------------- /docs/html/gaitpy_functions.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | classify_bouts — GaitPy 1.5 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 |
45 | 46 | 100 | 101 |
102 | 103 | 104 | 110 | 111 | 112 |
113 | 114 |
115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 |
133 | 134 |
    135 | 136 |
  • Docs »
  • 137 | 138 |
  • Modules »
  • 139 | 140 |
  • GaitPy »
  • 141 | 142 |
  • classify_bouts
  • 143 | 144 | 145 |
  • 146 | 147 | 148 | View page source 149 | 150 | 151 |
  • 152 | 153 |
154 | 155 | 156 |
157 |
158 |
159 |
160 | 161 |
162 |

classify_bouts

163 |

Gait bout classification using acceleration data in the vertical direction from the lumbar location.

164 |
165 |
Parameters:
166 |
result_file: str

Optional argument that accepts a .h5 filepath string to save the resulting predictions to. 167 | None by default (e.g. myfolder/myfile.h5).

168 |
169 |
170 |
171 |
Returns:
172 |
pandas.core.frame.DataFrame

Pandas dataframe containing results of bout classification procedure (classify_bouts)
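A brief sketch of calling classify_bouts on a Gaitpy instance; the CSV path, sample rate, and output filename are placeholders, not values required by GaitPy:

.. code:: python

    from gaitpy.gait import Gaitpy

    gaitpy = Gaitpy('my_data.csv', 50)                          # placeholder CSV path, 50 Hz
    bouts = gaitpy.classify_bouts(result_file='gait_bouts.h5')  # predictions also saved to the optional .h5 file
    print(bouts.head())                                         # pandas DataFrame of gait bout predictions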

173 |
174 |
175 |
176 |
177 |
178 |
179 |

extract_features

180 |

Inverted pendulum and continuous wavelet-based method of gait feature detection.

181 |
182 |
Parameters:
183 |
subject_height: int or float

Height of the subject. Accepts centimeters by default.

184 |
185 |
subject_height_units: str

Units of provided subject height. Centimeters by default.

186 |
    187 |
  • options: ‘centimeters’, ‘inches’, ‘meters’

  • 188 |
189 |
190 |
sensor_height_ratio: float

Height of the sensor relative to the subject's height, calculated as sensor height / subject height.

191 |
192 |
result_file: str

Optional argument that accepts a .csv filepath string to save the resulting gait feature dataframe to. 193 | None by default (e.g. myfolder/myfile.csv).

194 |
195 |
classified_gait: str or pandas.core.frame.DataFrame

Pandas dataframe containing results of gait bout classification procedure (classify_bouts)

196 |

OR

197 |

File path of .h5 file containing results of gait bout classification procedure (classify_bouts)

198 |
199 |
ic_prom: int

Prominence of the initial contact peak detection.

200 |
201 |
fc_prom: int

Prominence of the final contact peak detection.

202 |
203 |
204 |
205 |
Returns:
206 |
pandas.core.frame.DataFrame

Pandas dataframe containing results of feature extraction procedure (extract_features)
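A brief sketch of calling extract_features using the parameters documented above; the CSV path, sample rate, subject height, and output filename are placeholders:

.. code:: python

    from gaitpy.gait import Gaitpy

    gaitpy = Gaitpy('my_data.csv', 50)        # placeholder CSV path, 50 Hz
    bouts = gaitpy.classify_bouts()           # optional: only needed when the data includes non-gait periods
    gait_features = gaitpy.extract_features(
        subject_height=170,                   # placeholder height, centimeters by default
        subject_height_units='centimeters',
        classified_gait=bouts,                # DataFrame or .h5 path produced by classify_bouts
        result_file='gait_features.csv')      # optional .csv output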

207 |
208 |
209 |
210 |
211 |
212 |
213 |

plot_contacts

214 |

Visualization of bouts, initial contacts, and final contacts from lumbar-based gait feature extraction.

215 |
216 |
Parameters:
217 |
gait_features: pandas.DataFrame or str

Pandas dataframe containing results of extract_features function

218 |

OR

219 |

File path of .csv file containing results of extract_features function

220 |
221 |
result_file: str

Optional argument that accepts an .html filepath string to save the resulting gait event plot to. 222 | None by default (e.g. myfolder/myfile.html).

223 |
224 |
show_plot: bool

Optional boolean argument that specifies whether your plot is displayed. True by default.
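A brief sketch of calling plot_contacts on the output of extract_features; the CSV path, sample rate, subject height, and output filename are placeholders:

.. code:: python

    from gaitpy.gait import Gaitpy

    gaitpy = Gaitpy('my_data.csv', 50)                      # placeholder CSV path, 50 Hz
    gait_features = gaitpy.extract_features(subject_height=170,
                                            subject_height_units='centimeters')
    gaitpy.plot_contacts(gait_features,
                         result_file='contacts_plot.html',  # optional .html output
                         show_plot=True)                    # display the gait event plot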

225 |
226 |
227 |
228 |
229 |
230 | 231 | 232 |
233 | 234 |
235 |
236 | 237 | 243 | 244 | 245 |
246 | 247 |
248 |

249 | © Copyright 2019, Matt D. Czech 250 | 251 |

252 |
253 | Built with Sphinx using a theme provided by Read the Docs. 254 | 255 |
256 | 257 |
258 |
259 | 260 |
261 | 262 |
263 | 264 | 265 | 266 | 271 | 272 | 273 | 274 | 275 | 276 | 277 | 278 | -------------------------------------------------------------------------------- /docs/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Index — GaitPy 1.5 documentation 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 |
45 | 46 | 92 | 93 |
94 | 95 | 96 | 102 | 103 | 104 |
105 | 106 |
107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 |
125 | 126 |
    127 | 128 |
  • Docs »
  • 129 | 130 |
  • Index
  • 131 | 132 | 133 |
  • 134 | 135 | 136 | 137 |
  • 138 | 139 |
140 | 141 | 142 |
143 |
144 |
145 |
146 | 147 | 148 |

Index

149 | 150 |
151 | G 152 | 153 |
154 |

G

155 | 156 | 162 | 168 |
169 | 170 | 171 | 172 |
173 | 174 |
175 |
176 | 177 | 178 |
179 | 180 |
181 |

182 | © Copyright 2019, Matt D. Czech 183 | 184 |

185 |
186 | Built with Sphinx using a theme provided by Read the Docs. 187 | 188 |
189 | 190 |
191 |
192 | 193 |
194 | 195 |
196 | 197 | 198 | 199 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | -------------------------------------------------------------------------------- /docs/html/modules.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Modules — GaitPy 1.5 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 |
46 | 47 | 96 | 97 |
98 | 99 | 100 | 106 | 107 | 108 |
109 | 110 |
111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 |
129 | 130 |
    131 | 132 |
  • Docs »
  • 133 | 134 |
  • Modules
  • 135 | 136 | 137 |
  • 138 | 139 | 140 | View page source 141 | 142 | 143 |
  • 144 | 145 |
146 | 147 | 148 |
149 |
150 |
151 |
152 | 153 |
154 |

Modules

155 |
156 | 164 |
165 |
166 | 167 | 168 |
169 | 170 |
171 |
172 | 173 | 181 | 182 | 183 |
184 | 185 |
186 |

187 | © Copyright 2019, Matt D. Czech 188 | 189 |

190 |
191 | Built with Sphinx using a theme provided by Read the Docs. 192 | 193 |
194 | 195 |
196 |
197 | 198 |
199 | 200 |
201 | 202 | 203 | 204 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 216 | -------------------------------------------------------------------------------- /docs/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/docs/html/objects.inv -------------------------------------------------------------------------------- /docs/html/py-modindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Python Module Index — GaitPy 1.5 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 |
47 | 48 | 94 | 95 |
96 | 97 | 98 | 104 | 105 | 106 |
107 | 108 |
109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 |
127 | 128 |
    129 | 130 |
  • Docs »
  • 131 | 132 |
  • Python Module Index
  • 133 | 134 | 135 |
  • 136 | 137 |
  • 138 | 139 |
140 | 141 | 142 |
143 |
144 |
145 |
146 | 147 | 148 |

Python Module Index

149 | 150 |
151 | g 152 |
153 | 154 | 155 | 156 | 158 | 159 | 161 | 164 | 165 | 166 | 169 | 170 | 171 | 174 | 175 | 176 | 179 | 180 | 181 | 184 |
 
157 | g
162 | gaitpy 163 |
    167 | gaitpy.gait.Gaitpy 168 |
    172 | gaitpy.gait.Gaitpy.classify_bouts 173 |
    177 | gaitpy.gait.Gaitpy.extract_features 178 |
    182 | gaitpy.gait.Gaitpy.plot_contacts 183 |
185 | 186 | 187 |
188 | 189 |
190 |
191 | 192 | 193 |
194 | 195 |
196 |

197 | © Copyright 2019, Matt D. Czech 198 | 199 |

200 |
201 | Built with Sphinx using a theme provided by Read the Docs. 202 | 203 |
204 | 205 |
206 |
207 | 208 |
209 | 210 |
211 | 212 | 213 | 214 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | 226 | -------------------------------------------------------------------------------- /docs/html/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Search — GaitPy 1.5 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 |
45 | 46 | 92 | 93 |
94 | 95 | 96 | 102 | 103 | 104 |
105 | 106 |
107 | 108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 |
125 | 126 |
    127 | 128 |
  • Docs »
  • 129 | 130 |
  • Search
  • 131 | 132 | 133 |
  • 134 | 135 | 136 | 137 |
  • 138 | 139 |
140 | 141 | 142 |
143 |
144 |
145 |
146 | 147 | 155 | 156 | 157 |
158 | 159 |
160 | 161 |
162 | 163 |
164 |
165 | 166 | 167 |
168 | 169 |
170 |

171 | © Copyright 2019, Matt D. Czech 172 | 173 |

174 |
175 | Built with Sphinx using a theme provided by Read the Docs. 176 | 177 |
178 | 179 |
180 |
181 | 182 |
183 | 184 |
185 | 186 | 187 | 188 | 193 | 194 | 195 | 196 | 197 | 198 | 201 | 202 | 203 | 204 | 205 | 206 | 207 | -------------------------------------------------------------------------------- /docs/html/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({docnames:["gaitpy","gaitpy_functions","index","modules"],envversion:{"sphinx.domains.c":1,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":1,"sphinx.domains.javascript":1,"sphinx.domains.math":2,"sphinx.domains.python":1,"sphinx.domains.rst":1,"sphinx.domains.std":1,"sphinx.ext.intersphinx":1,"sphinx.ext.viewcode":1,sphinx:56},filenames:["gaitpy.rst","gaitpy_functions.rst","index.rst","modules.rst"],objects:{"gaitpy.gait":{Gaitpy:[0,0,0,"-"]},"gaitpy.gait.Gaitpy":{classify_bouts:[1,0,0,"-"],extract_features:[1,0,0,"-"],plot_contacts:[1,0,0,"-"]}},objnames:{"0":["py","module","Python module"]},objtypes:{"0":"py:module"},terms:{"250kb":2,"50hz":2,"boolean":[0,1],"class":0,"default":[0,1,2],"final":[1,2],"float":[0,1],"function":[0,1,2],"import":2,"int":[0,1],"return":1,"true":[0,1,2],One:0,The:2,a2233c9e27db0b6625dc56a3f7363875:[],about:2,acceler:[0,1,2],acceleromet:[0,2],accelerometri:2,accept:[1,2],addition:2,algorithm:0,all_bout_gait_featur:[],alongsid:2,also:2,analysi:0,argument:[0,1,2],assum:0,assumpt:2,avail:2,back:2,base:[0,1],baselin:[0,2],been:[],befor:0,bool:[0,1],both:0,bout:[0,1,2],calcul:1,can:2,centimet:[1,2],chang:2,characterist:2,classif:[0,1],classifi:2,classified_gait:[1,2],classify_bout:[0,2,3],clinic:2,clone:2,code:2,column:[0,2],com:2,command:2,compat:2,complet:2,consist:2,contact:[1,2],contain:[0,1,2],content:2,continu:[0,1],core:[0,1],creat:2,csv:[0,1,2],czech:2,data:[0,1,2],datafram:[0,1,2],depend:2,detail:2,detect:[1,2],develop:2,devic:2,differ:[],digit:2,direct:[0,1],directli:2,directori:2,displai:[1,2],down_sampl:[],downsampl:[],dure:[0,2],each:2,estim:2,event:[1,2],expect:2,extract:[0,1,2],extract_featur:[0,2,3],fals:[0,2],fc_prom:1,featur:[0,1,2],file:[0,1,2],filepath:1,first:2,flip:[0,2],folder:2,follow:2,found:2,frame:[0,1],from:[0,1,2],gait:[0,1,2],gait_bout:2,gait_featur:[1,2],gaitpi:3,git:2,github:2,graviti:0,group:2,has:[],height:[1,2],help:[],hertz:[0,2],host:[],howev:2,html:[1,2],http:2,ic_prom:1,imag:2,inc:2,inch:1,includ:[0,2],index:[],initi:[1,2],input:2,instanc:2,interpret:2,invert:1,joss:[],kinemat:0,label:2,learn:0,least:2,less:2,let:2,linux:2,locat:[0,1,2],lower:2,lumbar:[0,1,2],mac:[],machin:[0,2],macosx:2,mai:2,make:2,matt002:2,medicin:2,meter:[0,1,2],method:[0,1],microsecond:0,milli:0,millisecond:[0,2],minimum:2,minut:2,mit:2,modal:2,modul:2,more:2,mount:2,multipl:[],must:2,myfil:1,myfold:1,name:[0,2],necessari:2,non:2,none:[1,2],optim:[],option:[0,1,2],org:[],osx:[],ouput:2,output:2,packag:2,panda:[0,1,2],paper:[],paramet:[0,1],path:[0,1,2],peak:1,pendulum:1,per:[0,2],period:2,pfizer:2,pip:2,plot:[1,2],plot_contact:[0,2,3],predict:1,prepar:2,procedur:1,promin:1,prompt:2,provid:[1,2],python:2,rate:[0,2],raw:2,raw_data:2,read:2,rel:1,respect:2,result:[0,1,2],result_fil:[1,2],run_demo:2,sampl:[0,2],sample_r:[0,2],save:[1,2],script:2,second:[0,2],sens:2,sensor:[1,2],sensor_height_ratio:1,set:[0,2],setup:2,should:[0,2],show_plot:[1,2],signal:2,singl:[0,2],sole:2,sourc:2,spatial:2,specifi:[0,1,2],squar:[0,2],standard:0,start:2,statu:[],str:[0,1],string:1,subject:[1,2],subject_height:[1,2],subject_height_unit:[1,2],support:2,svg:[],system:2,take:2,tempor:2,termin:2,test:2,than:2,theoj:[],thi:[0,2],though:2,three:2,tim
estamp:[0,2],titl:0,transform:[],translat:2,ts_col_nam:[0,2],ts_unit:[0,2],two:2,type:[0,2],under:2,unit:[0,1,2],unix:[0,2],upon:2,use:2,using:[1,2],v_acc_col_nam:[0,2],v_acc_unit:[0,2],vari:2,variou:2,vertic:[0,1,2],via:2,visual:[0,1],wavelet:[0,1],whether:[0,1,2],window:2,you:2,your:[1,2]},titles:["GaitPy","classify_bouts","Welcome to GaitPy\u2019s documentation!","Modules"],titleterms:{acknowledg:2,basic:2,classify_bout:1,demo:2,document:2,extract_featur:1,gaitpi:[0,2],instal:2,licens:2,modul:3,plot_contact:1,run:2,usag:2,welcom:2}}) -------------------------------------------------------------------------------- /docs/index.html: -------------------------------------------------------------------------------- 1 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | 13 | if "%1" == "" goto help 14 | 15 | %SPHINXBUILD% >NUL 2>NUL 16 | if errorlevel 9009 ( 17 | echo. 18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 19 | echo.installed, then set the SPHINXBUILD environment variable to point 20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 21 | echo.may add the Sphinx directory to PATH. 22 | echo. 23 | echo.If you don't have Sphinx installed, grab it from 24 | echo.http://sphinx-doc.org/ 25 | exit /b 1 26 | ) 27 | 28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 29 | goto end 30 | 31 | :help 32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% 33 | 34 | :end 35 | popd 36 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | # Configuration file for the Sphinx documentation builder. 2 | # 3 | # This file only contains a selection of the most common options. For a full 4 | # list see the documentation: 5 | # https://www.sphinx-doc.org/en/master/usage/configuration.html 6 | 7 | # -- Path setup -------------------------------------------------------------- 8 | 9 | # If extensions (or modules to document with autodoc) are in another directory, 10 | # add these directories to sys.path here. If the directory is relative to the 11 | # documentation root, use os.path.abspath to make it absolute, like shown here. 12 | 13 | import os 14 | import sys 15 | sys.path.insert(0, os.path.abspath('../../')) 16 | 17 | # -- Project information ----------------------------------------------------- 18 | 19 | project = 'GaitPy' 20 | copyright = '2019, Matt D. Czech' 21 | author = 'Matt D. Czech' 22 | 23 | # The full version, including alpha/beta/rc tags 24 | release = '1.5' 25 | 26 | # -- General configuration --------------------------------------------------- 27 | 28 | # Add any Sphinx extension module names here, as strings. They can be 29 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 30 | # ones. 31 | extensions = ['sphinx.ext.autodoc', 32 | 'sphinx.ext.doctest', 33 | 'sphinx.ext.intersphinx', 34 | 'sphinx.ext.viewcode'] 35 | 36 | # Class functions ordered by how they are in code 37 | autodoc_member_order = 'bysource' 38 | 39 | # Add any paths that contain templates here, relative to this directory. 
40 | templates_path = ['_templates'] 41 | 42 | # source_suffix = ['.rst', '.md'] 43 | source_suffix = '.rst' 44 | 45 | # The master toctree document. 46 | master_doc = 'index' 47 | 48 | # List of patterns, relative to source directory, that match files and 49 | # directories to ignore when looking for source files. 50 | # This pattern also affects html_static_path and html_extra_path. 51 | exclude_patterns = [] 52 | 53 | # -- Options for HTML output ------------------------------------------------- 54 | 55 | # The theme to use for HTML and HTML Help pages. See the documentation for 56 | # a list of builtin themes. 57 | # 58 | html_theme = 'sphinx_rtd_theme' 59 | 60 | # Add any paths that contain custom static files (such as style sheets) here, 61 | # relative to this directory. They are copied after the builtin static files, 62 | # so a file named "default.css" will overwrite the builtin "default.css". 63 | html_static_path = ['_static'] -------------------------------------------------------------------------------- /docs/source/gaitpy.rst: -------------------------------------------------------------------------------- 1 | GaitPy 2 | ----------------------- 3 | 4 | .. automodule:: gaitpy.gait.Gaitpy 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | .. toctree:: 10 | :maxdepth: 4 11 | 12 | gaitpy_functions 13 | -------------------------------------------------------------------------------- /docs/source/gaitpy_functions.rst: -------------------------------------------------------------------------------- 1 | classify_bouts 2 | ----------------------- 3 | 4 | .. automodule:: gaitpy.gait.Gaitpy.classify_bouts 5 | :members: 6 | :undoc-members: 7 | :show-inheritance: 8 | 9 | extract_features 10 | ----------------------- 11 | 12 | .. automodule:: gaitpy.gait.Gaitpy.extract_features 13 | :members: 14 | :undoc-members: 15 | :show-inheritance: 16 | 17 | plot_contacts 18 | ----------------------- 19 | 20 | .. automodule:: gaitpy.gait.Gaitpy.plot_contacts 21 | :members: 22 | :undoc-members: 23 | :show-inheritance: 24 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. GaitPy documentation master file, created by 2 | sphinx-quickstart on Mon Oct 14 13:09:53 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to GaitPy's documentation! 7 | ================================== 8 | 9 | GaitPy provides python functions to read accelerometry data from a single lumbar-mounted sensor and estimate clinical 10 | characteristics of gait. 11 | 12 | The source code is available on Github: `github.com/matt002/GaitPy `_ 13 | 14 | - Device location: lower back/lumbar 15 | - Sensing modality: Accelerometer 16 | - Sensor data: Vertical acceleration 17 | - Minimum sampling rate: 50Hz 18 | 19 | Installation 20 | ------------ 21 | GaitPy is compatible with python v3.6 on MacOSX, Windows, and Linux. 22 | 23 | Installation via pip: 24 | 25 | .. code:: 26 | 27 | pip install gaitpy 28 | 29 | You can also install it from source: 30 | 31 | .. code:: 32 | 33 | git clone https://github.com/matt002/gaitpy 34 | cd gaitpy 35 | python setup.py install 36 | 37 | Basic usage 38 | ----------- 39 | Gaitpy consists of the following 3 functions: 40 | 41 | 1. classify_bouts: If your data consists of gait and non-gait data, run the classify_bouts function to first classify bouts of gait. 
If your data is solely during gait, this function is not necessary to use. 42 | 2. extract_features: Extract initial contact (IC) and final contact (FC) events from your data and estimate various temporal and spatial gait features. 43 | 3. plot_contacts: Plot the resulting bout detections and IC/FC events alongside your raw accelerometer signal. 44 | 45 | Gaitpy accepts a csv file or pandas dataframe that includes a column containing unix timestamps and a column containing 46 | vertical acceleration from a lumbar-mounted sensor. Gaitpy makes three assumptions by default: 47 | 48 | 1. Timestamps and vertical acceleration columns are labeled 'timestamps' and 'y' respectively, however this can be changed using the 'v_acc_col_name' and 'ts_col_name' arguments respectively. 49 | 2. Timestamps are in Unix milliseconds and data is in meters per second squared, however this can be be changed using the 'ts_units' and 'v_acc_units' arguments respectively. 50 | 3. Baseline vertical acceleration data is -9.8m/s^2 or -1g. If your baseline data is +9.8m/s^2 or +1g, set the 'flip' argument to True. 51 | 52 | Additionally, the sample rate of your device (at least 50Hz) and height of the subject must be provided. 53 | 54 | More details about the inputs and ouputs of each of these functions can be found in `Czech et al. 2019 (in preparation) `_. 55 | 56 | .. code:: 57 | 58 | from gaitpy.gait import Gaitpy 59 | 60 | raw_data = 'raw-data-path or pandas dataframe' 61 | sample_rate = 128 # hertz 62 | subject_height = 170 # centimeters 63 | 64 | #### Create an instance of Gaitpy #### 65 | gaitpy = Gaitpy(raw_data, # Raw data consisting of vertical acceleration from lumbar location and unix timestamps 66 | sample_rate, # Sample rate of raw data (in Hertz) 67 | v_acc_col_name='y', # Vertical acceleration column name 68 | ts_col_name='timestamps', # Timestamp column name 69 | v_acc_units='m/s^2', # Units of vertical acceleration 70 | ts_units='ms', # Units of timestamps 71 | flip=False) # If baseline data is at +1g or +9.8m/s^2, set flip=True 72 | 73 | #### Classify bouts of gait - Optional (use if your data consists of gait and non-gait periods)#### 74 | gait_bouts = gaitpy.classify_bouts(result_file='/my/folder/classified_gait.h5') # File to save results to (None by default) 75 | 76 | #### Extract gait characteristics #### 77 | gait_features = gaitpy.extract_features(subject_height, # Subject height 78 | subject_height_units='centimeter', # Units of subject height 79 | result_file='/my/folder/gait_features.csv', # File to save results to (None by default) 80 | classified_gait=gait_bouts) # Pandas Dataframe or .h5 file results of classify_bouts function (None by default) 81 | 82 | #### Plot results of gait feature extraction #### 83 | gaitpy.plot_contacts(gait_features, # Pandas Dataframe or .csv file results of extract_features function 84 | result_file='/my/folder/plot_contacts.html)', # File to save results to (None by default) 85 | show_plot=True) # Specify whether to display plot upon completion (True by default) 86 | 87 | Running the demo 88 | ---------------- 89 | The demo file provided lets you to test whether GaitPy outputs the expected results on your system. 90 | 91 | You may run the demo directly from a terminal window: 92 | 93 | .. code:: 94 | 95 | cd gaitpy/demo 96 | python demo.py 97 | 98 | You may also run the demo via a python interpreter. In a terminal window start python by typing: 99 | 100 | .. 
code:: 101 | 102 | python 103 | 104 | 105 | In the interpreter window you can then import and run the demo with the following two commands: 106 | 107 | .. code:: 108 | 109 | from gaitpy.demo import demo 110 | demo.run_demo() 111 | 112 | The demo script will prompt you to type in a results directory. Following the run, results will be saved in the provided 113 | results directory (less than 250kB of data will be saved). Running the demo should take less than a minute, though this 114 | may vary depending on your machine. 115 | 116 | 117 | .. toctree:: 118 | :maxdepth: 4 119 | :caption: Contents: 120 | 121 | modules 122 | 123 | Acknowledgements 124 | ---------------- 125 | The Digital Medicine & Translational Imaging group at Pfizer, Inc supported the development of this package. 126 | 127 | License 128 | ------- 129 | Gaitpy is under the MIT license 130 | 131 | 132 | 133 | -------------------------------------------------------------------------------- /docs/source/modules.rst: -------------------------------------------------------------------------------- 1 | Modules 2 | --------- 3 | 4 | .. toctree:: 5 | :maxdepth: 4 6 | 7 | gaitpy 8 | -------------------------------------------------------------------------------- /gaitpy/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/gaitpy/__init__.py -------------------------------------------------------------------------------- /gaitpy/demo/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/gaitpy/demo/__init__.py -------------------------------------------------------------------------------- /gaitpy/demo/demo.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pandas as pd 3 | from pandas.testing import assert_frame_equal 4 | import warnings 5 | warnings.simplefilter(action='ignore', category=FutureWarning) 6 | warnings.simplefilter(action='ignore', category=UserWarning) 7 | import numpy as np 8 | import time 9 | from gaitpy.gait import * 10 | 11 | def run_gaitpy(src, sample_rate, subject_height, dst): 12 | # Load/format data 13 | raw_data = pd.read_csv(src, skiprows=99, names=['timestamps', 'x', 'y', 'z'], usecols=[0, 1, 2, 3]) 14 | raw_data['unix_timestamps'] = pd.to_datetime(raw_data.timestamps, format="%Y-%m-%d %H:%M:%S:%f").values.astype(np.int64) // 10**6 15 | 16 | ### Create an instance of GaitPy ### 17 | gaitpy = Gaitpy(raw_data, 18 | sample_rate, 19 | v_acc_col_name='y', 20 | ts_col_name='unix_timestamps', 21 | v_acc_units='g', 22 | ts_units='ms', 23 | flip=False) 24 | 25 | #### Classify bouts of gait #### 26 | gait_bouts = gaitpy.classify_bouts(result_file=os.path.join(dst,'classify_bouts.h5')) 27 | 28 | #### Extract gait characteristics #### 29 | gait_features = gaitpy.extract_features(subject_height, 30 | subject_height_units='centimeters', 31 | result_file=os.path.join(dst,'gait_features.csv'), 32 | classified_gait=gait_bouts) 33 | 34 | #### Plot results of gait feature extraction #### 35 | gaitpy.plot_contacts(gait_features, result_file=os.path.join(dst, 'plot_contacts.html'), show_plot=False) 36 | 37 | def run_demo(): 38 | # Set source and destination directories 39 | src = __file__.split(".py")[0] + "_data.csv" 40 | dst = input("Please provide a path to a results directory: ") 41 | while not 
os.path.isdir(dst): 42 | dst = input( 43 | "\nYour previous entry was not appropriate." 44 | "\nIt should follow a format similar to /Users/username/Desktop/Results" 45 | "\nPlease provide a path to a results directory: " 46 | ) 47 | 48 | # Run gaitpy 49 | st = time.time() 50 | try: 51 | sample_rate = 50 # hertz 52 | subject_height = 177 # centimeters 53 | run_gaitpy(src, sample_rate, subject_height, dst) 54 | except Exception as e: 55 | print("Error processing: {}\nError: {}".format(src, e)) 56 | stp = time.time() 57 | print("total run time: {} seconds".format(round(stp-st, 2))) 58 | 59 | # Confirm expected results 60 | print("Checking extract_features endpoints...") 61 | expected_gait_features = pd.read_csv(__file__.split(".py")[0] + '_gait_features.csv') 62 | obtained_gait_features = pd.read_csv(os.path.join(dst, 'gait_features.csv')) 63 | assert_frame_equal(expected_gait_features, obtained_gait_features) 64 | print("Checking classify_bouts endpoints...") 65 | expected_classify_bouts = pd.read_hdf(__file__.split(".py")[0] + '_classify_bouts.h5') 66 | obtained_classify_bouts = pd.read_hdf(os.path.join(dst, 'classify_bouts.h5')) 67 | assert_frame_equal(expected_classify_bouts, obtained_classify_bouts) 68 | print("All tests passed") 69 | 70 | if __name__ == "__main__": 71 | run_demo() 72 | -------------------------------------------------------------------------------- /gaitpy/demo/demo_classify_bouts.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/gaitpy/demo/demo_classify_bouts.h5 -------------------------------------------------------------------------------- /gaitpy/model/feature_order.txt: -------------------------------------------------------------------------------- 1 | lumbar_y_bp_filt_[0.5, 3.0]_dom_freq_ratio 2 | lumbar_y_bp_filt_[0.5, 3.0]_dom_freq_value 3 | lumbar_y_bp_filt_[0.5, 3.0]_mean_cross_rate 4 | lumbar_y_bp_filt_[0.5, 3.0]_range 5 | lumbar_y_bp_filt_[0.5, 3.0]_rms 6 | lumbar_y_bp_filt_[0.5, 3.0]_signal_entropy 7 | lumbar_y_bp_filt_[0.5, 3.0]_spectral_entropy 8 | lumbar_y_bp_filt_[0.5, 3.0]_spectral_flatness 9 | -------------------------------------------------------------------------------- /gaitpy/model/model.pkl: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/gaitpy/model/model.pkl -------------------------------------------------------------------------------- /gaitpy/signal_features.py: -------------------------------------------------------------------------------- 1 | import pandas as pd 2 | import numpy as np 3 | 4 | def _signal_features(window_data_df, channels, fs): 5 | features = pd.DataFrame() 6 | 7 | # Compute signal entropy 8 | feat_df_signal_entropy = _signal_entropy(window_data_df, channels) 9 | 10 | # Compute RMS 11 | feat_df_signal_rms = _signal_rms(window_data_df, channels) 12 | 13 | # Compute range 14 | feat_df_signal_range = _signal_range(window_data_df, channels) 15 | 16 | # Compute Dominant Frequency 17 | sampling_rate = fs 18 | frequncy_cutoff = 12.0 19 | feat_df_dom_freq = _dominant_frequency(window_data_df, sampling_rate, frequncy_cutoff, channels) 20 | 21 | # Compute mean cross rate 22 | feat_df_mean_cross_rate = _mean_cross_rate(window_data_df, channels) 23 | 24 | features = features.join(feat_df_signal_entropy, how='outer') 25 | features = features.join(feat_df_signal_rms, how='outer') 26 | 
features = features.join(feat_df_signal_range, how='outer') 27 | features = features.join(feat_df_dom_freq, how='outer') 28 | features = features.join(feat_df_mean_cross_rate, how='outer') 29 | 30 | return features 31 | 32 | def _signal_entropy(signal_df, channels): 33 | signal_entropy_df = pd.DataFrame() 34 | 35 | for channel in channels: 36 | data_norm = signal_df[channel]/np.std(signal_df[channel]) 37 | h, d = _histogram(data_norm) 38 | 39 | lowerbound = d[0] 40 | upperbound = d[1] 41 | ncell = int(d[2]) 42 | 43 | estimate = 0 44 | sigma = 0 45 | count = 0 46 | 47 | for n in range(ncell): 48 | if h[n] != 0: 49 | logf = np.log(h[n]) 50 | else: 51 | logf = 0 52 | count = count + h[n] 53 | estimate = estimate - h[n] * logf 54 | sigma = sigma + h[n] * logf ** 2 55 | 56 | nbias = -(float(ncell) - 1) / (2 * count) 57 | 58 | estimate = estimate / count 59 | estimate = estimate + np.log(count) + np.log((upperbound - lowerbound) / ncell) - nbias 60 | 61 | # Scale the entropy estimate to stretch the range 62 | estimate = np.exp(estimate ** 2) - np.exp(0) - 1 63 | 64 | signal_entropy_df[channel + '_signal_entropy'] = [estimate] 65 | 66 | return signal_entropy_df 67 | 68 | def _signal_rms(signal_df, channels): 69 | rms_df = pd.DataFrame() 70 | 71 | for channel in channels: 72 | rms_df[channel + '_rms'] = [np.std(signal_df[channel] - signal_df[channel].mean())] 73 | 74 | return rms_df 75 | 76 | def _signal_range(signal_df, channels): 77 | range_df = pd.DataFrame() 78 | 79 | for channel in channels: 80 | range_df[channel + '_range'] = [signal_df[channel].max(skipna=True) - signal_df[channel].min(skipna=True)] 81 | 82 | return range_df 83 | 84 | def _dominant_frequency(signal_df, sampling_rate, cutoff, channels): 85 | from scipy import stats 86 | 87 | dominant_freq_df = pd.DataFrame() 88 | for channel in channels: 89 | signal_x = signal_df[channel] 90 | 91 | padfactor = 1 92 | dim = signal_x.shape 93 | nfft = 2 ** ((dim[0] * padfactor).bit_length()) 94 | 95 | freq_hat = np.fft.fftfreq(nfft) * sampling_rate 96 | freq = freq_hat[0:nfft // 2] 97 | 98 | idx1 = freq <= cutoff 99 | idx_cutoff = np.argwhere(idx1) 100 | freq = freq[idx_cutoff] 101 | 102 | sp_hat = np.fft.fft(signal_x, nfft) 103 | sp = sp_hat[0:nfft // 2] * np.conjugate(sp_hat[0:nfft // 2]) 104 | sp = sp[idx_cutoff] 105 | sp_norm = sp / sum(sp) 106 | 107 | max_freq = freq[sp_norm.argmax()][0] 108 | max_freq_val = sp_norm.max().real 109 | 110 | idx2 = (freq > max_freq - 0.5) * (freq < max_freq + 0.5) 111 | idx_freq_range = np.where(idx2)[0] 112 | dom_freq_ratio = sp_norm[idx_freq_range].real.sum() 113 | 114 | # Calculate spectral flatness 115 | spectral_flatness = 10.0*np.log10(stats.mstats.gmean(sp_norm)/np.mean(sp_norm)) 116 | 117 | # Estimate spectral entropy 118 | spectral_entropy_estimate = 0 119 | for isess in range(len(sp_norm)): 120 | if sp_norm[isess] != 0: 121 | logps = np.log2(sp_norm[isess]) 122 | else: 123 | logps = 0 124 | spectral_entropy_estimate = spectral_entropy_estimate - logps * sp_norm[isess] 125 | 126 | spectral_entropy_estimate = spectral_entropy_estimate / np.log2(len(sp_norm)) 127 | # spectral_entropy_estimate = (spectral_entropy_estimate - 0.5) / (1.5 - spectral_entropy_estimate) 128 | 129 | dominant_freq_df[channel + '_dom_freq_value'] = [max_freq] 130 | dominant_freq_df[channel + '_dom_freq_magnitude'] = [max_freq_val] 131 | dominant_freq_df[channel + '_dom_freq_ratio'] = [dom_freq_ratio] 132 | dominant_freq_df[channel + '_spectral_flatness'] = [spectral_flatness[0].real] 133 | dominant_freq_df[channel + 
'_spectral_entropy'] = [spectral_entropy_estimate[0].real] 134 | 135 | return dominant_freq_df 136 | 137 | def _mean_cross_rate(signal_df, channels): 138 | ''' 139 | Compute mean cross rate of sensor signals. 140 | 141 | :param signal_df: dataframe housing desired sensor signals 142 | :param channels: channels of signal to measure mean cross rate 143 | :return: dataframe housing calculated mean cross rate for each signal channel 144 | ''' 145 | mean_cross_rate_df = pd.DataFrame() 146 | signal_df_mean = signal_df[channels] - signal_df[channels].mean() 147 | 148 | for channel in channels: 149 | MCR = 0 150 | 151 | for i in range(len(signal_df_mean) - 1): 152 | if np.sign(signal_df_mean.loc[i, channel]) != np.sign(signal_df_mean.loc[i + 1, channel]): 153 | MCR += 1 154 | 155 | MCR = float(MCR) / len(signal_df_mean) 156 | 157 | mean_cross_rate_df[channel + '_mean_cross_rate'] = [MCR] 158 | 159 | return mean_cross_rate_df 160 | 161 | def _histogram(signal_x): 162 | descriptor = np.zeros(3) 163 | 164 | ncell = np.ceil(np.sqrt(len(signal_x))) 165 | 166 | max_val = np.nanmax(signal_x.values) 167 | min_val = np.nanmin(signal_x.values) 168 | 169 | delta = (max_val - min_val) / (len(signal_x) - 1) 170 | 171 | descriptor[0] = min_val - delta / 2 172 | descriptor[1] = max_val + delta / 2 173 | descriptor[2] = ncell 174 | 175 | h = np.histogram(signal_x, ncell.astype(int), range=(min_val, max_val)) 176 | 177 | return h[0], descriptor -------------------------------------------------------------------------------- /gaitpy/tests/__init__.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/gaitpy/tests/__init__.py -------------------------------------------------------------------------------- /gaitpy/tests/test_gait.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pandas as pd 3 | from pandas.testing import assert_frame_equal 4 | import warnings 5 | warnings.simplefilter(action='ignore', category=FutureWarning) 6 | import numpy as np 7 | from gaitpy.gait import * 8 | 9 | def run_gaitpy(src, sample_rate, subject_height): 10 | # Load/format data 11 | raw_data = pd.read_csv(src, skiprows=99, names=['timestamps', 'x', 'y', 'z'], usecols=[0, 1, 2, 3]) 12 | raw_data['unix_timestamps'] = pd.to_datetime(raw_data.timestamps, format="%Y-%m-%d %H:%M:%S:%f").values.astype(np.int64) // 10**6 13 | 14 | ### Create an instance of GaitPy ### 15 | gaitpy = Gaitpy(raw_data, 16 | sample_rate, 17 | v_acc_col_name='y', 18 | ts_col_name='unix_timestamps', 19 | v_acc_units='g', 20 | ts_units='ms', 21 | flip=False) 22 | 23 | #### Classify bouts of gait #### 24 | gait_bouts = gaitpy.classify_bouts() 25 | 26 | #### Extract gait characteristics #### 27 | gait_features = gaitpy.extract_features(subject_height, 28 | subject_height_units='centimeters', 29 | classified_gait=gait_bouts) 30 | return gait_bouts, gait_features 31 | 32 | def test_gaitpy(): 33 | # Set source and destination directories 34 | src = os.path.abspath(__file__ + '/../../')+'/demo/demo_data.csv' 35 | 36 | # Run gaitpy 37 | obtained_classify_bouts, obtained_gait_features = run_gaitpy(src, 50, 177) 38 | 39 | # Confirm expected results 40 | expected_classify_bouts = pd.read_hdf(os.path.abspath(__file__ + '/../../')+'/demo/demo_classify_bouts.h5') 41 | assert_frame_equal(expected_classify_bouts, obtained_classify_bouts) 42 | 43 | expected_gait_features = 
pd.read_csv(os.path.abspath(__file__ + '/../../')+'/demo/demo_gait_features.csv') 44 | expected_gait_features['bout_start_time'] = pd.to_datetime(expected_gait_features['bout_start_time'], 45 | format='%Y-%m-%d %H:%M:%S.%f') 46 | assert_frame_equal(expected_gait_features, obtained_gait_features) 47 | -------------------------------------------------------------------------------- /gaitpy/tests/test_signal_features.py: -------------------------------------------------------------------------------- 1 | import os 2 | from pandas.testing import assert_frame_equal 3 | import warnings 4 | warnings.simplefilter(action='ignore', category=FutureWarning) 5 | from gaitpy.signal_features import * 6 | from test_gait import run_gaitpy 7 | 8 | def test__signal_features(): 9 | # Set source and destination directories 10 | src = os.path.abspath(__file__ + '/../../')+'/demo/demo_data.csv' 11 | 12 | # Run gaitpy 13 | sample_rate = 50 # hertz 14 | subject_height = 177 # centimeters 15 | obtained_classify_bouts, obtained_gait_features = run_gaitpy(src, sample_rate, subject_height) 16 | 17 | # Confirm expected results 18 | expected_classify_bouts = pd.read_hdf(os.path.abspath(__file__ + '/../../')+'/demo/demo_classify_bouts.h5') 19 | assert_frame_equal(expected_classify_bouts, obtained_classify_bouts) 20 | 21 | expected_gait_features = pd.read_csv(os.path.abspath(__file__ + '/../../')+'/demo/demo_gait_features.csv') 22 | expected_gait_features['bout_start_time'] = pd.to_datetime(expected_gait_features['bout_start_time'], 23 | format='%Y-%m-%d %H:%M:%S.%f') 24 | assert_frame_equal(expected_gait_features, obtained_gait_features) 25 | -------------------------------------------------------------------------------- /gaitpy/tests/test_util.py: -------------------------------------------------------------------------------- 1 | import os 2 | import pandas as pd 3 | from pandas.testing import assert_frame_equal 4 | import warnings 5 | warnings.simplefilter(action='ignore', category=FutureWarning) 6 | import numpy as np 7 | import gaitpy.util as util 8 | from gaitpy.gait import * 9 | 10 | def test_load_data(): 11 | # Load/format data 12 | src = os.path.abspath(__file__ + '/../../')+'/demo/demo_data.csv' 13 | raw_data = pd.read_csv(src, skiprows=99, names=['timestamps', 'x', 'y', 'z'], usecols=[0, 1, 2, 3]) 14 | raw_data['unix_timestamps'] = pd.to_datetime(raw_data.timestamps, format="%Y-%m-%d %H:%M:%S:%f").values.astype(np.int64) // 10**6 15 | raw_data = raw_data.iloc[:10,:] 16 | 17 | # Create an instance of GaitPy 18 | gaitpy = Gaitpy(raw_data, 19 | 50, 20 | v_acc_col_name='y', 21 | ts_col_name='unix_timestamps', 22 | v_acc_units='g', 23 | ts_units='ms', 24 | flip=False) 25 | 26 | # Run function being tested 27 | obtained_y_accel, obtained_ts = util._load_data(gaitpy, gaitpy.down_sample) 28 | 29 | # Confirm expected results 30 | expected_y_accel = pd.Series(np.array([7.138261,7.177487,7.177487,7.215733,6.177209,7.868856,7.676646,5.792788,4.831736,10.713765]), name='y') 31 | pd.testing.assert_series_equal(obtained_y_accel, expected_y_accel) 32 | 33 | expected_ts = pd.Series(np.array([1565087150000,1565087150020,1565087150040,1565087150060,1565087150080, 34 | 1565087150100,1565087150120,1565087150140,1565087150160,1565087150180]), name='unix_timestamps') 35 | pd.testing.assert_series_equal(obtained_ts, expected_ts) 36 | 37 | def test_extract_signal_features(): 38 | # Load/format data 39 | src = os.path.abspath(__file__ + '/../../')+'/demo/demo_data.csv' 40 | raw_data = pd.read_csv(src, skiprows=99, names=['timestamps', 
'x', 'y', 'z'], usecols=[0, 1, 2, 3]) 41 | raw_data['unix_timestamps'] = pd.to_datetime(raw_data.timestamps, format="%Y-%m-%d %H:%M:%S:%f").values.astype(np.int64) // 10**6 42 | data = pd.DataFrame({'y': raw_data.iloc[:150,:].y}) 43 | timestamps = pd.DatetimeIndex(raw_data.iloc[:150,:].unix_timestamps.astype('datetime64[ms]')) 44 | 45 | # Run function being tested 46 | obtained_feature_set, obtained_start_times_list, obtained_end_times_list = util._extract_signal_features(data, timestamps, 50) 47 | 48 | # Confirm expected results 49 | expected_feature_set = pd.DataFrame([[2.67261662, 0.04452866, 0.19341188, 0.5859375, 0.28124919, 0.65097933, -12.20492315, 0.59233586, 0.04666667]], 50 | columns=['lumbar_y_bp_filt_[0.5, 3.0]_signal_entropy','lumbar_y_bp_filt_[0.5, 3.0]_rms', 51 | 'lumbar_y_bp_filt_[0.5, 3.0]_range','lumbar_y_bp_filt_[0.5, 3.0]_dom_freq_value', 52 | 'lumbar_y_bp_filt_[0.5, 3.0]_dom_freq_magnitude','lumbar_y_bp_filt_[0.5, 3.0]_dom_freq_ratio', 53 | 'lumbar_y_bp_filt_[0.5, 3.0]_spectral_flatness','lumbar_y_bp_filt_[0.5, 3.0]_spectral_entropy', 54 | 'lumbar_y_bp_filt_[0.5, 3.0]_mean_cross_rate'], index=[0]) 55 | assert_frame_equal(expected_feature_set, obtained_feature_set) 56 | 57 | expected_start_times_list = [pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=25, second=50)] 58 | assert expected_start_times_list == obtained_start_times_list 59 | 60 | expected_end_times_list = [pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=25, second=52, microsecond=980000)] 61 | assert expected_end_times_list == obtained_end_times_list 62 | 63 | def test_concatenate_bouts(): 64 | # Load/format data 65 | classify_bouts = pd.read_hdf(os.path.abspath(__file__ + '/../../')+'/demo/demo_classify_bouts.h5') 66 | gait_windows = classify_bouts[classify_bouts['prediction'] == 1][0:10] 67 | 68 | # Run function being tested 69 | obtained_gait_bouts = util._concatenate_windows(gait_windows, window_length=3) 70 | 71 | # Confirm expected results 72 | expected_values = [[3.0, pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=26, second=2, microsecond=500000), pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=25, second=59, microsecond=500000)], 73 | [9.0, pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=26, second=17, microsecond=500000), pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=26, second=8, microsecond=500000)], 74 | [18.0, pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=26, second=38, microsecond=500000), pd.Timestamp(year=2019, month=8, day=6, hour=10, minute=26, second=20, microsecond=500000)]] 75 | expected_gait_bouts = pd.DataFrame(expected_values, columns=['bout_length', 'end_time', 'start_time']) 76 | pd.testing.assert_frame_equal(expected_gait_bouts, obtained_gait_bouts) 77 | 78 | def test_cwt(): 79 | # Load/format data 80 | src = os.path.abspath(__file__ + '/../../')+'/demo/demo_data.csv' 81 | raw_data = pd.read_csv(src, skiprows=99, names=['timestamps', 'x', 'y', 'z'], usecols=[0, 1, 2, 3]) 82 | raw_data = raw_data.iloc[3500:3700,:] 83 | raw_data['y'] = raw_data['y'] * 9.80665 84 | 85 | # Run function being tested 86 | obtained_ic_peaks, obtained_fc_peaks = util._cwt(raw_data.y, 50, 5, 10) 87 | 88 | # Confirm expected results 89 | np.testing.assert_array_equal(obtained_ic_peaks, [10, 43, 75, 110, 141, 171]) 90 | np.testing.assert_array_equal(obtained_fc_peaks, [19, 50, 84, 117, 148, 179]) 91 | 92 | def test_optimization(): 93 | # Load/format data 94 | src = os.path.abspath(__file__ + '/../../')+'/demo/demo_data.csv' 95 | raw_data = 
pd.read_csv(src, skiprows=99, names=['timestamps', 'x', 'y', 'z'], usecols=[0, 1, 2, 3]) 96 | raw_data['unix_timestamps'] = pd.to_datetime(raw_data.timestamps, format="%Y-%m-%d %H:%M:%S:%f").values.astype(np.int64) // 10**6 97 | raw_data = raw_data.iloc[:500,:] 98 | ic_peaks = np.array([10, 43, 75, 110, 141, 171]) 99 | fc_peaks = np.array([19, 50, 84, 117, 148, 179]) 100 | 101 | # Run function being tested 102 | obtained_optimization = util._optimization(raw_data['unix_timestamps'], ic_peaks, fc_peaks) 103 | 104 | # Confirm expected results 105 | expected_optimization = pd.DataFrame([[1565087150200, 1565087151000, 1565087150380, np.nan, 1], 106 | [1565087150860, 1565087151680, 1565087151000, np.nan, 1], 107 | [1565087151500, 1565087152340, 1565087151680, np.nan, 1], 108 | [1565087152200, 1565087152960, 1565087152340, np.nan, 0], 109 | [1565087152820, 1565087153580, 1565087152960, np.nan, 0]], 110 | columns=['IC','FC','FC_opp_foot','CoM_height','Gait_Cycle']) 111 | 112 | pd.testing.assert_frame_equal(expected_optimization, obtained_optimization) 113 | 114 | def test_height_change_com(): 115 | # Load/format data 116 | src = os.path.abspath(__file__ + '/../../')+'/demo/demo_data.csv' 117 | raw_data = pd.read_csv(src, skiprows=99, names=['timestamps', 'x', 'y', 'z'], usecols=[0, 1, 2, 3]) 118 | raw_data['unix_timestamps'] = pd.to_datetime(raw_data.timestamps, format="%Y-%m-%d %H:%M:%S:%f").values.astype(np.int64) // 10**6 119 | raw_data = raw_data.iloc[:500,:] 120 | optimization = pd.DataFrame([[1565087150200, 1565087151000, 1565087150380, np.nan, 1], 121 | [1565087150860, 1565087151680, 1565087151000, np.nan, 1], 122 | [1565087151500, 1565087152340, 1565087151680, np.nan, 1], 123 | [1565087152200, 1565087152960, 1565087152340, np.nan, 0], 124 | [1565087152820, 1565087153580, 1565087152960, np.nan, 0]], 125 | columns=['IC','FC','FC_opp_foot','CoM_height','Gait_Cycle']) 126 | 127 | # Run function being tested 128 | obtained_height_change_com = util._height_change_com(optimization, raw_data['unix_timestamps'], raw_data['y'], 50) 129 | obtained_height_change_com['CoM_height'] = obtained_height_change_com.CoM_height.round(6) 130 | 131 | # Confirm expected results 132 | expected_height_change_com = pd.DataFrame([[1565087150200, 1565087151000, 1565087150380, 0.001516, 1], 133 | [1565087150860, 1565087151680, 1565087151000, 0.001538, 1], 134 | [1565087151500, 1565087152340, 1565087151680, 0.001385, 1], 135 | [1565087152200, 1565087152960, 1565087152340, 0.000149, 0], 136 | [1565087152820, 1565087153580, 1565087152960, np.nan, 0]], 137 | columns=['IC','FC','FC_opp_foot','CoM_height','Gait_Cycle']) 138 | pd.testing.assert_frame_equal(expected_height_change_com, obtained_height_change_com) 139 | 140 | def test_calculate_sensor_height(): 141 | # Run function being tested 142 | obtained_sensor_height = util._calculate_sensor_height(177, 'centimeters', 0.53) 143 | 144 | # Confirm expected results 145 | assert obtained_sensor_height == 0.9381 146 | 147 | def test_cwt_feature_extraction(): 148 | # Load/format data 149 | optimized_gait = pd.DataFrame([[1565087150200, 1565087151000, 1565087150380, 0.001516, 1], 150 | [1565087150860, 1565087151680, 1565087151000, 0.001538, 1], 151 | [1565087151500, 1565087152340, 1565087151680, 0.001385, 1], 152 | [1565087152200, 1565087152960, 1565087152340, 0.000149, 0], 153 | [1565087152820, 1565087153580, 1565087152960, np.nan, 0]], 154 | columns=['IC','FC','FC_opp_foot','CoM_height','Gait_Cycle']) 155 | 156 | # Run function being tested 157 | 
obtained_cwt_feature_extraction = util._cwt_feature_extraction(optimized_gait, 0.9831).round(5) 158 | 159 | # Confirm expected results 160 | expected_cwt_feature_extraction = pd.DataFrame([[1565087150200, 1565087151000, 1565087150380, 0.00152, 1, 5, 1.30000, 0.04000, 0.66000, 0.02000, 90.90909, 0.18000, 0.04000, 0.14000, 0.04000, 0.32000, 0.00000, np.nan, np.nan, 0.80000, 0.02000, 0.50000, 0.02000, 0.10915, 0.00079, 0.21909, 0.00482, 0.16853], 161 | [1565087150860, 1565087151680, 1565087151000, 0.00154, 1, 5, 1.34000, 0.02000, 0.64000, 0.06000, 93.75000, 0.14000, 0.04000, 0.18000, 0.04000, 0.32000, 0.00000, 0.50000, 0.02000, 0.82000, 0.02000, 0.52000, 0.04000, 0.10994, 0.00561, 0.21427, np.nan, 0.15990], 162 | [1565087151500, 1565087152340, 1565087151680, 0.00138, 1, 5, 1.32000, np.nan, 0.70000, np.nan, 85.71429, 0.18000, np.nan, 0.14000, np.nan, 0.32000, np.nan, 0.52000, np.nan, 0.84000, np.nan, 0.48000, np.nan, 0.10433, np.nan, np.nan, np.nan, np.nan], 163 | [1565087152200, 1565087152960, 1565087152340, 0.00015, 0, 5, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan], 164 | [1565087152820, 1565087153580, 1565087152960, np.nan, 0, 5, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan, np.nan]], 165 | columns=['IC','FC','FC_opp_foot','CoM_height','Gait_Cycle','steps','stride_duration','stride_duration_asymmetry','step_duration','step_duration_asymmetry','cadence','initial_double_support','initial_double_support_asymmetry','terminal_double_support','terminal_double_support_asymmetry','double_support','double_support_asymmetry','single_limb_support','single_limb_support_asymmetry','stance','stance_asymmetry','swing','swing_asymmetry','step_length','step_length_asymmetry','stride_length','stride_length_asymmetry','gait_speed']) 166 | pd.testing.assert_frame_equal(expected_cwt_feature_extraction, obtained_cwt_feature_extraction) 167 | -------------------------------------------------------------------------------- /paper/Figure1.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/paper/Figure1.png -------------------------------------------------------------------------------- /paper/Figure2.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/paper/Figure2.png -------------------------------------------------------------------------------- /paper/Figure3.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/matt002/GaitPy/49c296bb1694e5dc36414221d2fae0e263eb6aaa/paper/Figure3.png -------------------------------------------------------------------------------- /paper/paper.bib: -------------------------------------------------------------------------------- 1 | @article{McCamley2012, 2 | abstract = {This study introduces a new method of extracting initial and final contact gait time events from vertical acceleration, measured with one waist mounted inertial measurement unit, by means of continuous wavelet transforms. The method was validated on 18 young healthy subjects and compared to two others available in the literature. 
Of the three methods investigated, the new one was the most accurate at identifying the existence and timing of initial and final contacts with the ground, with an average error of 0.02. ±. 0.02. s and 0.03. ±. 0.03. s (approximately 2{\%} and 3{\%} of mean stride duration), respectively. {\textcopyright} 2012 Elsevier B.V.}, 3 | author = {McCamley, John and Donati, Marco and Grimpampi, Eleni and Mazz{\`{a}}, Claudia}, 4 | doi = {10.1016/j.gaitpost.2012.02.019}, 5 | isbn = {0966-6362}, 6 | issn = {09666362}, 7 | journal = {Gait and Posture}, 8 | keywords = {Accelerometers,Biomechanics,Gait,Gait events,Gait phases,Locomotion,Pelvis,Temporal parameters,Walking,Wearable sensors}, 9 | mendeley-groups = {Remote{\_}Monitoring/PD/Gait,Remote{\_}Monitoring/PD/Gait/Estimating Gait,gaitpy{\_}joss{\_}references}, 10 | number = {2}, 11 | pages = {316--318}, 12 | pmid = {22465705}, 13 | title = {{An enhanced estimate of initial contact and final contact instants of time using lower trunk inertial sensor data}}, 14 | volume = {36}, 15 | year = {2012} 16 | } 17 | @article{Zijlstra2003, 18 | abstract = {This paper studies the feasibility of an analysis of spatio-temporal gait parameters based upon accelerometry. To this purpose, acceleration patterns of the trunk and their relationships with spatio-temporal gait parameters were analysed in healthy subjects. Based on model predictions of the body's centre of mass trajectory during walking, algorithms were developed to determine spatio-temporal gait parameters from trunk acceleration data. In a first experiment, predicted gait parameters were compared with gait parameters determined from ground reaction forces measured by a treadmill. In a second experiment, spatio-temporal gait parameters were determined during overground walking. From the results of these experiments, it is concluded that, in healthy subjects, the duration of subsequent stride cycles and left/right steps, and estimations of step length and walking speed can be obtained from lower trunk accelerations. The possibility to identify subsequent stride cycles can be the basis for an analysis of other signals (e.g. kinematic or muscle activity) within the stride cycle. {\textcopyright} 2002 Elsevier Science B.V. 
All rights reserved.}, 19 | author = {Zijlstra, Wiebren and Hof, At L.}, 20 | doi = {10.1016/S0966-6362(02)00190-X}, 21 | isbn = {0966-6362}, 22 | issn = {09666362}, 23 | journal = {Gait and Posture}, 24 | keywords = {Accelerometry,Ambulatory measurements,Foot contact detection,Inverted pendulum model,Locomotion}, 25 | mendeley-groups = {Remote{\_}Monitoring/PD/Gait,Remote{\_}Monitoring/PD/Gait/Estimating Gait,gaitpy{\_}joss{\_}references}, 26 | number = {2}, 27 | pages = {1--10}, 28 | pmid = {14654202}, 29 | title = {{Assessment of spatio-temporal gait parameters from trunk accelerations during human walking}}, 30 | volume = {18}, 31 | year = {2003} 32 | } 33 | @article{Hollman2011, 34 | author = {Hollman, John and McDade, Eric and Peterson, Ronald}, 35 | doi = {10.1016/j.gaitpost.2011.03.024}, 36 | isbn = {2036884555}, 37 | journal = {Gait {\&} Posture}, 38 | keywords = {borrellia burdorferi,erythema migrans,lyme disease,serologic testing,tick-borne infections}, 39 | mendeley-groups = {Remote{\_}Monitoring/PD/Gait/Gait Features,gaitpy{\_}joss{\_}references}, 40 | number = {1}, 41 | pages = {111--118}, 42 | title = {{Normative Spatiotemporal Gait Parameters in Older Adults}}, 43 | volume = {34}, 44 | year = {2011} 45 | } 46 | @article{Najafi2003, 47 | abstract = {A new method of physical activity monitoring is presented, which is able to detect body postures (sitting, standing, and lying) and periods of walking in elderly persons using only one kinematic sensor attached to the chest. The wavelet transform, in conjunction with a simple kinematics model, was used to detect different postural transitions (PTs) and walking periods during daily physical activity. To evaluate the system, three studies were performed. The method was first tested on 11 community-dwelling elderly subjects in a gait laboratory where an optical motion system (Vicon) was used as a reference system. In the second study, the system was tested for classifying PTs (i.e., lying-to-sitting, sitting-to-lying, and turning the body in bed) in 24 hospitalized elderly persons. Finally, in a third study monitoring was performed on nine elderly persons for 45-60 min during their daily physical activity. Moreover, the possibility-to-perform long-term monitoring over 12 h has been shown. The first study revealed a close concordance between the ambulatory and reference systems. Overall, subjects performed 349 PTs during this study. Compared with the reference system, the ambulatory system had an overall sensitivity of 99{\%} for detection of the different PTs. Sensitivities and specificities were 93{\%} and 82{\%} in sit-to-stand, and 82{\%} and 94{\%} in stand-to-sit, respectively. In both first and second studies, the ambulatory system also showed a very high accuracy ({\textgreater} 99{\%}) in identifying the 62 transfers or rolling out of bed, as well as 144 different posture changes to the back, ventral, right and left sides. Relatively high sensitivity ({\textgreater} 90{\%}) was obtained for the classification of usual physical activities in the third study in comparison with visual observation. Sensitivities and specificities were, respectively, 90.2{\%} and 93.4{\%} in sitting, 92.2{\%} and 92.1{\%} in "standing + walking," and, finally, 98.4{\%} and 99.7{\%} in lying. Overall detection errors (as percent of range) were 3.9{\%} for "standing + walking," 4.1{\%} for sitting, and 0.3{\%} for lying. 
Finally, overall symmetric mean average errors were 12{\%} for "standing + walking," 8.2{\%} for sitting, and 1.3{\%} for lying.}, 48 | author = {Najafi, Bijan and Aminian, Kamiar and Paraschiv-Ionescu, Anisoara and Loew, Fran{\c{c}}ois and B{\"{u}}la, Christophe J. and Robert, Philippe}, 49 | doi = {10.1109/TBME.2003.812189}, 50 | isbn = {0-7803-6603-4}, 51 | issn = {00189294}, 52 | journal = {IEEE Transactions on Biomedical Engineering}, 53 | keywords = {Ambulatory system,Elderly people,Kinematic sensor,Long-term monitoring,Physical activity,Postural transition,Wavelet transform}, 54 | mendeley-groups = {Remote{\_}Monitoring/PD/Gait,gaitpy{\_}joss{\_}references}, 55 | number = {6}, 56 | pages = {711--723}, 57 | pmid = {12814238}, 58 | title = {{Ambulatory system for human motion analysis using a kinematic sensor: Monitoring of daily physical activity in the elderly}}, 59 | volume = {50}, 60 | year = {2003} 61 | } 62 | @article{DelDin2016, 63 | abstract = {Measurement of gait is becoming important as a tool to identify disease and disease progression, yet to date its application is limited largely to specialist centres. Wearable devices enables gait to be measured in naturalistic environments however questions remain regarding validity. Previous research suggests that when compared with a laboratory reference, measurement accuracy is acceptable for mean but not variability or asymmetry gait characteristics. Some fundamental reasons for this have been presented (e.g. synchronisation, different sampling frequencies) but to date this has not been systematically examined. The aims of this study were to: (i) quantify a comprehensive range of gait characteristics measured using a single tri-axial accelerometer-based monitor, (ii) examine outcomes and monitor performance in measuring gait in older adults and those with Parkinson's disease (PD) and (iii) carry out a detailed comparison with those derived from an instrumented walkway to account for any discrepancies. Fourteen gait characteristics were quantified in 30 people with incident PD and 30 healthy age-matched controls. Of the 14 gait characteristics compared, agreement between instruments was excellent for 4 (ICCs 0.913 - 0.983); moderate for 4 (ICCs 0.508 - 0.766); and poor for 6 characteristics (ICCs -0.637 - 0.370). Further analysis revealed that differences reflect an increased sensitivity of accelerometry to detect motion, rather than measurement error. This is most likely because accelerometry measures gait as a continuous activity rather than discrete footfall events, per instrumented tools. The increased sensitivity shown for these characteristics will be of particular interest to researchers keen to interpret 'real world' gait data. 
In conclusion, use of a body worn monitor is recommended for the measurement of gait but is likely to yield more sensitive data for asymmetry and variability features.}, 64 | author = {Del Din, Silvia and Godfrey, Alan and Rochester, Lynn}, 65 | doi = {10.1109/JBHI.2015.2419317}, 66 | issn = {21682194}, 67 | journal = {IEEE Journal of Biomedical and Health Informatics}, 68 | keywords = {Accelerometer,algorithm,body-worn monitor,instrumented gait,instrumented walkway}, 69 | mendeley-groups = {Remote{\_}Monitoring/PD/Gait,Remote{\_}Monitoring/PD/Gait/Estimating Gait,gaitpy{\_}joss{\_}references}, 70 | number = {3}, 71 | pages = {838--847}, 72 | pmid = {25850097}, 73 | title = {{Validation of an Accelerometer to Quantify a Comprehensive Battery of Gait Characteristics in Healthy Older Adults and Parkinson's Disease: Toward Clinical and at Home Use}}, 74 | volume = {20}, 75 | year = {2016} 76 | } 77 | @Article{Breiman2001, 78 | author= {Breiman, Leo}, 79 | title={Random Forests}, 80 | journal={Machine Learning}, 81 | year={2001}, 82 | month={Oct}, 83 | day={01}, 84 | volume={45}, 85 | number={1}, 86 | pages={5--32}, 87 | abstract={Random forests are a combination of tree predictors such that each tree depends on the values of a random vector sampled independently and with the same distribution for all trees in the forest. The generalization error for forests converges a.s. to a limit as the number of trees in the forest becomes large. The generalization error of a forest of tree classifiers depends on the strength of the individual trees in the forest and the correlation between them. Using a random selection of features to split each node yields error rates that compare favorably to Adaboost (Y. Freund {\&} R. Schapire, Machine Learning: Proceedings of the Thirteenth International conference, ***, 148--156), but are more robust with respect to noise. Internal estimates monitor error, strength, and correlation and these are used to show the response to increasing the number of features used in the splitting. Internal estimates are also used to measure variable importance. These ideas are also applicable to regression.}, 88 | issn={1573-0565}, 89 | doi={10.1023/A:1010933404324}, 90 | url={https://doi.org/10.1023/A:1010933404324} 91 | } 92 | @article{Trojaniello2014, 93 | abstract = {In the last decade, various methods for the estimation of gait events and temporal parameters from the acceleration signals of a single inertial measurement unit (IMU) mounted at waist level have been proposed. Despite the growing interest for such methodologies, a thorough comparative analysis of methods with regards to number of extra and missed events, accuracy and robustness to IMU location is still missing in the literature. The aim of this work was to fill this gap. Five methods have been tested on single IMU data acquired from fourteen healthy subjects walking while being recorded by a stereo-photogrammetric system and two force platforms. The sensitivity in detecting initial and final contacts varied between 81{\%} and 100{\%} across methods, whereas the positive predictive values ranged between 94{\%} and 100{\%}. For all tested methods, stride and step time estimates were obtained; three of the selected methods also allowed estimation of stance, swing and double support time. Results showed that the accuracy in estimating step and stride durations was acceptable for all methods. 
Conversely, a statistical difference was found in the error in estimating stance, swing and double support time, due to the larger errors in the final contact determination. Except for one method, the IMU positioning on the lower trunk did not represent a critical factor for the estimation of gait temporal parameters. Results obtained in this study may not be applicable to pathologic gait.}, 94 | author = {Trojaniello, Diana and Cereatti, Andrea and {Della Croce}, Ugo}, 95 | doi = {10.1016/j.gaitpost.2014.07.007}, 96 | issn = {18792219}, 97 | journal = {Gait and Posture}, 98 | keywords = {Accelerometry,Gait analysis,Gait events,Inertial sensor,Temporal parameters}, 99 | mendeley-groups = {Remote{\_}Monitoring/PD/Gait/Estimating Gait,gaitpy{\_}joss{\_}references}, 100 | number = {4}, 101 | pages = {487--492}, 102 | publisher = {Elsevier B.V.}, 103 | title = {{Accuracy, sensitivity and robustness of five different methods for the estimation of gait temporal parameters using a single inertial sensor mounted on the lower trunk}}, 104 | url = {http://dx.doi.org/10.1016/j.gaitpost.2014.07.007}, 105 | volume = {40}, 106 | year = {2014} 107 | } 108 | 109 | -------------------------------------------------------------------------------- /paper/paper.md: -------------------------------------------------------------------------------- 1 | --- 2 | title: 'GaitPy: An Open-Source Python Package for Gait Analysis Using an Accelerometer on the Lower Back' 3 | tags: 4 | - Python 5 | - GaitPy 6 | - Accelerometer 7 | - Actigraphy 8 | - Algorithms 9 | - Lumbar 10 | - Gait 11 | - Digital Medicine 12 | - Wearable Sensors 13 | authors: 14 | - name: Matthew D. Czech 15 | orcid: 0000-0001-9954-7003 16 | affiliation: 1 17 | - name: Shyamal Patel 18 | orcid: 0000-0002-4369-3033 19 | affiliation: 1 20 | affiliations: 21 | - name: Pfizer, Inc. 22 | index: 1 23 | date: 25 July 2019 24 | bibliography: paper.bib 25 | --- 26 | 27 | # Introduction 28 | 29 | Gait impairments are present across a broad range of conditions and often have a significant impact on the functional mobility and quality of life of an individual. Clinicians and researchers commonly assess gait using either observational scales (e.g. Unified Parkinson's Disease Rating Scale) or performance-based tests (e.g. timed-up-and-go). However, these assessments can only be performed intermittently because of the need for a trained clinician. In contrast, wearable devices can be used for continuously capturing data from sensors (e.g. accelerometer, ECG) outside the clinic. Recently, several groups [@McCamley2012; @Zijlstra2003; @DelDin2016; @Trojaniello2014] have published algorithms for deriving features of gait from data collected using inertial sensors like accelerometers. However, an implementation of these algorithms is not readily available to researchers, thus hindering progress. 30 | 31 | GaitPy is an open-source Python package that implements several published algorithms in a modular framework for extracting clinical features of gait from a single accelerometer device mounted on the lower back (L5 vertebra, illustrated in figure 1). The package has been developed to make it easy for researchers to derive measures of gait from raw accelerometer data. As shown in figure 2, the package includes modules with three main functions: 1) classify bouts of gait; 2) extract clinical features of gait from each bout; and 3) visualize detected gait events. 
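For orientation, the three functions can be chained as in the short sketch below. This is a condensed illustration of the usage documented in docs/source/index.rst; the input file path, the 50 Hz sample rate, and the 170 cm subject height are placeholders, not values required by the package.

```python
from gaitpy.gait import Gaitpy

# Placeholder inputs -- substitute your own file path, sample rate, and subject height.
raw_data = '/path/to/raw_data.csv'   # vertical acceleration + unix timestamps from a lumbar sensor
gaitpy = Gaitpy(raw_data, 50,
                v_acc_col_name='y', ts_col_name='timestamps',
                v_acc_units='m/s^2', ts_units='ms', flip=False)

gait_bouts = gaitpy.classify_bouts()                       # 1) classify bouts of gait
gait_features = gaitpy.extract_features(170,               # 2) extract gait features (height in cm)
                                        subject_height_units='centimeters',
                                        classified_gait=gait_bouts)
gaitpy.plot_contacts(gait_features, show_plot=True)        # 3) visualize detected gait events
```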
32 | 33 | ![Location of the wearable device on the lower back and orientation of the vertical acceleration axis of the accelerometer relative to the body](Figure1.png){width=200px} 34 | 35 | ![A high-level overview of GaitPy API and functions associated with various modules.](Figure2.png) 36 | 37 | # Processing Pipeline 38 | 39 | GaitPy can be used to derive gait features from data collected in the clinic as well as under free-living conditions (e.g. at home). The package accepts input data in a customizable format, thereby not restricting the user to a standard file type. GaitPy utilizes vertical acceleration data from a wearable device located on the lower back (lumbar region) and consists of three main processing modules. 40 | 41 | **classify_bouts** is an optional module intended to be used for processing data collected under free-living or unsupervised conditions. The module uses a pre-trained gait classification model to detect bouts of gait from a continuous stream of raw accelerometer data. It first converts data to units of gravity (g) and down-samples it to 50Hz. Data is then segmented into non-overlapping 3-second epochs and signal features are derived for each epoch. The pre-trained gait classification model then classifies each 3-second epoch as gait or not-gait. 42 | 43 | The **extract_features** module utilizes a Gaussian continuous wavelet transform based approach [@McCamley2012] and an inverted pendulum model [@Zijlstra2003] to calculate spatial and temporal gait features. Vertical acceleration data is first converted from units of gravity (g) to meters per second squared (m/s2) and down-sampled to 50Hz. Using the approach described in Del Din et al. 2016 [@DelDin2016], we then derive spatial and temporal features of gait. Additionally, an optimization procedure is performed to remove extraneous event detections and is described in more detail below. The extract_features module expects data segments that only contain gait. So, if input data consists of both gait and non-gait data, it is recommended to first apply the classify_bouts function in order to identify periods of gait at the resolution of 3-second epochs. extract_features will then concatenate concurrent 3-second epochs of gait into bouts and extract features for each bout. 44 | 45 | The **plot_contacts** module generates an interactive plot of raw data along with the initial and final contact events detected by the gait event detection algorithm [@McCamley2012]. The plot facilitates debugging and presentation of results. 46 | 47 | # Outputs 48 | 49 | As shown in figure 2, the outputs of GaitPy modules include classification of gait bouts, a set of gait feature values extracted for each bout, and a plot of raw sensor data marked with detected gait events (initial contact/heel strike and final contact/toe off). We describe outputs of each of the processing modules below: 50 | 51 | **classify_bouts** generates a pandas dataframe or an h5 file containing the following columns: 52 | 53 | a. window_start_time: Unix timestamp associated with the beginning of each 3-second epoch in the data. 54 | b. window_end_time: Unix timestamp associated with the end of each 3-second epoch in the data. 55 | c. prediction: Output of the gait classification model (1=gait or 0=not gait) for each 3-second epoch in the data. 56 | 57 | **extract_features** generates a pandas dataframe or a csv file containing: 58 | 59 | a. Bout number for each bout detected by classify_bouts (column name: bout_number). 60 | b. 
Length of bout in seconds (column name: bout_length_sec). 61 | c. Start time of bout (column name: bout_start_time). 62 | d. Total number of steps detected within each bout (column name: steps). 63 | e. Initial and final contact event timestamps in Unix time (column names: IC and FC, respectively). 64 | f. Values of the following gait features are derived per stride: stride duration, step duration, cadence, initial double support, terminal double support, double support, single limb support, stance, swing, step length, stride length, gait speed. In addition, we calculate variability and asymmetry associated with a set of features. 65 | 66 | **plot_contacts** generates an HTML file containing an interactive time-series plot of raw vertical acceleration data labeled with detected gait events and bouts (shown in figure 3). 67 | 68 | a. Initial contact: The moment in the gait cycle when the foot touches the ground (i.e. heel strike). 69 | b. Final contact: The moment in the gait cycle when the foot lifts off the ground (i.e. toe off). 70 | c. Gait bouts: A green vertical line marks the beginning of each detected gait bout and a red vertical line marks the end of the bout (Figure 3). 71 | 72 | ![Time-series plot generated by the plot_contacts module of the raw vertical acceleration data labeled with detected gait events (initial contact/heel strike and final contact/toe off) and bout classifications. The start and end times of classified bouts are labeled by green and red vertical lines respectively. During the period shown, the participant walked for about 30 seconds, paused, performed 5 sit-to-stand repetitions, paused again, and continued walking for about 30 seconds.](Figure3.png){width=450px} 73 | 74 | # Algorithms 75 | 76 | GaitPy includes two key algorithms for processing raw accelerometer data to derive gait features. The first algorithm detects bouts of gait from continuous accelerometer data collected under free-living conditions, and the second derives temporal and spatial features of gait from pre-identified bouts of gait. Below is a brief description of the algorithms. 77 | 78 | **Gait Classification** 79 | In order to derive gait features from data collected under free-living conditions, it is essential to identify periods of walking activity. GaitPy includes a pre-trained random forest [@Breiman2001] binary classifier that operates on time and frequency domain features extracted from 3-second epochs of vertical acceleration data. Prior to feature extraction, raw vertical acceleration data is down-sampled to 50Hz and band-pass filtered using a 0.5-3Hz first-order Butterworth filter. Extracted signal features include dominant frequency, the ratio of the energy associated with the dominant frequency component to the total energy, the range of amplitude, the root mean square value of the signal, and the signal entropy. GaitPy's classify_bouts module applies this binary classifier to input data to classify each non-overlapping 3-second epoch as either gait or not-gait.
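To make the preprocessing and feature set above concrete, the sketch below computes the five listed features for a single 3-second epoch. It is illustrative only: the filter settings follow the paragraph above, but the function name, the spectral-entropy definition, and other details are assumptions and are not taken from GaitPy's signal_features.py.

```python
# Illustrative re-implementation of the per-epoch features named above (not GaitPy's own code).
import numpy as np
from scipy import signal

FS = 50  # Hz, sampling rate after down-sampling

def epoch_features(v_acc_3s):
    """Five features for one 3-second epoch of vertical acceleration."""
    # 0.5-3 Hz first-order band-pass Butterworth filter, as described above
    b, a = signal.butter(1, [0.5, 3.0], btype='bandpass', fs=FS)
    x = signal.filtfilt(b, a, v_acc_3s)

    freqs, psd = signal.periodogram(x, fs=FS)
    dominant_freq = freqs[np.argmax(psd)]            # dominant frequency
    energy_ratio = psd.max() / psd.sum()             # dominant component energy / total energy
    amplitude_range = x.max() - x.min()              # range of amplitude
    rms = np.sqrt(np.mean(x ** 2))                   # root mean square
    p = psd / psd.sum()
    entropy = -np.sum(p * np.log2(p + 1e-12))        # spectral entropy (one common definition)
    return dominant_freq, energy_ratio, amplitude_range, rms, entropy

print(epoch_features(np.random.randn(3 * FS)))       # e.g. one simulated 3-second epoch
```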
80 | 81 | **Gait Features** 82 | GaitPy implements a slightly modified version of a Gaussian continuous wavelet-based method [@McCamley2012] and an inverted pendulum model [@Zijlstra2003] to extract features from data collected during bouts of gait. 83 | 84 | Three post-processing steps are applied to remove extraneous stride detections beyond physiological limits (an illustrative sketch of these rules appears at the end of this document). 85 | Step 1: Strides longer than 2.25 seconds or shorter than 0.25 seconds are removed. [@Najafi2003] 86 | Step 2: Strides with stance times exceeding 70% of the maximal stride time of 2.25 seconds are removed. [@Hollman2011] 87 | Step 3: Strides with an initial double support that exceeds 20% of the maximal stride time of 2.25 seconds are removed. [@Hollman2011] 88 | 89 | # Acknowledgements 90 | The Digital Medicine & Translational Imaging group at Pfizer, Inc. supported the development of this package. 91 | 92 | # License 93 | This project is licensed under the MIT License - see the LICENSE file for details. 94 | 95 | # References 96 | -------------------------------------------------------------------------------- /requirements.txt: -------------------------------------------------------------------------------- 1 | pandas >= 0.20.3 2 | scipy >= 1.2.0 3 | numpy >= 1.13.3 4 | PyWavelets >= 0.5.2 5 | scikit-learn == 0.21.3 6 | statsmodels >= 0.8.0 7 | bokeh >= 0.12.10 8 | dill >= 0.2.7.1 9 | deepdish >= 0.3.4 10 | -------------------------------------------------------------------------------- /setup.py: -------------------------------------------------------------------------------- 1 | import setuptools 2 | 3 | def requirements(): 4 | with open('requirements.txt', "r") as fh: 5 | return [x for x in fh.read().split('\n') if x] 6 | 7 | with open("README.md", "r") as fh: 8 | long_description = fh.read() 9 | 10 | setuptools.setup(name='gaitpy', 11 | version='1.6.1', 12 | description='Read and process raw vertical accelerometry data from a lumbar sensor during gait; calculate clinical gait characteristics.', 13 | long_description=long_description, 14 | long_description_content_type="text/markdown", 15 | url='http://github.com/matt002/gaitpy', 16 | packages=setuptools.find_packages(), 17 | author='Matthew Czech', 18 | author_email='czech1117@gmail.com', 19 | keywords=['gait', 'gaitpy', 'lumbar', 'waist', 'sensor', 'wearable', 'continuous wavelet', 'inverted pendulum', 'czech'], 20 | classifiers=["Programming Language :: Python :: 3.6", 21 | "License :: OSI Approved :: MIT License"], 22 | license='MIT', 23 | zip_safe=False, 24 | install_requires=requirements(), 25 | include_package_data=True) 26 | --------------------------------------------------------------------------------
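Finally, the three stride post-processing rules listed in the Algorithms section of paper.md can be written out as a short filter. The sketch below is illustrative only: the DataFrame column names (stride_time, stance_time, initial_double_support; all values in seconds) are hypothetical placeholders and do not correspond to GaitPy's internal implementation.

```python
# Illustrative summary of the three physiological stride filters described in paper.md.
import pandas as pd

MIN_STRIDE_TIME = 0.25   # seconds
MAX_STRIDE_TIME = 2.25   # seconds

def filter_strides(strides: pd.DataFrame) -> pd.DataFrame:
    """Drop strides that violate the three physiological limits."""
    keep = (
        strides['stride_time'].between(MIN_STRIDE_TIME, MAX_STRIDE_TIME)     # Step 1
        & (strides['stance_time'] <= 0.70 * MAX_STRIDE_TIME)                 # Step 2
        & (strides['initial_double_support'] <= 0.20 * MAX_STRIDE_TIME)      # Step 3
    )
    return strides[keep]

# Example: the first stride passes all three rules, the second fails all of them.
strides = pd.DataFrame({'stride_time': [1.10, 2.60],
                        'stance_time': [0.70, 1.90],
                        'initial_double_support': [0.20, 0.60]})
print(filter_strides(strides))
```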