├── .DS_Store ├── .gitignore ├── .travis.yml ├── LICENSE ├── README.rst ├── _config.yml ├── docs ├── Makefile ├── _config.yml ├── build │ ├── doctrees │ │ ├── environment.pickle │ │ ├── help.doctree │ │ ├── index.doctree │ │ └── reference.doctree │ ├── html │ │ ├── .buildinfo │ │ ├── _sources │ │ │ ├── help.rst.txt │ │ │ ├── index.rst.txt │ │ │ └── reference.rst.txt │ │ ├── _static │ │ │ ├── ajax-loader.gif │ │ │ ├── basic.css │ │ │ ├── comment-bright.png │ │ │ ├── comment-close.png │ │ │ ├── comment.png │ │ │ ├── css │ │ │ │ ├── badge_only.css │ │ │ │ └── theme.css │ │ │ ├── doctools.js │ │ │ ├── down-pressed.png │ │ │ ├── down.png │ │ │ ├── file.png │ │ │ ├── fonts │ │ │ │ ├── Lato │ │ │ │ │ ├── lato-bold.eot │ │ │ │ │ ├── lato-bold.ttf │ │ │ │ │ ├── lato-bold.woff │ │ │ │ │ ├── lato-bold.woff2 │ │ │ │ │ ├── lato-bolditalic.eot │ │ │ │ │ ├── lato-bolditalic.ttf │ │ │ │ │ ├── lato-bolditalic.woff │ │ │ │ │ ├── lato-bolditalic.woff2 │ │ │ │ │ ├── lato-italic.eot │ │ │ │ │ ├── lato-italic.ttf │ │ │ │ │ ├── lato-italic.woff │ │ │ │ │ ├── lato-italic.woff2 │ │ │ │ │ ├── lato-regular.eot │ │ │ │ │ ├── lato-regular.ttf │ │ │ │ │ ├── lato-regular.woff │ │ │ │ │ └── lato-regular.woff2 │ │ │ │ ├── RobotoSlab │ │ │ │ │ ├── roboto-slab-v7-bold.eot │ │ │ │ │ ├── roboto-slab-v7-bold.ttf │ │ │ │ │ ├── roboto-slab-v7-bold.woff │ │ │ │ │ ├── roboto-slab-v7-bold.woff2 │ │ │ │ │ ├── roboto-slab-v7-regular.eot │ │ │ │ │ ├── roboto-slab-v7-regular.ttf │ │ │ │ │ ├── roboto-slab-v7-regular.woff │ │ │ │ │ └── roboto-slab-v7-regular.woff2 │ │ │ │ ├── fontawesome-webfont.eot │ │ │ │ ├── fontawesome-webfont.svg │ │ │ │ ├── fontawesome-webfont.ttf │ │ │ │ ├── fontawesome-webfont.woff │ │ │ │ └── fontawesome-webfont.woff2 │ │ │ ├── jquery-3.1.0.js │ │ │ ├── jquery.js │ │ │ ├── js │ │ │ │ ├── modernizr.min.js │ │ │ │ └── theme.js │ │ │ ├── minus.png │ │ │ ├── plus.png │ │ │ ├── pygments.css │ │ │ ├── searchtools.js │ │ │ ├── underscore-1.3.1.js │ │ │ ├── underscore.js │ │ │ ├── up-pressed.png 
│ │ │ ├── up.png │ │ │ └── websupport.js │ │ ├── genindex.html │ │ ├── help.html │ │ ├── index.html │ │ ├── objects.inv │ │ ├── py-modindex.html │ │ ├── reference.html │ │ ├── search.html │ │ └── searchindex.js │ └── latex │ │ ├── Makefile │ │ ├── footnotehyper-sphinx.sty │ │ ├── needspace.sty │ │ ├── phonet.aux │ │ ├── phonet.idx │ │ ├── phonet.out │ │ ├── phonet.pdf │ │ ├── phonet.tex │ │ ├── phonet.toc │ │ ├── python.ist │ │ ├── sphinx.sty │ │ ├── sphinxhighlight.sty │ │ ├── sphinxhowto.cls │ │ └── sphinxmanual.cls ├── environment.yml ├── make.bat └── source │ ├── conf.py │ ├── help.rst │ ├── index.rst │ └── reference.rst ├── phonet ├── Phonological.py ├── __init__.py ├── audios │ ├── pataka.wav │ └── sentence.wav ├── example.py ├── models │ ├── model.h5 │ ├── model.json │ ├── mu.npy │ ├── phonemes.hdf5 │ └── std.npy ├── phonet.py └── train │ ├── Phonological.py │ ├── README.md │ ├── extract_feat.py │ ├── get_matrices_labels.py │ ├── get_scaler.py │ ├── main_train_RNN_MT.py │ ├── main_train_RNN_phoneme.py │ ├── read_textgrids.py │ ├── requirements.txt │ └── utils.py ├── readthedocs.yml ├── requirements.txt ├── setup.cfg ├── setup.py └── tests └── test_phonet.py /.DS_Store: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/.DS_Store -------------------------------------------------------------------------------- /.gitignore: -------------------------------------------------------------------------------- 1 | # Byte-compiled / optimized / DLL files 2 | __pycache__/ 3 | *.py[cod] 4 | *$py.class 5 | 6 | # C extensions 7 | *.so 8 | 9 | # Distribution / packaging 10 | .Python 11 | develop-eggs/ 12 | dist/ 13 | downloads/ 14 | eggs/ 15 | .eggs/ 16 | lib/ 17 | lib64/ 18 | parts/ 19 | sdist/ 20 | var/ 21 | wheels/ 22 | *.egg-info/ 23 | .installed.cfg 24 | *.egg 25 | MANIFEST 26 | 27 | # PyInstaller 28 | # Usually these files are written by a python 
script from a template 29 | # before PyInstaller builds the exe, so as to inject date/other infos into it. 30 | *.manifest 31 | *.spec 32 | 33 | # Installer logs 34 | pip-log.txt 35 | pip-delete-this-directory.txt 36 | 37 | # Unit test / coverage reports 38 | htmlcov/ 39 | .tox/ 40 | .coverage 41 | .coverage.* 42 | .cache 43 | nosetests.xml 44 | coverage.xml 45 | *.cover 46 | .hypothesis/ 47 | .pytest_cache/ 48 | 49 | # Translations 50 | *.mo 51 | *.pot 52 | 53 | # Django stuff: 54 | *.log 55 | local_settings.py 56 | db.sqlite3 57 | 58 | # Flask stuff: 59 | instance/ 60 | .webassets-cache 61 | 62 | # Scrapy stuff: 63 | .scrapy 64 | 65 | # PyBuilder 66 | target/ 67 | 68 | # Jupyter Notebook 69 | .ipynb_checkpoints 70 | 71 | # pyenv 72 | .python-version 73 | 74 | # celery beat schedule file 75 | celerybeat-schedule 76 | 77 | # SageMath parsed files 78 | *.sage.py 79 | 80 | # Environments 81 | .env 82 | .venv 83 | env/ 84 | venv/ 85 | ENV/ 86 | env.bak/ 87 | venv.bak/ 88 | 89 | # Spyder project settings 90 | .spyderproject 91 | .spyproject 92 | 93 | # Rope project settings 94 | .ropeproject 95 | 96 | # mkdocs documentation 97 | /site 98 | 99 | # mypy 100 | .mypy_cache/ 101 | #vscode 102 | .vscode/ 103 | -------------------------------------------------------------------------------- /.travis.yml: -------------------------------------------------------------------------------- 1 | language: python 2 | python: 3 | - "3.5" 4 | - "3.5-dev" # 3.5 development branch 5 | - "3.6" 6 | - "3.6-dev" # 3.6 development branch 7 | # command to install dependencies 8 | install: 9 | - pip install tensorflow 10 | - pip install -r requirements.txt 11 | # command to run tests 12 | script: 13 | - python ./tests/test.py 14 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 J. C. 
Vasquez-Correa 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 22 | -------------------------------------------------------------------------------- /README.rst: -------------------------------------------------------------------------------- 1 | ====================== 2 | Phonet 3 | ====================== 4 | 5 | .. image:: https://readthedocs.org/projects/phonet/badge/?version=latest 6 | :target: https://phonet.readthedocs.io/en/latest/?badge=latest 7 | :alt: Documentation Status 8 | 9 | .. image:: https://travis-ci.org/jcvasquezc/phonet.svg?branch=master 10 | :target: https://travis-ci.org/jcvasquezc/phonet 11 | 12 | This toolkit compute posteriors probabilities of phonological classes from audio files for several groups of phonemes according to the mode and manner of articulation. 
13 | 14 | If you are not sure about what phonological classes are, have a look at this 15 | `Phonological classes tutorial `_ 16 | 17 | 18 | `Project Documentation `_ 19 | 20 | `Paper `_ 21 | 22 | The list of the phonological classes available and the phonemes that are activated for each phonological class are observed in the following Table 23 | 24 | 25 | The list of the phonological classes available and the phonemes that are activated for each phonological class are observed in the following Table 26 | 27 | 28 | ================== ================================================================================ 29 | Phonological class Phonemes 30 | ================== ================================================================================ 31 | vocalic /a/, /e/, /i/, /o/, /u/ 32 | consonantal /b/, /tS/, /d/, /f/, /g/, /x/, /k/, /l/, /ʎ/, /m/, /n/, /p/, /ɾ/, /r/, /s/, /t/ 33 | back /a/, /o/, /u/ 34 | anterior /e/, /i/ 35 | open /a/, /e/, /o/ 36 | close /i/, /u/ 37 | nasal /m/, /n/ 38 | stop /p/, /b/, /t/, /k/, /g/, /tS/, /d/ 39 | continuant /f/, /b/, /tS/, /d/, /s/, /g/, /ʎ/, /x/ 40 | lateral /l/ 41 | flap /ɾ/ 42 | trill /r/ 43 | voiced /a/, /e/, /i/, /o/, /u/, /b/, /d/, /l/, /m/, /n/, /r/, /g/, /ʎ/ 44 | strident /f/, /s/, /tS/ 45 | labial /m/, /p/, /b/, /f/ 46 | dental /t/, /d/ 47 | velar /k/, /g/, /x/ 48 | pause /sil/ 49 | ================== ================================================================================ 50 | 51 | 52 | Installation 53 | ============ 54 | 55 | 56 | From this repository:: 57 | 58 | git clone https://github.com/jcvasquezc/phonet 59 | cd phonet 60 | python setup.py 61 | 62 | Usage 63 | ===== 64 | 65 | Supported features: 66 | 67 | - Estimate probabilities of phonological classes for an audio file 68 | 69 | `Example use `_ 70 | 71 | Estimation of phonological classes 72 | ==================================== 73 | 74 | Estimate the phonological classes using the BGRU models for an audio file or for a folder that contains audio 
files inside:: 75 | 76 | python 77 | phon=Phonet([phonclass]) 78 | get_phon_wav(self, audio_file, feat_file, plot_flag=True) 79 | 80 | ============= =========== 81 | Parameters Description 82 | ============= =========== 83 | audio_file file audio (.wav) sampled at 16 kHz 84 | feat_file file (.csv) to save the posteriors for the phonological classes 85 | phonclass list of phonological classes to be evaluated 86 | The list of phonological classes include: 87 | "consonantal", "back", "anterior", "open", "close", "nasal", "stop", 88 | "continuant", "lateral", "flap", "trill", "voice", "strident", 89 | "labial", "dental", "velar", "pause", "vocalic" or "all" 90 | plot_flag True or False, whether you want plots of phonological classes or not 91 | returns It crates the feat_file with the estimation of the phonological classes for each time-frame of the audio file. 92 | ============= =========== 93 | 94 | Training 95 | ==================================== 96 | 97 | If you want to train Phonet in your own language, or specific phonological classes that are not defined here, please refer to the folder `train `_ and follow the instructions there. 98 | 99 | If you experienced problems with the Training process, please send me an email `` 100 | 101 | 102 | Reference 103 | ================================== 104 | 105 | Phonet is available for research purposes 106 | 107 | If you use Phonet, please cite the following paper. 108 | 109 | @inproceedings{Vasquez-Correa2019, 110 | author={J. C. Vásquez-Correa and P. Klumpp and J. R. Orozco-Arroyave and E. N\"oth}, 111 | title={{Phonet: A Tool Based on Gated Recurrent Neural Networks to Extract Phonological Posteriors from Speech}}, 112 | year=2019, 113 | booktitle={Proc. 
Interspeech 2019}, 114 | pages={549--553}, 115 | doi={10.21437/Interspeech.2019-1405}, 116 | url={http://dx.doi.org/10.21437/Interspeech.2019-1405} 117 | } 118 | -------------------------------------------------------------------------------- /_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-slate -------------------------------------------------------------------------------- /docs/Makefile: -------------------------------------------------------------------------------- 1 | # Minimal makefile for Sphinx documentation 2 | # 3 | 4 | # You can set these variables from the command line. 5 | SPHINXOPTS = 6 | SPHINXBUILD = sphinx-build 7 | SPHINXPROJ = phonet 8 | SOURCEDIR = source 9 | BUILDDIR = build 10 | 11 | # Put it first so that "make" without argument is like "make help". 12 | help: 13 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) 14 | 15 | .PHONY: help Makefile 16 | 17 | # Catch-all target: route all unknown targets to Sphinx using the new 18 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). 
19 | %: Makefile 20 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) -------------------------------------------------------------------------------- /docs/_config.yml: -------------------------------------------------------------------------------- 1 | theme: jekyll-theme-time-machine -------------------------------------------------------------------------------- /docs/build/doctrees/environment.pickle: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/doctrees/environment.pickle -------------------------------------------------------------------------------- /docs/build/doctrees/help.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/doctrees/help.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/index.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/doctrees/index.doctree -------------------------------------------------------------------------------- /docs/build/doctrees/reference.doctree: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/doctrees/reference.doctree -------------------------------------------------------------------------------- /docs/build/html/.buildinfo: -------------------------------------------------------------------------------- 1 | # Sphinx build info version 1 2 | # This file hashes the configuration used when building these files. When it is not found, a full rebuild will be done. 
3 | config: b327a9881c41cbe5aca7a232491ec965 4 | tags: 645f666f9bcd5a90fca523b33c5a78b7 5 | -------------------------------------------------------------------------------- /docs/build/html/_sources/help.rst.txt: -------------------------------------------------------------------------------- 1 | Need Help? 2 | ================================== 3 | 4 | If you have trouble with Phonet, please write to Camilo Vasquez at: juan.vasquez@fau.de 5 | 6 | -------------------------------------------------------------------------------- /docs/build/html/_sources/index.rst.txt: -------------------------------------------------------------------------------- 1 | .. phonet documentation master file, created by 2 | sphinx-quickstart on Sat Mar 9 04:39:38 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Phonet's documentation! 7 | ================================== 8 | 9 | This toolkit compute posteriors probabilities of phonological classes from audio files for several groups of phonemes according to the mode and manner of articulation. 10 | 11 | If you are not sure about what phonological classes are, have a look at this 12 | `Phonological classes tutorial `_ 13 | 14 | 15 | The code for this project is available at https://github.com/jcvasquezc/phonet . 
16 | 17 | The list of the phonological classes available and the phonemes that are activated for each phonological class are observed in the following Table 18 | 19 | 20 | ================== ================================================================================ 21 | Phonological class Phonemes 22 | ================== ================================================================================ 23 | vocalic /a/, /e/, /i/, /o/, /u/ 24 | consonantal /b/, /tS/, /d/, /f/, /g/, /x/, /k/, /l/, /ʎ/, /m/, /n/, /p/, /ɾ/, /r/, /s/, /t/ 25 | back /a/, /o/, /u/ 26 | anterior /e/, /i/ 27 | open /a/, /e/, /o/ 28 | close /i/, /u/ 29 | nasal /m/, /n/ 30 | stop /p/, /b/, /t/, /k/, /g/, /tS/, /d/ 31 | continuant /f/, /b/, /tS/, /d/, /s/, /g/, /ʎ/, /x/ 32 | lateral /l/ 33 | flap /ɾ/ 34 | trill /r/ 35 | voiced /a/, /e/, /i/, /o/, /u/, /b/, /d/, /l/, /m/, /n/, /r/, /g/, /ʎ/ 36 | strident /f/, /s/, /tS/ 37 | labial /m/, /p/, /b/, /f/ 38 | dental /t/, /d/ 39 | velar /k/, /g/, /x/ 40 | pause /sil/ 41 | ================== ================================================================================ 42 | 43 | 44 | .. toctree:: 45 | :maxdepth: 3 46 | 47 | help 48 | reference 49 | 50 | 51 | 52 | Supported features: 53 | 54 | - :py:meth:`phonet.model` - This is the architecture used for the estimation of the phonological classes using a multitask learning strategy. It consists of a 2 Bidirectional GRU layers, followed by a time-distributed dense layer 55 | - :py:meth:`phonet.get_phon_wav` - Estimate the phonological classes using the BGRU models for an audio file (.wav) 56 | - :py:meth:`phonet.get_phon_path` - Estimate the phonological classes using the BGRU models for all the (.wav) audio files included inside a directory. 57 | - :py:meth:`phonet.get_posteriorgram` - Estimate the posteriorgram for an audio file (.wav) sampled at 16kHz. 58 | - :py:meth:`phonet.get_PLLR` - Estimate the phonological log-likelihood ratio (PLLR) features for an audio file (.wav) sampled at 16kHz. 
59 | 60 | Installation 61 | ------------------------------------- 62 | 63 | From the source file:: 64 | 65 | git clone https://github.com/jcvasquezc/phonet 66 | cd phonet 67 | python setup.py install 68 | 69 | Methods 70 | ------------------------------------- 71 | 72 | .. automodule:: phonet 73 | 74 | .. autoclass:: Phonet 75 | :members: 76 | 77 | 78 | Indices and tables 79 | ------------------------------------- 80 | * :ref:`genindex` 81 | * :ref:`modindex` 82 | * :ref:`search` 83 | 84 | 85 | 86 | Help 87 | ------------------------------------- 88 | If you have trouble with Phonet, please write to Camilo Vasquez at: juan.vasquez@fau.de 89 | -------------------------------------------------------------------------------- /docs/build/html/_sources/reference.rst.txt: -------------------------------------------------------------------------------- 1 | References 2 | ================================== 3 | 4 | If you use Phonet for research purposes, please cite the following paper: 5 | 6 | Vásquez-Correa, J. C., Klumpp, P., Orozco-Arroyave, J. R., & Nöth, E. (2019). Phonet: a Tool Based on Gated Recurrent Neural Networks to Extract Phonological Posteriors from Speech. Proc. Interspeech 2019, 549-553. 7 | 8 | `Download paper `_ 9 | 10 | -------------------------------------------------------------------------------- /docs/build/html/_static/ajax-loader.gif: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/ajax-loader.gif -------------------------------------------------------------------------------- /docs/build/html/_static/basic.css: -------------------------------------------------------------------------------- 1 | /* 2 | * basic.css 3 | * ~~~~~~~~~ 4 | * 5 | * Sphinx stylesheet -- basic theme. 6 | * 7 | * :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | 12 | /* -- main layout ----------------------------------------------------------- */ 13 | 14 | div.clearer { 15 | clear: both; 16 | } 17 | 18 | /* -- relbar ---------------------------------------------------------------- */ 19 | 20 | div.related { 21 | width: 100%; 22 | font-size: 90%; 23 | } 24 | 25 | div.related h3 { 26 | display: none; 27 | } 28 | 29 | div.related ul { 30 | margin: 0; 31 | padding: 0 0 0 10px; 32 | list-style: none; 33 | } 34 | 35 | div.related li { 36 | display: inline; 37 | } 38 | 39 | div.related li.right { 40 | float: right; 41 | margin-right: 5px; 42 | } 43 | 44 | /* -- sidebar --------------------------------------------------------------- */ 45 | 46 | div.sphinxsidebarwrapper { 47 | padding: 10px 5px 0 10px; 48 | } 49 | 50 | div.sphinxsidebar { 51 | float: left; 52 | width: 230px; 53 | margin-left: -100%; 54 | font-size: 90%; 55 | word-wrap: break-word; 56 | overflow-wrap : break-word; 57 | } 58 | 59 | div.sphinxsidebar ul { 60 | list-style: none; 61 | } 62 | 63 | div.sphinxsidebar ul ul, 64 | div.sphinxsidebar ul.want-points { 65 | margin-left: 20px; 66 | list-style: square; 67 | } 68 | 69 | div.sphinxsidebar ul ul { 70 | margin-top: 0; 71 | margin-bottom: 0; 72 | } 73 | 74 | div.sphinxsidebar form { 75 | margin-top: 10px; 76 | } 77 | 78 | div.sphinxsidebar input { 79 | border: 1px solid #98dbcc; 80 | font-family: sans-serif; 81 | font-size: 1em; 82 | } 83 | 84 | div.sphinxsidebar #searchbox input[type="text"] { 85 | width: 170px; 86 | } 87 | 88 | img { 89 | border: 0; 90 | max-width: 100%; 91 | } 92 | 93 | /* -- search page ----------------------------------------------------------- */ 94 | 95 | ul.search { 96 | margin: 10px 0 0 20px; 97 | padding: 0; 98 | } 99 | 100 | ul.search li { 101 | padding: 5px 0 5px 20px; 102 | background-image: url(file.png); 103 | background-repeat: no-repeat; 104 | background-position: 0 7px; 105 | } 106 | 107 | ul.search li a { 108 | font-weight: bold; 109 | } 110 | 111 | ul.search li 
div.context { 112 | color: #888; 113 | margin: 2px 0 0 30px; 114 | text-align: left; 115 | } 116 | 117 | ul.keywordmatches li.goodmatch a { 118 | font-weight: bold; 119 | } 120 | 121 | /* -- index page ------------------------------------------------------------ */ 122 | 123 | table.contentstable { 124 | width: 90%; 125 | margin-left: auto; 126 | margin-right: auto; 127 | } 128 | 129 | table.contentstable p.biglink { 130 | line-height: 150%; 131 | } 132 | 133 | a.biglink { 134 | font-size: 1.3em; 135 | } 136 | 137 | span.linkdescr { 138 | font-style: italic; 139 | padding-top: 5px; 140 | font-size: 90%; 141 | } 142 | 143 | /* -- general index --------------------------------------------------------- */ 144 | 145 | table.indextable { 146 | width: 100%; 147 | } 148 | 149 | table.indextable td { 150 | text-align: left; 151 | vertical-align: top; 152 | } 153 | 154 | table.indextable ul { 155 | margin-top: 0; 156 | margin-bottom: 0; 157 | list-style-type: none; 158 | } 159 | 160 | table.indextable > tbody > tr > td > ul { 161 | padding-left: 0em; 162 | } 163 | 164 | table.indextable tr.pcap { 165 | height: 10px; 166 | } 167 | 168 | table.indextable tr.cap { 169 | margin-top: 10px; 170 | background-color: #f2f2f2; 171 | } 172 | 173 | img.toggler { 174 | margin-right: 3px; 175 | margin-top: 3px; 176 | cursor: pointer; 177 | } 178 | 179 | div.modindex-jumpbox { 180 | border-top: 1px solid #ddd; 181 | border-bottom: 1px solid #ddd; 182 | margin: 1em 0 1em 0; 183 | padding: 0.4em; 184 | } 185 | 186 | div.genindex-jumpbox { 187 | border-top: 1px solid #ddd; 188 | border-bottom: 1px solid #ddd; 189 | margin: 1em 0 1em 0; 190 | padding: 0.4em; 191 | } 192 | 193 | /* -- domain module index --------------------------------------------------- */ 194 | 195 | table.modindextable td { 196 | padding: 2px; 197 | border-collapse: collapse; 198 | } 199 | 200 | /* -- general body styles --------------------------------------------------- */ 201 | 202 | div.body p, div.body dd, div.body 
li, div.body blockquote { 203 | -moz-hyphens: auto; 204 | -ms-hyphens: auto; 205 | -webkit-hyphens: auto; 206 | hyphens: auto; 207 | } 208 | 209 | a.headerlink { 210 | visibility: hidden; 211 | } 212 | 213 | h1:hover > a.headerlink, 214 | h2:hover > a.headerlink, 215 | h3:hover > a.headerlink, 216 | h4:hover > a.headerlink, 217 | h5:hover > a.headerlink, 218 | h6:hover > a.headerlink, 219 | dt:hover > a.headerlink, 220 | caption:hover > a.headerlink, 221 | p.caption:hover > a.headerlink, 222 | div.code-block-caption:hover > a.headerlink { 223 | visibility: visible; 224 | } 225 | 226 | div.body p.caption { 227 | text-align: inherit; 228 | } 229 | 230 | div.body td { 231 | text-align: left; 232 | } 233 | 234 | .first { 235 | margin-top: 0 !important; 236 | } 237 | 238 | p.rubric { 239 | margin-top: 30px; 240 | font-weight: bold; 241 | } 242 | 243 | img.align-left, .figure.align-left, object.align-left { 244 | clear: left; 245 | float: left; 246 | margin-right: 1em; 247 | } 248 | 249 | img.align-right, .figure.align-right, object.align-right { 250 | clear: right; 251 | float: right; 252 | margin-left: 1em; 253 | } 254 | 255 | img.align-center, .figure.align-center, object.align-center { 256 | display: block; 257 | margin-left: auto; 258 | margin-right: auto; 259 | } 260 | 261 | .align-left { 262 | text-align: left; 263 | } 264 | 265 | .align-center { 266 | text-align: center; 267 | } 268 | 269 | .align-right { 270 | text-align: right; 271 | } 272 | 273 | /* -- sidebars -------------------------------------------------------------- */ 274 | 275 | div.sidebar { 276 | margin: 0 0 0.5em 1em; 277 | border: 1px solid #ddb; 278 | padding: 7px 7px 0 7px; 279 | background-color: #ffe; 280 | width: 40%; 281 | float: right; 282 | } 283 | 284 | p.sidebar-title { 285 | font-weight: bold; 286 | } 287 | 288 | /* -- topics ---------------------------------------------------------------- */ 289 | 290 | div.topic { 291 | border: 1px solid #ccc; 292 | padding: 7px 7px 0 7px; 293 | 
margin: 10px 0 10px 0; 294 | } 295 | 296 | p.topic-title { 297 | font-size: 1.1em; 298 | font-weight: bold; 299 | margin-top: 10px; 300 | } 301 | 302 | /* -- admonitions ----------------------------------------------------------- */ 303 | 304 | div.admonition { 305 | margin-top: 10px; 306 | margin-bottom: 10px; 307 | padding: 7px; 308 | } 309 | 310 | div.admonition dt { 311 | font-weight: bold; 312 | } 313 | 314 | div.admonition dl { 315 | margin-bottom: 0; 316 | } 317 | 318 | p.admonition-title { 319 | margin: 0px 10px 5px 0px; 320 | font-weight: bold; 321 | } 322 | 323 | div.body p.centered { 324 | text-align: center; 325 | margin-top: 25px; 326 | } 327 | 328 | /* -- tables ---------------------------------------------------------------- */ 329 | 330 | table.docutils { 331 | border: 0; 332 | border-collapse: collapse; 333 | } 334 | 335 | table caption span.caption-number { 336 | font-style: italic; 337 | } 338 | 339 | table caption span.caption-text { 340 | } 341 | 342 | table.docutils td, table.docutils th { 343 | padding: 1px 8px 1px 5px; 344 | border-top: 0; 345 | border-left: 0; 346 | border-right: 0; 347 | border-bottom: 1px solid #aaa; 348 | } 349 | 350 | table.footnote td, table.footnote th { 351 | border: 0 !important; 352 | } 353 | 354 | th { 355 | text-align: left; 356 | padding-right: 5px; 357 | } 358 | 359 | table.citation { 360 | border-left: solid 1px gray; 361 | margin-left: 1px; 362 | } 363 | 364 | table.citation td { 365 | border-bottom: none; 366 | } 367 | 368 | /* -- figures --------------------------------------------------------------- */ 369 | 370 | div.figure { 371 | margin: 0.5em; 372 | padding: 0.5em; 373 | } 374 | 375 | div.figure p.caption { 376 | padding: 0.3em; 377 | } 378 | 379 | div.figure p.caption span.caption-number { 380 | font-style: italic; 381 | } 382 | 383 | div.figure p.caption span.caption-text { 384 | } 385 | 386 | /* -- field list styles ----------------------------------------------------- */ 387 | 388 | 
table.field-list td, table.field-list th { 389 | border: 0 !important; 390 | } 391 | 392 | .field-list ul { 393 | margin: 0; 394 | padding-left: 1em; 395 | } 396 | 397 | .field-list p { 398 | margin: 0; 399 | } 400 | 401 | /* -- other body styles ----------------------------------------------------- */ 402 | 403 | ol.arabic { 404 | list-style: decimal; 405 | } 406 | 407 | ol.loweralpha { 408 | list-style: lower-alpha; 409 | } 410 | 411 | ol.upperalpha { 412 | list-style: upper-alpha; 413 | } 414 | 415 | ol.lowerroman { 416 | list-style: lower-roman; 417 | } 418 | 419 | ol.upperroman { 420 | list-style: upper-roman; 421 | } 422 | 423 | dl { 424 | margin-bottom: 15px; 425 | } 426 | 427 | dd p { 428 | margin-top: 0px; 429 | } 430 | 431 | dd ul, dd table { 432 | margin-bottom: 10px; 433 | } 434 | 435 | dd { 436 | margin-top: 3px; 437 | margin-bottom: 10px; 438 | margin-left: 30px; 439 | } 440 | 441 | dt:target, .highlighted { 442 | background-color: #fbe54e; 443 | } 444 | 445 | dl.glossary dt { 446 | font-weight: bold; 447 | font-size: 1.1em; 448 | } 449 | 450 | .optional { 451 | font-size: 1.3em; 452 | } 453 | 454 | .sig-paren { 455 | font-size: larger; 456 | } 457 | 458 | .versionmodified { 459 | font-style: italic; 460 | } 461 | 462 | .system-message { 463 | background-color: #fda; 464 | padding: 5px; 465 | border: 3px solid red; 466 | } 467 | 468 | .footnote:target { 469 | background-color: #ffa; 470 | } 471 | 472 | .line-block { 473 | display: block; 474 | margin-top: 1em; 475 | margin-bottom: 1em; 476 | } 477 | 478 | .line-block .line-block { 479 | margin-top: 0; 480 | margin-bottom: 0; 481 | margin-left: 1.5em; 482 | } 483 | 484 | .guilabel, .menuselection { 485 | font-family: sans-serif; 486 | } 487 | 488 | .accelerator { 489 | text-decoration: underline; 490 | } 491 | 492 | .classifier { 493 | font-style: oblique; 494 | } 495 | 496 | abbr, acronym { 497 | border-bottom: dotted 1px; 498 | cursor: help; 499 | } 500 | 501 | /* -- code displays 
--------------------------------------------------------- */ 502 | 503 | pre { 504 | overflow: auto; 505 | overflow-y: hidden; /* fixes display issues on Chrome browsers */ 506 | } 507 | 508 | span.pre { 509 | -moz-hyphens: none; 510 | -ms-hyphens: none; 511 | -webkit-hyphens: none; 512 | hyphens: none; 513 | } 514 | 515 | td.linenos pre { 516 | padding: 5px 0px; 517 | border: 0; 518 | background-color: transparent; 519 | color: #aaa; 520 | } 521 | 522 | table.highlighttable { 523 | margin-left: 0.5em; 524 | } 525 | 526 | table.highlighttable td { 527 | padding: 0 0.5em 0 0.5em; 528 | } 529 | 530 | div.code-block-caption { 531 | padding: 2px 5px; 532 | font-size: small; 533 | } 534 | 535 | div.code-block-caption code { 536 | background-color: transparent; 537 | } 538 | 539 | div.code-block-caption + div > div.highlight > pre { 540 | margin-top: 0; 541 | } 542 | 543 | div.code-block-caption span.caption-number { 544 | padding: 0.1em 0.3em; 545 | font-style: italic; 546 | } 547 | 548 | div.code-block-caption span.caption-text { 549 | } 550 | 551 | div.literal-block-wrapper { 552 | padding: 1em 1em 0; 553 | } 554 | 555 | div.literal-block-wrapper div.highlight { 556 | margin: 0; 557 | } 558 | 559 | code.descname { 560 | background-color: transparent; 561 | font-weight: bold; 562 | font-size: 1.2em; 563 | } 564 | 565 | code.descclassname { 566 | background-color: transparent; 567 | } 568 | 569 | code.xref, a code { 570 | background-color: transparent; 571 | font-weight: bold; 572 | } 573 | 574 | h1 code, h2 code, h3 code, h4 code, h5 code, h6 code { 575 | background-color: transparent; 576 | } 577 | 578 | .viewcode-link { 579 | float: right; 580 | } 581 | 582 | .viewcode-back { 583 | float: right; 584 | font-family: sans-serif; 585 | } 586 | 587 | div.viewcode-block:target { 588 | margin: -1px -10px; 589 | padding: 0 10px; 590 | } 591 | 592 | /* -- math display ---------------------------------------------------------- */ 593 | 594 | img.math { 595 | vertical-align: 
middle; 596 | } 597 | 598 | div.body div.math p { 599 | text-align: center; 600 | } 601 | 602 | span.eqno { 603 | float: right; 604 | } 605 | 606 | span.eqno a.headerlink { 607 | position: relative; 608 | left: 0px; 609 | z-index: 1; 610 | } 611 | 612 | div.math:hover a.headerlink { 613 | visibility: visible; 614 | } 615 | 616 | /* -- printout stylesheet --------------------------------------------------- */ 617 | 618 | @media print { 619 | div.document, 620 | div.documentwrapper, 621 | div.bodywrapper { 622 | margin: 0 !important; 623 | width: 100%; 624 | } 625 | 626 | div.sphinxsidebar, 627 | div.related, 628 | div.footer, 629 | #top-link { 630 | display: none; 631 | } 632 | } -------------------------------------------------------------------------------- /docs/build/html/_static/comment-bright.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/comment-bright.png -------------------------------------------------------------------------------- /docs/build/html/_static/comment-close.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/comment-close.png -------------------------------------------------------------------------------- /docs/build/html/_static/comment.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/comment.png -------------------------------------------------------------------------------- /docs/build/html/_static/css/badge_only.css: -------------------------------------------------------------------------------- 1 | 
.fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../fonts/fontawesome-webfont.eot");src:url("../fonts/fontawesome-webfont.eot?#iefix") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff") format("woff"),url("../fonts/fontawesome-webfont.ttf") format("truetype"),url("../fonts/fontawesome-webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions 
.rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up{height:auto;max-height:100%}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}} 2 | -------------------------------------------------------------------------------- /docs/build/html/_static/doctools.js: -------------------------------------------------------------------------------- 1 | /* 2 | * doctools.js 3 | * ~~~~~~~~~~~ 4 | * 5 | * Sphinx JavaScript utilities for all documentation. 6 | * 7 | * :copyright: Copyright 2007-2017 by the Sphinx team, see AUTHORS. 8 | * :license: BSD, see LICENSE for details. 
9 | * 10 | */ 11 | 12 | /** 13 | * select a different prefix for underscore 14 | */ 15 | $u = _.noConflict(); 16 | 17 | /** 18 | * make the code below compatible with browsers without 19 | * an installed firebug like debugger 20 | if (!window.console || !console.firebug) { 21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir", 22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace", 23 | "profile", "profileEnd"]; 24 | window.console = {}; 25 | for (var i = 0; i < names.length; ++i) 26 | window.console[names[i]] = function() {}; 27 | } 28 | */ 29 | 30 | /** 31 | * small helper function to urldecode strings 32 | */ 33 | jQuery.urldecode = function(x) { 34 | return decodeURIComponent(x).replace(/\+/g, ' '); 35 | }; 36 | 37 | /** 38 | * small helper function to urlencode strings 39 | */ 40 | jQuery.urlencode = encodeURIComponent; 41 | 42 | /** 43 | * This function returns the parsed url parameters of the 44 | * current request. Multiple values per key are supported, 45 | * it will always return arrays of strings for the value parts. 46 | */ 47 | jQuery.getQueryParameters = function(s) { 48 | if (typeof s == 'undefined') 49 | s = document.location.search; 50 | var parts = s.substr(s.indexOf('?') + 1).split('&'); 51 | var result = {}; 52 | for (var i = 0; i < parts.length; i++) { 53 | var tmp = parts[i].split('=', 2); 54 | var key = jQuery.urldecode(tmp[0]); 55 | var value = jQuery.urldecode(tmp[1]); 56 | if (key in result) 57 | result[key].push(value); 58 | else 59 | result[key] = [value]; 60 | } 61 | return result; 62 | }; 63 | 64 | /** 65 | * highlight a given string on a jquery object by wrapping it in 66 | * span elements with the given class name. 
67 | */ 68 | jQuery.fn.highlightText = function(text, className) { 69 | function highlight(node) { 70 | if (node.nodeType == 3) { 71 | var val = node.nodeValue; 72 | var pos = val.toLowerCase().indexOf(text); 73 | if (pos >= 0 && !jQuery(node.parentNode).hasClass(className)) { 74 | var span = document.createElement("span"); 75 | span.className = className; 76 | span.appendChild(document.createTextNode(val.substr(pos, text.length))); 77 | node.parentNode.insertBefore(span, node.parentNode.insertBefore( 78 | document.createTextNode(val.substr(pos + text.length)), 79 | node.nextSibling)); 80 | node.nodeValue = val.substr(0, pos); 81 | } 82 | } 83 | else if (!jQuery(node).is("button, select, textarea")) { 84 | jQuery.each(node.childNodes, function() { 85 | highlight(this); 86 | }); 87 | } 88 | } 89 | return this.each(function() { 90 | highlight(this); 91 | }); 92 | }; 93 | 94 | /* 95 | * backward compatibility for jQuery.browser 96 | * This will be supported until firefox bug is fixed. 97 | */ 98 | if (!jQuery.browser) { 99 | jQuery.uaMatch = function(ua) { 100 | ua = ua.toLowerCase(); 101 | 102 | var match = /(chrome)[ \/]([\w.]+)/.exec(ua) || 103 | /(webkit)[ \/]([\w.]+)/.exec(ua) || 104 | /(opera)(?:.*version|)[ \/]([\w.]+)/.exec(ua) || 105 | /(msie) ([\w.]+)/.exec(ua) || 106 | ua.indexOf("compatible") < 0 && /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua) || 107 | []; 108 | 109 | return { 110 | browser: match[ 1 ] || "", 111 | version: match[ 2 ] || "0" 112 | }; 113 | }; 114 | jQuery.browser = {}; 115 | jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true; 116 | } 117 | 118 | /** 119 | * Small JavaScript module for the documentation. 120 | */ 121 | var Documentation = { 122 | 123 | init : function() { 124 | this.fixFirefoxAnchorBug(); 125 | this.highlightSearchWords(); 126 | this.initIndexTable(); 127 | 128 | }, 129 | 130 | /** 131 | * i18n support 132 | */ 133 | TRANSLATIONS : {}, 134 | PLURAL_EXPR : function(n) { return n == 1 ? 
0 : 1; }, 135 | LOCALE : 'unknown', 136 | 137 | // gettext and ngettext don't access this so that the functions 138 | // can safely bound to a different name (_ = Documentation.gettext) 139 | gettext : function(string) { 140 | var translated = Documentation.TRANSLATIONS[string]; 141 | if (typeof translated == 'undefined') 142 | return string; 143 | return (typeof translated == 'string') ? translated : translated[0]; 144 | }, 145 | 146 | ngettext : function(singular, plural, n) { 147 | var translated = Documentation.TRANSLATIONS[singular]; 148 | if (typeof translated == 'undefined') 149 | return (n == 1) ? singular : plural; 150 | return translated[Documentation.PLURALEXPR(n)]; 151 | }, 152 | 153 | addTranslations : function(catalog) { 154 | for (var key in catalog.messages) 155 | this.TRANSLATIONS[key] = catalog.messages[key]; 156 | this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')'); 157 | this.LOCALE = catalog.locale; 158 | }, 159 | 160 | /** 161 | * add context elements like header anchor links 162 | */ 163 | addContextElements : function() { 164 | $('div[id] > :header:first').each(function() { 165 | $('\u00B6'). 166 | attr('href', '#' + this.id). 167 | attr('title', _('Permalink to this headline')). 168 | appendTo(this); 169 | }); 170 | $('dt[id]').each(function() { 171 | $('\u00B6'). 172 | attr('href', '#' + this.id). 173 | attr('title', _('Permalink to this definition')). 174 | appendTo(this); 175 | }); 176 | }, 177 | 178 | /** 179 | * workaround a firefox stupidity 180 | * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075 181 | */ 182 | fixFirefoxAnchorBug : function() { 183 | if (document.location.hash) 184 | window.setTimeout(function() { 185 | document.location.href += ''; 186 | }, 10); 187 | }, 188 | 189 | /** 190 | * highlight the search words provided in the url in the text 191 | */ 192 | highlightSearchWords : function() { 193 | var params = $.getQueryParameters(); 194 | var terms = (params.highlight) ? 
params.highlight[0].split(/\s+/) : []; 195 | if (terms.length) { 196 | var body = $('div.body'); 197 | if (!body.length) { 198 | body = $('body'); 199 | } 200 | window.setTimeout(function() { 201 | $.each(terms, function() { 202 | body.highlightText(this.toLowerCase(), 'highlighted'); 203 | }); 204 | }, 10); 205 | $('') 207 | .appendTo($('#searchbox')); 208 | } 209 | }, 210 | 211 | /** 212 | * init the domain index toggle buttons 213 | */ 214 | initIndexTable : function() { 215 | var togglers = $('img.toggler').click(function() { 216 | var src = $(this).attr('src'); 217 | var idnum = $(this).attr('id').substr(7); 218 | $('tr.cg-' + idnum).toggle(); 219 | if (src.substr(-9) == 'minus.png') 220 | $(this).attr('src', src.substr(0, src.length-9) + 'plus.png'); 221 | else 222 | $(this).attr('src', src.substr(0, src.length-8) + 'minus.png'); 223 | }).css('display', ''); 224 | if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) { 225 | togglers.click(); 226 | } 227 | }, 228 | 229 | /** 230 | * helper function to hide the search marks again 231 | */ 232 | hideSearchWords : function() { 233 | $('#searchbox .highlight-link').fadeOut(300); 234 | $('span.highlighted').removeClass('highlighted'); 235 | }, 236 | 237 | /** 238 | * make the url absolute 239 | */ 240 | makeURL : function(relativeURL) { 241 | return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL; 242 | }, 243 | 244 | /** 245 | * get the current relative url 246 | */ 247 | getCurrentURL : function() { 248 | var path = document.location.pathname; 249 | var parts = path.split(/\//); 250 | $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() { 251 | if (this == '..') 252 | parts.pop(); 253 | }); 254 | var url = parts.join('/'); 255 | return path.substring(url.lastIndexOf('/') + 1, path.length - 1); 256 | }, 257 | 258 | initOnKeyListeners: function() { 259 | $(document).keyup(function(event) { 260 | var activeElementType = document.activeElement.tagName; 261 | // don't navigate when in search box or textarea 262 | if 
(activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') { 263 | switch (event.keyCode) { 264 | case 37: // left 265 | var prevHref = $('link[rel="prev"]').prop('href'); 266 | if (prevHref) { 267 | window.location.href = prevHref; 268 | return false; 269 | } 270 | case 39: // right 271 | var nextHref = $('link[rel="next"]').prop('href'); 272 | if (nextHref) { 273 | window.location.href = nextHref; 274 | return false; 275 | } 276 | } 277 | } 278 | }); 279 | } 280 | }; 281 | 282 | // quick alias for translations 283 | _ = Documentation.gettext; 284 | 285 | $(document).ready(function() { 286 | Documentation.init(); 287 | }); -------------------------------------------------------------------------------- /docs/build/html/_static/down-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/down-pressed.png -------------------------------------------------------------------------------- /docs/build/html/_static/down.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/down.png -------------------------------------------------------------------------------- /docs/build/html/_static/file.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/file.png -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.eot: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bold.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bold.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bold.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bold.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bolditalic.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bolditalic.ttf 
-------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-bolditalic.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-bolditalic.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-italic.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-italic.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-italic.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-italic.woff2: 
-------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-italic.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-regular.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-regular.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-regular.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/Lato/lato-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/Lato/lato-regular.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/fontawesome-webfont.eot: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/fontawesome-webfont.eot -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/fontawesome-webfont.ttf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/fontawesome-webfont.ttf -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/fontawesome-webfont.woff: -------------------------------------------------------------------------------- 
https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/fontawesome-webfont.woff -------------------------------------------------------------------------------- /docs/build/html/_static/fonts/fontawesome-webfont.woff2: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/fonts/fontawesome-webfont.woff2 -------------------------------------------------------------------------------- /docs/build/html/_static/js/modernizr.min.js: -------------------------------------------------------------------------------- 1 | /* Modernizr 2.6.2 (Custom Build) | MIT & BSD 2 | * Build: http://modernizr.com/download/#-fontface-backgroundsize-borderimage-borderradius-boxshadow-flexbox-hsla-multiplebgs-opacity-rgba-textshadow-cssanimations-csscolumns-generatedcontent-cssgradients-cssreflections-csstransforms-csstransforms3d-csstransitions-applicationcache-canvas-canvastext-draganddrop-hashchange-history-audio-video-indexeddb-input-inputtypes-localstorage-postmessage-sessionstorage-websockets-websqldatabase-webworkers-geolocation-inlinesvg-smil-svg-svgclippaths-touch-webgl-shiv-mq-cssclasses-addtest-prefixed-teststyles-testprop-testallprops-hasevent-prefixes-domprefixes-load 3 | */ 4 | ;window.Modernizr=function(a,b,c){function D(a){j.cssText=a}function E(a,b){return D(n.join(a+";")+(b||""))}function F(a,b){return typeof a===b}function G(a,b){return!!~(""+a).indexOf(b)}function H(a,b){for(var d in a){var e=a[d];if(!G(e,"-")&&j[e]!==c)return b=="pfx"?e:!0}return!1}function I(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:F(f,"function")?f.bind(d||b):f}return!1}function J(a,b,c){var d=a.charAt(0).toUpperCase()+a.slice(1),e=(a+" "+p.join(d+" ")+d).split(" ");return F(b,"string")||F(b,"undefined")?H(e,b):(e=(a+" "+q.join(d+" ")+d).split(" "),I(e,b,c))}function 
K(){e.input=function(c){for(var d=0,e=c.length;d',a,""].join(""),l.id=h,(m?l:n).innerHTML+=f,n.appendChild(l),m||(n.style.background="",n.style.overflow="hidden",k=g.style.overflow,g.style.overflow="hidden",g.appendChild(n)),i=c(l,a),m?l.parentNode.removeChild(l):(n.parentNode.removeChild(n),g.style.overflow=k),!!i},z=function(b){var c=a.matchMedia||a.msMatchMedia;if(c)return c(b).matches;var d;return y("@media "+b+" { #"+h+" { position: absolute; } }",function(b){d=(a.getComputedStyle?getComputedStyle(b,null):b.currentStyle)["position"]=="absolute"}),d},A=function(){function d(d,e){e=e||b.createElement(a[d]||"div"),d="on"+d;var f=d in e;return f||(e.setAttribute||(e=b.createElement("div")),e.setAttribute&&e.removeAttribute&&(e.setAttribute(d,""),f=F(e[d],"function"),F(e[d],"undefined")||(e[d]=c),e.removeAttribute(d))),e=null,f}var a={select:"input",change:"input",submit:"form",reset:"form",error:"img",load:"img",abort:"img"};return d}(),B={}.hasOwnProperty,C;!F(B,"undefined")&&!F(B.call,"undefined")?C=function(a,b){return B.call(a,b)}:C=function(a,b){return b in a&&F(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=w.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new a,g=c.apply(f,d.concat(w.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(w.call(arguments)))};return e}),s.flexbox=function(){return J("flexWrap")},s.canvas=function(){var a=b.createElement("canvas");return!!a.getContext&&!!a.getContext("2d")},s.canvastext=function(){return!!e.canvas&&!!F(b.createElement("canvas").getContext("2d").fillText,"function")},s.webgl=function(){return!!a.WebGLRenderingContext},s.touch=function(){var c;return"ontouchstart"in a||a.DocumentTouch&&b instanceof DocumentTouch?c=!0:y(["@media 
(",n.join("touch-enabled),("),h,")","{#modernizr{top:9px;position:absolute}}"].join(""),function(a){c=a.offsetTop===9}),c},s.geolocation=function(){return"geolocation"in navigator},s.postmessage=function(){return!!a.postMessage},s.websqldatabase=function(){return!!a.openDatabase},s.indexedDB=function(){return!!J("indexedDB",a)},s.hashchange=function(){return A("hashchange",a)&&(b.documentMode===c||b.documentMode>7)},s.history=function(){return!!a.history&&!!history.pushState},s.draganddrop=function(){var a=b.createElement("div");return"draggable"in a||"ondragstart"in a&&"ondrop"in a},s.websockets=function(){return"WebSocket"in a||"MozWebSocket"in a},s.rgba=function(){return D("background-color:rgba(150,255,150,.5)"),G(j.backgroundColor,"rgba")},s.hsla=function(){return D("background-color:hsla(120,40%,100%,.5)"),G(j.backgroundColor,"rgba")||G(j.backgroundColor,"hsla")},s.multiplebgs=function(){return D("background:url(https://),url(https://),red url(https://)"),/(url\s*\(.*?){3}/.test(j.background)},s.backgroundsize=function(){return J("backgroundSize")},s.borderimage=function(){return J("borderImage")},s.borderradius=function(){return J("borderRadius")},s.boxshadow=function(){return J("boxShadow")},s.textshadow=function(){return b.createElement("div").style.textShadow===""},s.opacity=function(){return E("opacity:.55"),/^0.55$/.test(j.opacity)},s.cssanimations=function(){return J("animationName")},s.csscolumns=function(){return J("columnCount")},s.cssgradients=function(){var a="background-image:",b="gradient(linear,left top,right bottom,from(#9f9),to(white));",c="linear-gradient(left top,#9f9, white);";return D((a+"-webkit- ".split(" ").join(b+a)+n.join(c+a)).slice(0,-a.length)),G(j.backgroundImage,"gradient")},s.cssreflections=function(){return J("boxReflect")},s.csstransforms=function(){return!!J("transform")},s.csstransforms3d=function(){var a=!!J("perspective");return a&&"webkitPerspective"in g.style&&y("@media 
(transform-3d),(-webkit-transform-3d){#modernizr{left:9px;position:absolute;height:3px;}}",function(b,c){a=b.offsetLeft===9&&b.offsetHeight===3}),a},s.csstransitions=function(){return J("transition")},s.fontface=function(){var a;return y('@font-face {font-family:"font";src:url("https://")}',function(c,d){var e=b.getElementById("smodernizr"),f=e.sheet||e.styleSheet,g=f?f.cssRules&&f.cssRules[0]?f.cssRules[0].cssText:f.cssText||"":"";a=/src/i.test(g)&&g.indexOf(d.split(" ")[0])===0}),a},s.generatedcontent=function(){var a;return y(["#",h,"{font:0/0 a}#",h,':after{content:"',l,'";visibility:hidden;font:3px/1 a}'].join(""),function(b){a=b.offsetHeight>=3}),a},s.video=function(){var a=b.createElement("video"),c=!1;try{if(c=!!a.canPlayType)c=new Boolean(c),c.ogg=a.canPlayType('video/ogg; codecs="theora"').replace(/^no$/,""),c.h264=a.canPlayType('video/mp4; codecs="avc1.42E01E"').replace(/^no$/,""),c.webm=a.canPlayType('video/webm; codecs="vp8, vorbis"').replace(/^no$/,"")}catch(d){}return c},s.audio=function(){var a=b.createElement("audio"),c=!1;try{if(c=!!a.canPlayType)c=new Boolean(c),c.ogg=a.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),c.mp3=a.canPlayType("audio/mpeg;").replace(/^no$/,""),c.wav=a.canPlayType('audio/wav; codecs="1"').replace(/^no$/,""),c.m4a=(a.canPlayType("audio/x-m4a;")||a.canPlayType("audio/aac;")).replace(/^no$/,"")}catch(d){}return c},s.localstorage=function(){try{return localStorage.setItem(h,h),localStorage.removeItem(h),!0}catch(a){return!1}},s.sessionstorage=function(){try{return sessionStorage.setItem(h,h),sessionStorage.removeItem(h),!0}catch(a){return!1}},s.webworkers=function(){return!!a.Worker},s.applicationcache=function(){return!!a.applicationCache},s.svg=function(){return!!b.createElementNS&&!!b.createElementNS(r.svg,"svg").createSVGRect},s.inlinesvg=function(){var a=b.createElement("div");return 
a.innerHTML="",(a.firstChild&&a.firstChild.namespaceURI)==r.svg},s.smil=function(){return!!b.createElementNS&&/SVGAnimate/.test(m.call(b.createElementNS(r.svg,"animate")))},s.svgclippaths=function(){return!!b.createElementNS&&/SVGClipPath/.test(m.call(b.createElementNS(r.svg,"clipPath")))};for(var L in s)C(s,L)&&(x=L.toLowerCase(),e[x]=s[L](),v.push((e[x]?"":"no-")+x));return e.input||K(),e.addTest=function(a,b){if(typeof a=="object")for(var d in a)C(a,d)&&e.addTest(d,a[d]);else{a=a.toLowerCase();if(e[a]!==c)return e;b=typeof b=="function"?b():b,typeof f!="undefined"&&f&&(g.className+=" "+(b?"":"no-")+a),e[a]=b}return e},D(""),i=k=null,function(a,b){function k(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function l(){var a=r.elements;return typeof a=="string"?a.split(" "):a}function m(a){var b=i[a[g]];return b||(b={},h++,a[g]=h,i[h]=b),b}function n(a,c,f){c||(c=b);if(j)return c.createElement(a);f||(f=m(c));var g;return f.cache[a]?g=f.cache[a].cloneNode():e.test(a)?g=(f.cache[a]=f.createElem(a)).cloneNode():g=f.createElem(a),g.canHaveChildren&&!d.test(a)?f.frag.appendChild(g):g}function o(a,c){a||(a=b);if(j)return a.createDocumentFragment();c=c||m(a);var d=c.frag.cloneNode(),e=0,f=l(),g=f.length;for(;e",f="hidden"in a,j=a.childNodes.length==1||function(){b.createElement("a");var a=b.createDocumentFragment();return typeof a.cloneNode=="undefined"||typeof a.createDocumentFragment=="undefined"||typeof a.createElement=="undefined"}()}catch(c){f=!0,j=!0}})();var r={elements:c.elements||"abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time 
video",shivCSS:c.shivCSS!==!1,supportsUnknownElements:j,shivMethods:c.shivMethods!==!1,type:"default",shivDocument:q,createElement:n,createDocumentFragment:o};a.html5=r,q(b)}(this,b),e._version=d,e._prefixes=n,e._domPrefixes=q,e._cssomPrefixes=p,e.mq=z,e.hasEvent=A,e.testProp=function(a){return H([a])},e.testAllProps=J,e.testStyles=y,e.prefixed=function(a,b,c){return b?J(a,b,c):J(a,"pfx")},g.className=g.className.replace(/(^|\s)no-js(\s|$)/,"$1$2")+(f?" js "+v.join(" "):""),e}(this,this.document),function(a,b,c){function d(a){return"[object Function]"==o.call(a)}function e(a){return"string"==typeof a}function f(){}function g(a){return!a||"loaded"==a||"complete"==a||"uninitialized"==a}function h(){var a=p.shift();q=1,a?a.t?m(function(){("c"==a.t?B.injectCss:B.injectJs)(a.s,0,a.a,a.x,a.e,1)},0):(a(),h()):q=0}function i(a,c,d,e,f,i,j){function k(b){if(!o&&g(l.readyState)&&(u.r=o=1,!q&&h(),l.onload=l.onreadystatechange=null,b)){"img"!=a&&m(function(){t.removeChild(l)},50);for(var d in y[c])y[c].hasOwnProperty(d)&&y[c][d].onload()}}var j=j||B.errorTimeout,l=b.createElement(a),o=0,r=0,u={t:d,s:c,e:f,a:i,x:j};1===y[c]&&(r=1,y[c]=[]),"object"==a?l.data=c:(l.src=c,l.type=a),l.width=l.height="0",l.onerror=l.onload=l.onreadystatechange=function(){k.call(this,r)},p.splice(e,0,u),"img"!=a&&(r||2===y[c]?(t.insertBefore(l,s?null:n),m(k,j)):y[c].push(l))}function j(a,b,c,d,f){return q=0,b=b||"j",e(a)?i("c"==b?v:u,a,b,this.i++,c,d,f):(p.splice(this.i++,0,a),1==p.length&&h()),this}function k(){var a=B;return a.loader={load:j,i:0},a}var l=b.documentElement,m=a.setTimeout,n=b.getElementsByTagName("script")[0],o={}.toString,p=[],q=0,r="MozAppearance"in l.style,s=r&&!!b.createRange().compareNode,t=s?l:n.parentNode,l=a.opera&&"[object Opera]"==o.call(a.opera),l=!!b.attachEvent&&!l,u=r?"object":l?"script":"img",v=l?"script":u,w=Array.isArray||function(a){return"[object Array]"==o.call(a)},x=[],y={},z={timeout:function(a,b){return b.length&&(a.timeout=b[0]),a}},A,B;B=function(a){function 
b(a){var a=a.split("!"),b=x.length,c=a.pop(),d=a.length,c={url:c,origUrl:c,prefixes:a},e,f,g;for(f=0;f"),n("table.docutils.footnote").wrap("
"),n("table.docutils.citation").wrap("
"),n(".wy-menu-vertical ul").not(".simple").siblings("a").each(function(){var i=n(this);expand=n(''),expand.on("click",function(n){return e.toggleCurrent(i),n.stopPropagation(),!1}),i.prepend(expand)})},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),i=e.find('[href="'+n+'"]');if(0===i.length){var t=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(i=e.find('[href="#'+t.attr("id")+'"]')).length&&(i=e.find('[href="#"]'))}i.length>0&&($(".wy-menu-vertical .current").removeClass("current"),i.addClass("current"),i.closest("li.toctree-l1").addClass("current"),i.closest("li.toctree-l1").parent().addClass("current"),i.closest("li.toctree-l1").addClass("current"),i.closest("li.toctree-l2").addClass("current"),i.closest("li.toctree-l3").addClass("current"),i.closest("li.toctree-l4").addClass("current"))}catch(o){console.log("Error expanding nav for anchor",o)}},onScroll:function(){this.winScroll=!1;var n=this.win.scrollTop(),e=n+this.winHeight,i=this.navBar.scrollTop()+(n-this.winPosition);n<0||e>this.docHeight||(this.navBar.scrollTop(i),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",function(){this.linkScroll=!1})},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current"),e.siblings().find("li.current").removeClass("current"),e.find("> ul li.current").removeClass("current"),e.toggleClass("current")}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:e.exports.ThemeNav,StickyNav:e.exports.ThemeNav}),function(){for(var n=0,e=["ms","moz","webkit","o"],i=0;i2;a== 12 | null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return 
d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect= 13 | function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e= 14 | e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck= 15 | function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;bd?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a, 17 | 
c,d){d||(d=b.identity);for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e=0;d--)b=[a[d].apply(this,b)];return b[0]}}; 24 | b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments, 25 | 1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)}; 26 | b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"}; 27 | b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return 
a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e/g,">").replace(/"/g,""").replace(/'/g,"'").replace(/\//g,"/")};b.mixin=function(a){j(b.functions(a), 28 | function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+ 29 | u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]= 30 | function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain= 31 | true;return this};m.prototype.value=function(){return this._wrapped}}).call(this); 32 | 
-------------------------------------------------------------------------------- /docs/build/html/_static/up-pressed.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/up-pressed.png -------------------------------------------------------------------------------- /docs/build/html/_static/up.png: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/_static/up.png -------------------------------------------------------------------------------- /docs/build/html/genindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | 12 | Index — phonet 0.3 documentation 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 |
42 | 43 | 44 | 94 | 95 |
96 | 97 | 98 | 104 | 105 | 106 |
107 | 108 |
109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 |
127 | 128 |
    129 | 130 |
  • Docs »
  • 131 | 132 |
  • Index
  • 133 | 134 | 135 |
  • 136 | 137 | 138 | 139 |
  • 140 | 141 |
142 | 143 | 144 |
145 |
146 |
147 |
148 | 149 | 150 |

Index

151 | 152 |
153 | G 154 | | M 155 | | N 156 | | P 157 | 158 |
159 |

G

160 | 161 | 167 | 175 |
176 | 177 |

M

178 | 179 | 183 | 189 |
190 | 191 |

N

192 | 193 | 197 |
198 | 199 |

P

200 | 201 | 205 | 209 |
210 | 211 | 212 | 213 |
214 | 215 |
216 |
217 | 218 | 219 |
220 | 221 |
222 |

223 | © Copyright 2019, Camilo Vasquez. 224 | 225 |

226 |
227 | Built with Sphinx using a theme provided by Read the Docs. 228 | 229 |
230 | 231 |
232 |
233 | 234 |
235 | 236 |
237 | 238 | 239 | 240 | 241 | 242 | 253 | 254 | 255 | 256 | 257 | 258 | 259 | 260 | 261 | 266 | 267 | 268 | -------------------------------------------------------------------------------- /docs/build/html/help.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Need Help? — phonet 0.3 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 | 42 |
43 | 44 | 45 | 95 | 96 |
97 | 98 | 99 | 105 | 106 | 107 |
108 | 109 |
110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 |
128 | 129 |
    130 | 131 |
  • Docs »
  • 132 | 133 |
  • Need Help?
  • 134 | 135 | 136 |
  • 137 | 138 | 139 | View page source 140 | 141 | 142 |
  • 143 | 144 |
145 | 146 | 147 |
148 |
149 |
150 |
151 | 152 |
153 |

Need Help?

154 |

If you have trouble with Phonet, please write to Camilo Vasquez at: juan.vasquez@fau.de

155 |
156 | 157 | 158 |
159 | 160 |
161 |
162 | 163 | 171 | 172 | 173 |
174 | 175 |
176 |

177 | © Copyright 2019, Camilo Vasquez. 178 | 179 |

180 |
181 | Built with Sphinx using a theme provided by Read the Docs. 182 | 183 |
184 | 185 |
186 |
187 | 188 |
189 | 190 |
191 | 192 | 193 | 194 | 195 | 196 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 215 | 220 | 221 | 222 | -------------------------------------------------------------------------------- /docs/build/html/objects.inv: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/html/objects.inv -------------------------------------------------------------------------------- /docs/build/html/py-modindex.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Python Module Index — phonet 0.3 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 36 | 37 | 38 | 39 | 40 | 41 | 42 | 43 | 44 | 45 | 46 | 47 |
48 | 49 | 50 | 100 | 101 |
102 | 103 | 104 | 110 | 111 | 112 |
113 | 114 |
115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 | 127 | 128 | 129 | 130 | 131 | 132 |
133 | 134 |
    135 | 136 |
  • Docs »
  • 137 | 138 |
  • Python Module Index
  • 139 | 140 | 141 |
  • 142 | 143 |
  • 144 | 145 |
146 | 147 | 148 |
149 |
150 |
151 |
152 | 153 | 154 |

Python Module Index

155 | 156 |
157 | p 158 |
159 | 160 | 161 | 162 | 164 | 165 | 166 | 169 |
 
163 | p
167 | phonet 168 |
170 | 171 | 172 |
173 | 174 |
175 |
176 | 177 | 178 |
179 | 180 |
181 |

182 | © Copyright 2019, Camilo Vasquez. 183 | 184 |

185 |
186 | Built with Sphinx using a theme provided by Read the Docs. 187 | 188 |
189 | 190 |
191 |
192 | 193 |
194 | 195 |
196 | 197 | 198 | 199 | 200 | 201 | 212 | 213 | 214 | 215 | 216 | 217 | 218 | 219 | 220 | 225 | 226 | 227 | -------------------------------------------------------------------------------- /docs/build/html/reference.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | References — phonet 0.3 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 | 41 |
42 | 43 | 44 | 94 | 95 |
96 | 97 | 98 | 104 | 105 | 106 |
107 | 108 |
109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 | 126 |
127 | 128 |
    129 | 130 |
  • Docs »
  • 131 | 132 |
  • References
  • 133 | 134 | 135 |
  • 136 | 137 | 138 | View page source 139 | 140 | 141 |
  • 142 | 143 |
144 | 145 | 146 |
147 |
148 |
149 |
150 | 151 |
152 |

References

153 |

If you use Phonet for research purposes, please cite the following paper:

154 |

Vásquez-Correa, J. C., Klumpp, P., Orozco-Arroyave, J. R., & Nöth, E. (2019). Phonet: a Tool Based on Gated Recurrent Neural Networks to Extract Phonological Posteriors from Speech. Proc. Interspeech 2019, 549-553.

155 |

Download paper

156 |
157 | 158 | 159 |
160 | 161 |
162 |
163 | 164 | 170 | 171 | 172 |
173 | 174 |
175 |

176 | © Copyright 2019, Camilo Vasquez. 177 | 178 |

179 |
180 | Built with Sphinx using a theme provided by Read the Docs. 181 | 182 |
183 | 184 |
185 |
186 | 187 |
188 | 189 |
190 | 191 | 192 | 193 | 194 | 195 | 206 | 207 | 208 | 209 | 210 | 211 | 212 | 213 | 214 | 219 | 220 | 221 | -------------------------------------------------------------------------------- /docs/build/html/search.html: -------------------------------------------------------------------------------- 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8 | 9 | 10 | 11 | Search — phonet 0.3 documentation 12 | 13 | 14 | 15 | 16 | 17 | 18 | 19 | 20 | 21 | 22 | 23 | 24 | 25 | 26 | 27 | 28 | 29 | 30 | 31 | 32 | 33 | 34 | 35 | 36 | 37 | 38 | 39 | 40 |
41 | 42 | 43 | 93 | 94 |
95 | 96 | 97 | 103 | 104 | 105 |
106 | 107 |
108 | 109 | 110 | 111 | 112 | 113 | 114 | 115 | 116 | 117 | 118 | 119 | 120 | 121 | 122 | 123 | 124 | 125 |
126 | 127 |
    128 | 129 |
  • Docs »
  • 130 | 131 |
  • Search
  • 132 | 133 | 134 |
  • 135 | 136 | 137 | 138 |
  • 139 | 140 |
141 | 142 | 143 |
144 |
145 |
146 |
147 | 148 | 156 | 157 | 158 |
159 | 160 |
161 | 162 |
163 | 164 |
165 |
166 | 167 | 168 |
169 | 170 |
171 |

172 | © Copyright 2019, Camilo Vasquez. 173 | 174 |

175 |
176 | Built with Sphinx using a theme provided by Read the Docs. 177 | 178 |
179 | 180 |
181 |
182 | 183 |
184 | 185 |
186 | 187 | 188 | 189 | 190 | 191 | 202 | 203 | 204 | 205 | 206 | 207 | 208 | 209 | 210 | 211 | 216 | 219 | 220 | 221 | 222 | 223 | 224 | 225 | -------------------------------------------------------------------------------- /docs/build/html/searchindex.js: -------------------------------------------------------------------------------- 1 | Search.setIndex({docnames:["help","index","reference"],envversion:51,filenames:["help.rst","index.rst","reference.rst"],objects:{"":{phonet:[1,0,0,"-"]},"phonet.Phonet":{get_PLLR:[1,2,1,""],get_feat:[1,2,1,""],get_phon_path:[1,2,1,""],get_phon_wav:[1,2,1,""],get_posteriorgram:[1,2,1,""],mask_correction:[1,2,1,""],model:[1,2,1,""],modelp:[1,2,1,""],number2phoneme:[1,2,1,""]},phonet:{Phonet:[1,1,1,""]}},objnames:{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","method","Python method"]},objtypes:{"0":"py:module","1":"py:class","2":"py:method"},terms:{"16khz":1,"\u027e":1,"\u028e":1,"class":1,"import":1,"n\u00f6th":2,"return":1,"true":1,"v\u00e1squez":2,The:1,abad:1,about:1,accord:1,activ:1,all:1,anterior:1,antioquia:1,architectur:1,arrai:1,arroyav:2,articul:1,astudillo:1,audio:1,audio_fil:1,audio_path:1,author:1,avail:1,avoid:1,back:1,base:2,bgru:1,bidirect:1,booktitl:[],bordel:1,bound:1,camilo:[0,1],cite:2,clone:1,close:1,code:1,com:1,complet:1,comput:1,consist:1,consonant:1,contain:1,continu:1,convert:1,correa:[1,2],correct:1,creat:1,csv:1,datafram:1,deafult:1,dens:1,dental:1,detect:1,diez:1,directori:1,distribut:1,doe:1,download:2,each:1,effect:1,energi:1,engin:1,english:1,erlangen:1,estim:1,evalu:1,exampl:1,exploit:1,extract:[1,2],faculti:1,fals:1,fau:[0,1],feat_fil:1,feat_path:1,featur:1,file:1,file_audio:1,file_feat:1,filterbank:1,flap:1,follow:[1,2],frame:1,from:[1,2],fuent:1,gate:2,get:1,get_feat:1,get_phon_path:1,get_phon_wav:1,get_pllr:1,get_posteriorgram:1,git:1,github:1,group:1,gru:1,have:[0,1],hold:1,http:1,ieee:1,implement:1,includ:1,index:1,inproceed:[],input:1,input_s:1,insid:1,integ:
1,interspeech:[1,2],jcvasquezc:1,juan:[0,1],kepler:1,kera:1,khz:1,klumpp:2,lab:1,labial:1,languag:1,later:1,layer:1,learn:1,length:1,letter:1,likelihood:1,list:1,log:1,look:1,make:1,manner:1,mask:1,mask_correct:1,mel:1,mode:1,model:1,modelp:1,modul:1,multitask:1,nasal:1,nativ:1,need:1,network:[1,2],neural:[1,2],non:1,number2phonem:1,number:1,numfram:1,numpi:1,nuremberg:1,object:1,observ:1,obtain:1,open:1,order:1,orozco:2,page:1,panda:1,paper:2,param:[],paramet:1,pataka:1,path:1,pattern:1,paus:1,penagarikano:1,per:1,phon:1,phonclass:1,phonclasses2:1,phone:1,phonem:1,phonet:[0,2],phonolog:[1,2],phonological_class:1,pleas:[0,1,2],pllr:1,plot:1,plot_flag:1,posterior:[1,2],posteriorgram:1,posteriro:1,predict:1,preiction:1,probabl:1,proc:2,process:1,project:1,purpos:2,python:1,rate:1,ratio:1,recogn:1,recognit:1,recurr:2,refer:1,research:2,ribeiro:1,rodriguez:1,row:1,sampl:1,save:1,search:1,see:1,sentenc:1,sentence_nas:1,seq:1,sequenc:1,setup:1,sever:1,should:1,signal:1,sil:1,singl:1,size:1,sourc:1,space:1,speaker:1,speech:2,spoken:1,squez:[],stop:1,store:1,strategi:1,strident:1,string:1,support:1,sure:1,thi:1,threshold:1,time:1,titl:[],tool:2,toolkit:1,trancoso:1,trill:1,troubl:[0,1],tutori:1,unbound:1,univers:1,use:2,used:1,using:1,varona:1,vasquez2019phonet:[],vasquez:[0,1],vector:1,velar:1,vocal:1,voic:1,want:1,wav:1,were:1,what:1,whether:1,which:1,work:1,write:[0,1],year:[],you:[0,1,2]},titles:["Need Help?","Welcome to Phonet’s documentation!","References"],titleterms:{document:1,help:[0,1],indic:1,instal:1,method:1,need:0,phonet:1,refer:2,tabl:1,welcom:1}}) -------------------------------------------------------------------------------- /docs/build/latex/Makefile: -------------------------------------------------------------------------------- 1 | # Makefile for Sphinx LaTeX output 2 | 3 | ALLDOCS = $(basename $(wildcard *.tex)) 4 | ALLPDF = $(addsuffix .pdf,$(ALLDOCS)) 5 | ALLDVI = $(addsuffix .dvi,$(ALLDOCS)) 6 | ALLPS = $(addsuffix .ps,$(ALLDOCS)) 7 | 8 | # 
Prefix for archive names 9 | ARCHIVEPREFIX = 10 | # Additional LaTeX options 11 | LATEXOPTS = 12 | # format: pdf or dvi 13 | FMT = pdf 14 | 15 | LATEX = latex 16 | PDFLATEX = pdflatex 17 | MAKEINDEX = makeindex 18 | 19 | 20 | all: $(ALLPDF) 21 | all-pdf: $(ALLPDF) 22 | all-dvi: $(ALLDVI) 23 | all-ps: $(ALLPS) 24 | 25 | all-pdf-ja: 26 | for f in *.pdf *.png *.gif *.jpg *.jpeg; do extractbb $$f; done 27 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done 28 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done 29 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done 30 | -for f in *.idx; do mendex -U -f -d "`basename $$f .idx`.dic" -s python.ist $$f; done 31 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done 32 | for f in *.tex; do platex -kanji=utf8 $(LATEXOPTS) $$f; done 33 | for f in *.dvi; do dvipdfmx $$f; done 34 | 35 | zip: all-$(FMT) 36 | mkdir $(ARCHIVEPREFIX)docs-$(FMT) 37 | cp $(ALLPDF) $(ARCHIVEPREFIX)docs-$(FMT) 38 | zip -q -r -9 $(ARCHIVEPREFIX)docs-$(FMT).zip $(ARCHIVEPREFIX)docs-$(FMT) 39 | rm -r $(ARCHIVEPREFIX)docs-$(FMT) 40 | 41 | tar: all-$(FMT) 42 | mkdir $(ARCHIVEPREFIX)docs-$(FMT) 43 | cp $(ALLPDF) $(ARCHIVEPREFIX)docs-$(FMT) 44 | tar cf $(ARCHIVEPREFIX)docs-$(FMT).tar $(ARCHIVEPREFIX)docs-$(FMT) 45 | rm -r $(ARCHIVEPREFIX)docs-$(FMT) 46 | 47 | gz: tar 48 | gzip -9 < $(ARCHIVEPREFIX)docs-$(FMT).tar > $(ARCHIVEPREFIX)docs-$(FMT).tar.gz 49 | 50 | bz2: tar 51 | bzip2 -9 -k $(ARCHIVEPREFIX)docs-$(FMT).tar 52 | 53 | xz: tar 54 | xz -9 -k $(ARCHIVEPREFIX)docs-$(FMT).tar 55 | 56 | # The number of LaTeX runs is quite conservative, but I don't expect it 57 | # to get run often, so the little extra time won't hurt. 
58 | %.dvi: %.tex 59 | $(LATEX) $(LATEXOPTS) '$<' 60 | $(LATEX) $(LATEXOPTS) '$<' 61 | $(LATEX) $(LATEXOPTS) '$<' 62 | -$(MAKEINDEX) -s python.ist '$(basename $<).idx' 63 | $(LATEX) $(LATEXOPTS) '$<' 64 | $(LATEX) $(LATEXOPTS) '$<' 65 | 66 | %.pdf: %.tex 67 | $(PDFLATEX) $(LATEXOPTS) '$<' 68 | $(PDFLATEX) $(LATEXOPTS) '$<' 69 | $(PDFLATEX) $(LATEXOPTS) '$<' 70 | -$(MAKEINDEX) -s python.ist '$(basename $<).idx' 71 | $(PDFLATEX) $(LATEXOPTS) '$<' 72 | $(PDFLATEX) $(LATEXOPTS) '$<' 73 | 74 | %.ps: %.dvi 75 | dvips '$<' 76 | 77 | clean: 78 | rm -f *.log *.ind *.aux *.toc *.syn *.idx *.out *.ilg *.pla *.ps *.tar *.tar.gz *.tar.bz2 *.tar.xz $(ALLPDF) $(ALLDVI) 79 | 80 | .PHONY: all all-pdf all-dvi all-ps clean zip tar gz bz2 xz 81 | .PHONY: all-pdf-ja 82 | -------------------------------------------------------------------------------- /docs/build/latex/footnotehyper-sphinx.sty: -------------------------------------------------------------------------------- 1 | \NeedsTeXFormat{LaTeX2e} 2 | \ProvidesPackage{footnotehyper-sphinx}% 3 | [2017/01/16 v1.5.2 hyperref aware footnote.sty for sphinx (JFB)] 4 | %% 5 | %% Package: footnotehyper-sphinx 6 | %% Version: based on footnotehyper.sty v0.9f (2016/10/03) 7 | %% as available at http://www.ctan.org/pkg/footnotehyper 8 | %% License: the one applying to Sphinx 9 | %% 10 | %% Differences from footnotehyper v0.9f (2016/10/03): 11 | %% 1. hyperref is assumed in use (with default hyperfootnotes=true), 12 | %% 2. no need to check if footnote.sty was loaded, 13 | %% 3. a special tabulary compatibility layer added, (partial but enough for 14 | %% Sphinx), 15 | %% 4. \sphinxfootnotemark, and use of \spx@opt@BeforeFootnote from sphinx.sty. 16 | %% Note: with \footnotemark[N]/\footnotetext[N] syntax, hyperref 17 | %% does not insert an hyperlink. This is _not_ improved here. 18 | %% 5. 
use of \sphinxunactivateextrasandspace for parsed literals 19 | %% 20 | \DeclareOption*{\PackageWarning{footnotehyper}{Option `\CurrentOption' is unknown}}% 21 | \ProcessOptions\relax 22 | \let\FNH@@makefntext\@makefntext\let\@makefntext\@firstofone 23 | \RequirePackage{footnote} 24 | \let\fnparbox\parbox\let\parbox\fn@parbox\let\@makefntext\FNH@@makefntext 25 | \let\FNH@fn@footnote \footnote % buggy footnote.sty's \footnote 26 | \let\FNH@fn@footnotetext\footnotetext % will be redefined later 27 | \let\footnote \fn@latex@@footnote % meaning of \footnote before footnote.sty 28 | \let\footnotetext\fn@latex@@footnotetext 29 | \def\fn@endnote {\color@endgroup}% 30 | \AtBeginDocument {% 31 | \let\fn@latex@@footnote \footnote % meaning of \footnote at end of preamble 32 | \let\fn@latex@@footnotetext\footnotetext 33 | \let\fn@fntext \FNH@hyper@fntext 34 | \let\spewnotes \FNH@hyper@spewnotes 35 | \let\endsavenotes\spewnotes 36 | \let\fn@endfntext\FNH@fixed@endfntext 37 | \let\footnote \FNH@fixed@footnote 38 | \let\footnotetext\FNH@fixed@footnotetext 39 | \let\endfootnote\fn@endfntext 40 | \let\endfootnotetext\endfootnote 41 | }% 42 | \def\FNH@hyper@fntext {% 43 | %% amsmath compatibility 44 | \ifx\ifmeasuring@\undefined\expandafter\@secondoftwo 45 | \else\expandafter\@firstofone\fi 46 | {\ifmeasuring@\expandafter\@gobbletwo\else\expandafter\@firstofone\fi}% 47 | %% partial tabulary compatibility, [N] must be used, but Sphinx does it 48 | {\ifx\equation$\expandafter\@gobbletwo\fi\FNH@hyper@fntext@i }%$ 49 | }% 50 | \long\def\FNH@hyper@fntext@i #1{\global\setbox\fn@notes\vbox 51 | {\unvbox\fn@notes 52 | \fn@startnote 53 | \@makefntext 54 | {\rule\z@\footnotesep\ignorespaces 55 | \ifHy@nesting\expandafter\ltx@firstoftwo 56 | \else\expandafter\ltx@secondoftwo 57 | \fi 58 | {\expandafter\hyper@@anchor\expandafter{\Hy@footnote@currentHref}{#1}}% 59 | {\Hy@raisedlink 60 | {\expandafter\hyper@@anchor\expandafter{\Hy@footnote@currentHref}% 61 | {\relax}}% 62 | 
\let\@currentHref\Hy@footnote@currentHref 63 | \let\@currentlabelname\@empty 64 | #1}% 65 | \@finalstrut\strutbox }% 66 | \fn@endnote }% 67 | }% 68 | \def\FNH@hyper@spewnotes {\endgroup 69 | \if@savingnotes\else\ifvoid\fn@notes\else 70 | \begingroup\let\@makefntext\@empty 71 | \let\@finalstrut\@gobble 72 | \let\rule\@gobbletwo 73 | \H@@footnotetext{\unvbox\fn@notes}% 74 | \endgroup\fi\fi 75 | }% 76 | \def\FNH@fixed@endfntext {% 77 | \@finalstrut\strutbox 78 | \fn@postfntext 79 | \fn@endnote 80 | \egroup\FNH@endfntext@next % will decide if link or no link 81 | }% 82 | \def\FNH@endfntext@link {\begingroup 83 | \let\@makefntext\@empty\let\@finalstrut\@gobble\let\rule\@gobbletwo 84 | \@footnotetext {\unvbox\z@}% 85 | \endgroup 86 | }% 87 | \def\FNH@endfntext@nolink {\begingroup 88 | \let\@makefntext\@empty\let\@finalstrut\@gobble 89 | \let\rule\@gobbletwo 90 | \if@savingnotes\expandafter\fn@fntext\else\expandafter\H@@footnotetext\fi 91 | {\unvbox\z@}\endgroup 92 | }% 93 | %% \spx@opt@BeforeFootnote is defined in sphinx.sty 94 | \def\FNH@fixed@footnote {\spx@opt@BeforeFootnote\ifx\@currenvir\fn@footnote 95 | \expandafter\FNH@footnoteenv\else\expandafter\fn@latex@@footnote\fi }% 96 | \def\FNH@footnoteenv {\catcode13=5\sphinxunactivateextrasandspace 97 | \@ifnextchar[\FNH@xfootnoteenv%] 98 | {\stepcounter\@mpfn 99 | \protected@xdef\@thefnmark{\thempfn}\@footnotemark 100 | \def\FNH@endfntext@next{\FNH@endfntext@link}\fn@startfntext}}% 101 | \def\FNH@xfootnoteenv [#1]{% 102 | \begingroup 103 | \csname c@\@mpfn\endcsname #1\relax 104 | \unrestored@protected@xdef\@thefnmark{\thempfn}% 105 | \endgroup\@footnotemark\def\FNH@endfntext@next{\FNH@endfntext@link}% 106 | \fn@startfntext}% 107 | \def\FNH@fixed@footnotetext {\ifx\@currenvir\fn@footnotetext 108 | \expandafter\FNH@footnotetextenv\else\expandafter\fn@latex@@footnotetext\fi}% 109 | \def\FNH@footnotetextenv {\@ifnextchar[\FNH@xfootnotetextenv%] 110 | {\protected@xdef\@thefnmark{\thempfn}% 111 | 
\def\FNH@endfntext@next{\FNH@endfntext@link}\fn@startfntext}}% 112 | \def\FNH@xfootnotetextenv [#1]{% 113 | \begingroup 114 | \csname c@\@mpfn\endcsname #1\relax 115 | \unrestored@protected@xdef\@thefnmark{\thempfn}% 116 | \endgroup\def\FNH@endfntext@next{\FNH@endfntext@nolink}% 117 | \fn@startfntext }% 118 | % Now some checks in case some package has modified \@makefntext. 119 | \AtBeginDocument 120 | {% compatibility with French module of LaTeX babel 121 | \ifx\@makefntextFB\undefined 122 | \expandafter\@gobble\else\expandafter\@firstofone\fi 123 | {\ifFBFrenchFootnotes \let\FNH@@makefntext\@makefntextFB \else 124 | \let\FNH@@makefntext\@makefntextORI\fi}% 125 | \expandafter\FNH@check@a\FNH@@makefntext{1.2!3?4,}\FNH@@@1.2!3?4,\FNH@@@\relax 126 | }% 127 | \long\def\FNH@check@a #11.2!3?4,#2\FNH@@@#3% 128 | {% 129 | \ifx\relax#3\expandafter\@firstoftwo\else\expandafter\@secondoftwo\fi 130 | \FNH@bad@footnote@env 131 | {\def\fn@prefntext{#1}\def\fn@postfntext{#2}\FNH@check@b}% 132 | }% 133 | \def\FNH@check@b #1\relax 134 | {% 135 | \expandafter\expandafter\expandafter\FNH@check@c 136 | \expandafter\meaning\expandafter\fn@prefntext 137 | \meaning\fn@postfntext1.2!3?4,\FNH@check@c\relax 138 | }% 139 | \def\FNH@check@c #11.2!3?4,#2#3\relax 140 | {\ifx\FNH@check@c#2\expandafter\@gobble\fi\FNH@bad@footnote@env}% 141 | \def\FNH@bad@footnote@env 142 | {\PackageWarningNoLine{footnotehyper}% 143 | {The footnote environment from package footnote^^J 144 | will be dysfunctional, sorry (not my fault...). You may try to make a bug^^J 145 | report at https://github.com/sphinx-doc/sphinx including the next lines:}% 146 | \typeout{\meaning\@makefntext}% 147 | \let\fn@prefntext\@empty\let\fn@postfntext\@empty 148 | }% 149 | %% \sphinxfootnotemark: usable in section titles and silently removed from 150 | %% TOCs. 
151 | \def\sphinxfootnotemark [#1]% 152 | {\ifx\thepage\relax\else \protect\spx@opt@BeforeFootnote 153 | \protect\footnotemark[#1]\fi}% 154 | \AtBeginDocument % let hyperref less complain 155 | {\pdfstringdefDisableCommands{\def\sphinxfootnotemark [#1]{}}}% 156 | \endinput 157 | %% 158 | %% End of file `footnotehyper-sphinx.sty'. 159 | -------------------------------------------------------------------------------- /docs/build/latex/needspace.sty: -------------------------------------------------------------------------------- 1 | 2 | \NeedsTeXFormat{LaTeX2e} 3 | \ProvidesPackage{needspace}[2010/09/12 v1.3d reserve vertical space] 4 | 5 | \newcommand{\needspace}[1]{% 6 | \begingroup 7 | \setlength{\dimen@}{#1}% 8 | \vskip\z@\@plus\dimen@ 9 | \penalty -100\vskip\z@\@plus -\dimen@ 10 | \vskip\dimen@ 11 | \penalty 9999% 12 | \vskip -\dimen@ 13 | \vskip\z@skip % hide the previous |\vskip| from |\addvspace| 14 | \endgroup 15 | } 16 | 17 | \newcommand{\Needspace}{\@ifstar{\@sneedsp@}{\@needsp@}} 18 | 19 | \newcommand{\@sneedsp@}[1]{\par \penalty-100\begingroup 20 | \setlength{\dimen@}{#1}% 21 | \dimen@ii\pagegoal \advance\dimen@ii-\pagetotal 22 | \ifdim \dimen@>\dimen@ii 23 | \break 24 | \fi\endgroup} 25 | 26 | \newcommand{\@needsp@}[1]{\par \penalty-100\begingroup 27 | \setlength{\dimen@}{#1}% 28 | \dimen@ii\pagegoal \advance\dimen@ii-\pagetotal 29 | \ifdim \dimen@>\dimen@ii 30 | \ifdim \dimen@ii>\z@ 31 | \vfil 32 | \fi 33 | \break 34 | \fi\endgroup} 35 | 36 | -------------------------------------------------------------------------------- /docs/build/latex/phonet.aux: -------------------------------------------------------------------------------- 1 | \relax 2 | \providecommand\hyper@newdestlabel[2]{} 3 | \providecommand\HyperFirstAtBeginDocument{\AtBeginDocument} 4 | \HyperFirstAtBeginDocument{\ifx\hyper@anchor\@undefined 5 | \global\let\oldcontentsline\contentsline 6 | \gdef\contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}} 7 | \global\let\oldnewlabel\newlabel 8 | 
\gdef\newlabel#1#2{\newlabelxx{#1}#2} 9 | \gdef\newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}} 10 | \AtEndDocument{\ifx\hyper@anchor\@undefined 11 | \let\contentsline\oldcontentsline 12 | \let\newlabel\oldnewlabel 13 | \fi} 14 | \fi} 15 | \global\let\hyper@last\relax 16 | \gdef\HyperFirstAtBeginDocument#1{#1} 17 | \providecommand\HyField@AuxAddToFields[1]{} 18 | \providecommand\HyField@AuxAddToCoFields[2]{} 19 | \babel@aux{english}{} 20 | \newlabel{index::doc}{{}{1}{}{section*.2}{}} 21 | \@writefile{toc}{\contentsline {chapter}{\numberline {1}Need Help?}{3}{chapter.1}} 22 | \@writefile{lof}{\addvspace {10\p@ }} 23 | \@writefile{lot}{\addvspace {10\p@ }} 24 | \newlabel{help::doc}{{1}{3}{Need Help?}{chapter.1}{}} 25 | \newlabel{help:welcome-to-phonet-s-documentation}{{1}{3}{Need Help?}{chapter.1}{}} 26 | \newlabel{help:need-help}{{1}{3}{Need Help?}{chapter.1}{}} 27 | \@writefile{toc}{\contentsline {chapter}{\numberline {2}References}{5}{chapter.2}} 28 | \@writefile{lof}{\addvspace {10\p@ }} 29 | \@writefile{lot}{\addvspace {10\p@ }} 30 | \newlabel{reference:references}{{2}{5}{References}{chapter.2}{}} 31 | \newlabel{reference::doc}{{2}{5}{References}{chapter.2}{}} 32 | \@writefile{toc}{\contentsline {chapter}{\numberline {3}Installation}{7}{chapter.3}} 33 | \@writefile{lof}{\addvspace {10\p@ }} 34 | \@writefile{lot}{\addvspace {10\p@ }} 35 | \newlabel{index:installation}{{3}{7}{Installation}{chapter.3}{}} 36 | \@writefile{toc}{\contentsline {chapter}{\numberline {4}Methods}{9}{chapter.4}} 37 | \@writefile{lof}{\addvspace {10\p@ }} 38 | \@writefile{lot}{\addvspace {10\p@ }} 39 | \newlabel{index:methods}{{4}{9}{Methods}{chapter.4}{}} 40 | \newlabel{index:module-phonet}{{4}{9}{Methods}{chapter.4}{}} 41 | \newlabel{index:phonet.Phonet}{{4}{9}{Methods}{section*.3}{}} 42 | \newlabel{index:phonet.Phonet.get_PLLR}{{4}{9}{Methods}{section*.4}{}} 43 | \newlabel{index:phonet.Phonet.get_feat}{{4}{10}{Methods}{section*.5}{}} 44 | 
\newlabel{index:phonet.Phonet.get_phon_path}{{4}{10}{Methods}{section*.6}{}} 45 | \newlabel{index:phonet.Phonet.get_phon_wav}{{4}{10}{Methods}{section*.7}{}} 46 | \newlabel{index:phonet.Phonet.get_posteriorgram}{{4}{11}{Methods}{section*.8}{}} 47 | \newlabel{index:phonet.Phonet.mask_correction}{{4}{11}{Methods}{section*.9}{}} 48 | \newlabel{index:phonet.Phonet.model}{{4}{11}{Methods}{section*.10}{}} 49 | \newlabel{index:phonet.Phonet.modelp}{{4}{11}{Methods}{section*.11}{}} 50 | \newlabel{index:phonet.Phonet.number2phoneme}{{4}{11}{Methods}{section*.12}{}} 51 | \@writefile{toc}{\contentsline {chapter}{\numberline {5}Indices and tables}{13}{chapter.5}} 52 | \@writefile{lof}{\addvspace {10\p@ }} 53 | \@writefile{lot}{\addvspace {10\p@ }} 54 | \newlabel{index:indices-and-tables}{{5}{13}{Indices and tables}{chapter.5}{}} 55 | \@writefile{toc}{\contentsline {chapter}{\numberline {6}Help}{15}{chapter.6}} 56 | \@writefile{lof}{\addvspace {10\p@ }} 57 | \@writefile{lot}{\addvspace {10\p@ }} 58 | \newlabel{index:help}{{6}{15}{Help}{chapter.6}{}} 59 | \@writefile{toc}{\contentsline {chapter}{Python Module Index}{17}{section*.13}} 60 | -------------------------------------------------------------------------------- /docs/build/latex/phonet.idx: -------------------------------------------------------------------------------- 1 | \indexentry{phonet (module)|hyperpage}{9} 2 | \indexentry{Phonet (class in phonet)|hyperpage}{9} 3 | \indexentry{get\_PLLR() (phonet.Phonet method)|hyperpage}{9} 4 | \indexentry{get\_feat() (phonet.Phonet method)|hyperpage}{10} 5 | \indexentry{get\_phon\_path() (phonet.Phonet method)|hyperpage}{10} 6 | \indexentry{get\_phon\_wav() (phonet.Phonet method)|hyperpage}{10} 7 | \indexentry{get\_posteriorgram() (phonet.Phonet method)|hyperpage}{11} 8 | \indexentry{mask\_correction() (phonet.Phonet method)|hyperpage}{11} 9 | \indexentry{model() (phonet.Phonet method)|hyperpage}{11} 10 | \indexentry{modelp() (phonet.Phonet method)|hyperpage}{11} 11 | 
\indexentry{number2phoneme() (phonet.Phonet method)|hyperpage}{11} 12 | -------------------------------------------------------------------------------- /docs/build/latex/phonet.out: -------------------------------------------------------------------------------- 1 | \BOOKMARK [0][-]{chapter.1}{\376\377\000N\000e\000e\000d\000\040\000H\000e\000l\000p\000?}{}% 1 2 | \BOOKMARK [0][-]{chapter.2}{\376\377\000R\000e\000f\000e\000r\000e\000n\000c\000e\000s}{}% 2 3 | \BOOKMARK [0][-]{chapter.3}{\376\377\000I\000n\000s\000t\000a\000l\000l\000a\000t\000i\000o\000n}{}% 3 4 | \BOOKMARK [0][-]{chapter.4}{\376\377\000M\000e\000t\000h\000o\000d\000s}{}% 4 5 | \BOOKMARK [0][-]{chapter.5}{\376\377\000I\000n\000d\000i\000c\000e\000s\000\040\000a\000n\000d\000\040\000t\000a\000b\000l\000e\000s}{}% 5 6 | \BOOKMARK [0][-]{chapter.6}{\376\377\000H\000e\000l\000p}{}% 6 7 | \BOOKMARK [0][-]{section*.13}{\376\377\000P\000y\000t\000h\000o\000n\000\040\000M\000o\000d\000u\000l\000e\000\040\000I\000n\000d\000e\000x}{}% 7 8 | -------------------------------------------------------------------------------- /docs/build/latex/phonet.pdf: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/docs/build/latex/phonet.pdf -------------------------------------------------------------------------------- /docs/build/latex/phonet.toc: -------------------------------------------------------------------------------- 1 | \babel@toc {english}{} 2 | \contentsline {chapter}{\numberline {1}Need Help?}{3}{chapter.1} 3 | \contentsline {chapter}{\numberline {2}References}{5}{chapter.2} 4 | \contentsline {chapter}{\numberline {3}Installation}{7}{chapter.3} 5 | \contentsline {chapter}{\numberline {4}Methods}{9}{chapter.4} 6 | \contentsline {chapter}{\numberline {5}Indices and tables}{13}{chapter.5} 7 | \contentsline {chapter}{\numberline {6}Help}{15}{chapter.6} 8 | \contentsline {chapter}{Python 
Module Index}{17}{section*.13} 9 | -------------------------------------------------------------------------------- /docs/build/latex/python.ist: -------------------------------------------------------------------------------- 1 | line_max 100 2 | headings_flag 1 3 | heading_prefix " \\bigletter " 4 | 5 | preamble "\\begin{sphinxtheindex} 6 | \\def\\bigletter#1{{\\Large\\sffamily#1}\\nopagebreak\\vspace{1mm}} 7 | 8 | " 9 | 10 | postamble "\n\n\\end{sphinxtheindex}\n" 11 | 12 | symhead_positive "{Symbols}" 13 | numhead_positive "{Numbers}" 14 | -------------------------------------------------------------------------------- /docs/build/latex/sphinxhighlight.sty: -------------------------------------------------------------------------------- 1 | \NeedsTeXFormat{LaTeX2e}[1995/12/01] 2 | \ProvidesPackage{sphinxhighlight}[2016/05/29 stylesheet for highlighting with pygments] 3 | 4 | 5 | \makeatletter 6 | \def\PYG@reset{\let\PYG@it=\relax \let\PYG@bf=\relax% 7 | \let\PYG@ul=\relax \let\PYG@tc=\relax% 8 | \let\PYG@bc=\relax \let\PYG@ff=\relax} 9 | \def\PYG@tok#1{\csname PYG@tok@#1\endcsname} 10 | \def\PYG@toks#1+{\ifx\relax#1\empty\else% 11 | \PYG@tok{#1}\expandafter\PYG@toks\fi} 12 | \def\PYG@do#1{\PYG@bc{\PYG@tc{\PYG@ul{% 13 | \PYG@it{\PYG@bf{\PYG@ff{#1}}}}}}} 14 | \def\PYG#1#2{\PYG@reset\PYG@toks#1+\relax+\PYG@do{#2}} 15 | 16 | \expandafter\def\csname PYG@tok@w\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.73,0.73,0.73}{##1}}} 17 | \expandafter\def\csname PYG@tok@c\endcsname{\let\PYG@it=\textit\def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}} 18 | \expandafter\def\csname PYG@tok@cp\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 19 | \expandafter\def\csname PYG@tok@cs\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}\def\PYG@bc##1{\setlength{\fboxsep}{0pt}\colorbox[rgb]{1.00,0.94,0.94}{\strut ##1}}} 20 | \expandafter\def\csname PYG@tok@k\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 21 | 
\expandafter\def\csname PYG@tok@kp\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 22 | \expandafter\def\csname PYG@tok@kt\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.56,0.13,0.00}{##1}}} 23 | \expandafter\def\csname PYG@tok@o\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.40,0.40,0.40}{##1}}} 24 | \expandafter\def\csname PYG@tok@ow\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 25 | \expandafter\def\csname PYG@tok@nb\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 26 | \expandafter\def\csname PYG@tok@nf\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.02,0.16,0.49}{##1}}} 27 | \expandafter\def\csname PYG@tok@nc\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.05,0.52,0.71}{##1}}} 28 | \expandafter\def\csname PYG@tok@nn\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.05,0.52,0.71}{##1}}} 29 | \expandafter\def\csname PYG@tok@ne\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 30 | \expandafter\def\csname PYG@tok@nv\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.73,0.38,0.84}{##1}}} 31 | \expandafter\def\csname PYG@tok@no\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.38,0.68,0.84}{##1}}} 32 | \expandafter\def\csname PYG@tok@nl\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.13,0.44}{##1}}} 33 | \expandafter\def\csname PYG@tok@ni\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.84,0.33,0.22}{##1}}} 34 | \expandafter\def\csname PYG@tok@na\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 35 | \expandafter\def\csname PYG@tok@nt\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.02,0.16,0.45}{##1}}} 36 | \expandafter\def\csname PYG@tok@nd\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.33,0.33,0.33}{##1}}} 37 | \expandafter\def\csname PYG@tok@s\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 38 | \expandafter\def\csname 
PYG@tok@sd\endcsname{\let\PYG@it=\textit\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 39 | \expandafter\def\csname PYG@tok@si\endcsname{\let\PYG@it=\textit\def\PYG@tc##1{\textcolor[rgb]{0.44,0.63,0.82}{##1}}} 40 | \expandafter\def\csname PYG@tok@se\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 41 | \expandafter\def\csname PYG@tok@sr\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.14,0.33,0.53}{##1}}} 42 | \expandafter\def\csname PYG@tok@ss\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.32,0.47,0.09}{##1}}} 43 | \expandafter\def\csname PYG@tok@sx\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.78,0.36,0.04}{##1}}} 44 | \expandafter\def\csname PYG@tok@m\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.13,0.50,0.31}{##1}}} 45 | \expandafter\def\csname PYG@tok@gh\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.00,0.50}{##1}}} 46 | \expandafter\def\csname PYG@tok@gu\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.50,0.00,0.50}{##1}}} 47 | \expandafter\def\csname PYG@tok@gd\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.63,0.00,0.00}{##1}}} 48 | \expandafter\def\csname PYG@tok@gi\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.00,0.63,0.00}{##1}}} 49 | \expandafter\def\csname PYG@tok@gr\endcsname{\def\PYG@tc##1{\textcolor[rgb]{1.00,0.00,0.00}{##1}}} 50 | \expandafter\def\csname PYG@tok@ge\endcsname{\let\PYG@it=\textit} 51 | \expandafter\def\csname PYG@tok@gs\endcsname{\let\PYG@bf=\textbf} 52 | \expandafter\def\csname PYG@tok@gp\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.78,0.36,0.04}{##1}}} 53 | \expandafter\def\csname PYG@tok@go\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.20,0.20,0.20}{##1}}} 54 | \expandafter\def\csname PYG@tok@gt\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.00,0.27,0.87}{##1}}} 55 | \expandafter\def\csname PYG@tok@err\endcsname{\def\PYG@bc##1{\setlength{\fboxsep}{0pt}\fcolorbox[rgb]{1.00,0.00,0.00}{1,1,1}{\strut ##1}}} 56 | \expandafter\def\csname 
PYG@tok@kc\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 57 | \expandafter\def\csname PYG@tok@kd\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 58 | \expandafter\def\csname PYG@tok@kn\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 59 | \expandafter\def\csname PYG@tok@kr\endcsname{\let\PYG@bf=\textbf\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 60 | \expandafter\def\csname PYG@tok@bp\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.00,0.44,0.13}{##1}}} 61 | \expandafter\def\csname PYG@tok@fm\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.02,0.16,0.49}{##1}}} 62 | \expandafter\def\csname PYG@tok@vc\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.73,0.38,0.84}{##1}}} 63 | \expandafter\def\csname PYG@tok@vg\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.73,0.38,0.84}{##1}}} 64 | \expandafter\def\csname PYG@tok@vi\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.73,0.38,0.84}{##1}}} 65 | \expandafter\def\csname PYG@tok@vm\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.73,0.38,0.84}{##1}}} 66 | \expandafter\def\csname PYG@tok@sa\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 67 | \expandafter\def\csname PYG@tok@sb\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 68 | \expandafter\def\csname PYG@tok@sc\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 69 | \expandafter\def\csname PYG@tok@dl\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 70 | \expandafter\def\csname PYG@tok@s2\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 71 | \expandafter\def\csname PYG@tok@sh\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 72 | \expandafter\def\csname PYG@tok@s1\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.25,0.44,0.63}{##1}}} 73 | \expandafter\def\csname PYG@tok@mb\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.13,0.50,0.31}{##1}}} 74 | \expandafter\def\csname 
PYG@tok@mf\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.13,0.50,0.31}{##1}}} 75 | \expandafter\def\csname PYG@tok@mh\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.13,0.50,0.31}{##1}}} 76 | \expandafter\def\csname PYG@tok@mi\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.13,0.50,0.31}{##1}}} 77 | \expandafter\def\csname PYG@tok@il\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.13,0.50,0.31}{##1}}} 78 | \expandafter\def\csname PYG@tok@mo\endcsname{\def\PYG@tc##1{\textcolor[rgb]{0.13,0.50,0.31}{##1}}} 79 | \expandafter\def\csname PYG@tok@ch\endcsname{\let\PYG@it=\textit\def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}} 80 | \expandafter\def\csname PYG@tok@cm\endcsname{\let\PYG@it=\textit\def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}} 81 | \expandafter\def\csname PYG@tok@cpf\endcsname{\let\PYG@it=\textit\def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}} 82 | \expandafter\def\csname PYG@tok@c1\endcsname{\let\PYG@it=\textit\def\PYG@tc##1{\textcolor[rgb]{0.25,0.50,0.56}{##1}}} 83 | 84 | \def\PYGZbs{\char`\\} 85 | \def\PYGZus{\char`\_} 86 | \def\PYGZob{\char`\{} 87 | \def\PYGZcb{\char`\}} 88 | \def\PYGZca{\char`\^} 89 | \def\PYGZam{\char`\&} 90 | \def\PYGZlt{\char`\<} 91 | \def\PYGZgt{\char`\>} 92 | \def\PYGZsh{\char`\#} 93 | \def\PYGZpc{\char`\%} 94 | \def\PYGZdl{\char`\$} 95 | \def\PYGZhy{\char`\-} 96 | \def\PYGZsq{\char`\'} 97 | \def\PYGZdq{\char`\"} 98 | \def\PYGZti{\char`\~} 99 | % for compatibility with earlier versions 100 | \def\PYGZat{@} 101 | \def\PYGZlb{[} 102 | \def\PYGZrb{]} 103 | \makeatother 104 | 105 | \renewcommand\PYGZsq{\textquotesingle} 106 | -------------------------------------------------------------------------------- /docs/build/latex/sphinxhowto.cls: -------------------------------------------------------------------------------- 1 | % 2 | % sphinxhowto.cls for Sphinx (http://sphinx-doc.org/) 3 | % 4 | 5 | \NeedsTeXFormat{LaTeX2e}[1995/12/01] 6 | \ProvidesClass{sphinxhowto}[2017/03/26 v1.5.4 Document class (Sphinx HOWTO)] 7 | 8 | % 'oneside' option 
overriding the 'twoside' default 9 | \newif\if@oneside 10 | \DeclareOption{oneside}{\@onesidetrue} 11 | % Pass remaining document options to the parent class. 12 | \DeclareOption*{\PassOptionsToClass{\CurrentOption}{\sphinxdocclass}} 13 | \ProcessOptions\relax 14 | 15 | % Default to two-side document 16 | \if@oneside 17 | % nothing to do (oneside is the default) 18 | \else 19 | \PassOptionsToClass{twoside}{\sphinxdocclass} 20 | \fi 21 | 22 | \LoadClass{\sphinxdocclass} 23 | 24 | % Set some sane defaults for section numbering depth and TOC depth. You can 25 | % reset these counters in your preamble. 26 | % 27 | \setcounter{secnumdepth}{2} 28 | 29 | % Change the title page to look a bit better, and fit in with the fncychap 30 | % ``Bjarne'' style a bit better. 31 | % 32 | \renewcommand{\maketitle}{% 33 | \noindent\rule{\textwidth}{1pt}\ifsphinxpdfoutput\newline\null\fi\par 34 | \ifsphinxpdfoutput 35 | \begingroup 36 | %\pdfstringdefDisableCommands{\def\\{, }\def\endgraf{ }\def\and{, }}% 37 | %\hypersetup{pdfauthor={\@author}, pdftitle={\@title}}% 38 | \endgroup 39 | \fi 40 | \begin{flushright} 41 | \sphinxlogo 42 | \py@HeaderFamily 43 | {\Huge \@title }\par 44 | {\itshape\large \py@release \releaseinfo}\par 45 | \vspace{25pt} 46 | {\Large 47 | \begin{tabular}[t]{c} 48 | \@author 49 | \end{tabular}}\par 50 | \vspace{25pt} 51 | \@date \par 52 | \py@authoraddress \par 53 | \end{flushright} 54 | \@thanks 55 | \setcounter{footnote}{0} 56 | \let\thanks\relax\let\maketitle\relax 57 | %\gdef\@thanks{}\gdef\@author{}\gdef\@title{} 58 | } 59 | 60 | \newcommand{\sphinxtableofcontents}{ 61 | \begingroup 62 | \parskip = 0mm 63 | \tableofcontents 64 | \endgroup 65 | \rule{\textwidth}{1pt} 66 | \vspace{12pt} 67 | } 68 | 69 | \@ifundefined{fancyhf}{ 70 | \pagestyle{plain}}{ 71 | \pagestyle{normal}} % start this way; change for 72 | \pagenumbering{arabic} % ToC & chapters 73 | 74 | \thispagestyle{empty} 75 | 76 | % Fix the bibliography environment to add an entry to the Table of 77 | 
% Contents. 78 | % For an article document class this environment is a section, 79 | % so no page break before it. 80 | % 81 | % Note: \phantomsection is required for TeXLive 2009 82 | % http://tex.stackexchange.com/questions/44088/when-do-i-need-to-invoke-phantomsection#comment166081_44091 83 | \newenvironment{sphinxthebibliography}[1]{% 84 | \phantomsection 85 | \begin{thebibliography}{1}% 86 | \addcontentsline{toc}{section}{\ifdefined\refname\refname\else\ifdefined\bibname\bibname\fi\fi}}{\end{thebibliography}} 87 | 88 | 89 | % Same for the indices. 90 | % The memoir class already does this, so we don't duplicate it in that case. 91 | % 92 | \@ifclassloaded{memoir} 93 | {\newenvironment{sphinxtheindex}{\begin{theindex}}{\end{theindex}}} 94 | {\newenvironment{sphinxtheindex}{% 95 | \phantomsection 96 | \begin{theindex}% 97 | \addcontentsline{toc}{section}{\indexname}}{\end{theindex}}} 98 | -------------------------------------------------------------------------------- /docs/build/latex/sphinxmanual.cls: -------------------------------------------------------------------------------- 1 | % 2 | % sphinxmanual.cls for Sphinx (http://sphinx-doc.org/) 3 | % 4 | 5 | \NeedsTeXFormat{LaTeX2e}[1995/12/01] 6 | \ProvidesClass{sphinxmanual}[2017/03/26 v1.5.4 Document class (Sphinx manual)] 7 | 8 | % chapters starting at odd pages (overridden by 'openany' document option) 9 | \PassOptionsToClass{openright}{\sphinxdocclass} 10 | 11 | % 'oneside' option overriding the 'twoside' default 12 | \newif\if@oneside 13 | \DeclareOption{oneside}{\@onesidetrue} 14 | % Pass remaining document options to the parent class. 
15 | \DeclareOption*{\PassOptionsToClass{\CurrentOption}{\sphinxdocclass}} 16 | \ProcessOptions\relax 17 | 18 | % Defaults two-side document 19 | \if@oneside 20 | % nothing to do (oneside is the default) 21 | \else 22 | \PassOptionsToClass{twoside}{\sphinxdocclass} 23 | \fi 24 | 25 | \LoadClass{\sphinxdocclass} 26 | 27 | % Set some sane defaults for section numbering depth and TOC depth. You can 28 | % reset these counters in your preamble. 29 | % 30 | \setcounter{secnumdepth}{2} 31 | \setcounter{tocdepth}{1} 32 | 33 | % Change the title page to look a bit better, and fit in with the fncychap 34 | % ``Bjarne'' style a bit better. 35 | % 36 | \renewcommand{\maketitle}{% 37 | \let\spx@tempa\relax 38 | \ifHy@pageanchor\def\spx@tempa{\Hy@pageanchortrue}\fi 39 | \hypersetup{pageanchor=false}% avoid duplicate destination warnings 40 | \begin{titlepage}% 41 | \let\footnotesize\small 42 | \let\footnoterule\relax 43 | \noindent\rule{\textwidth}{1pt}\ifsphinxpdfoutput\newline\null\fi\par 44 | \ifsphinxpdfoutput 45 | \begingroup 46 | %\pdfstringdefDisableCommands{\def\\{, }\def\endgraf{ }\def\and{, }}% 47 | %\hypersetup{pdfauthor={\@author}, pdftitle={\@title}}% 48 | \endgroup 49 | \fi 50 | \begin{flushright}% 51 | \sphinxlogo 52 | \py@HeaderFamily 53 | {\Huge \@title \par} 54 | {\itshape\LARGE \py@release\releaseinfo \par} 55 | \vfill 56 | {\LARGE 57 | \begin{tabular}[t]{c} 58 | \@author 59 | \end{tabular} 60 | \par} 61 | \vfill\vfill 62 | {\large 63 | \@date \par 64 | \vfill 65 | \py@authoraddress \par 66 | }% 67 | \end{flushright}%\par 68 | \@thanks 69 | \end{titlepage}% 70 | \setcounter{footnote}{0}% 71 | \let\thanks\relax\let\maketitle\relax 72 | %\gdef\@thanks{}\gdef\@author{}\gdef\@title{} 73 | \if@openright\cleardoublepage\else\clearpage\fi 74 | \spx@tempa 75 | } 76 | 77 | \newcommand{\sphinxtableofcontents}{% 78 | \pagenumbering{roman}% 79 | \pagestyle{plain}% 80 | \begingroup 81 | \parskip \z@skip 82 | \tableofcontents 83 | \endgroup 84 | % before resetting page 
counter, let's do the right thing. 85 | \if@openright\cleardoublepage\else\clearpage\fi 86 | \pagenumbering{arabic}% 87 | \ifdefined\fancyhf\pagestyle{normal}\fi 88 | } 89 | 90 | % This is needed to get the width of the section # area wide enough in the 91 | % library reference. Doing it here keeps it the same for all the manuals. 92 | % 93 | \renewcommand*\l@section{\@dottedtocline{1}{1.5em}{2.6em}} 94 | \renewcommand*\l@subsection{\@dottedtocline{2}{4.1em}{3.5em}} 95 | 96 | % Fix the bibliography environment to add an entry to the Table of 97 | % Contents. 98 | % For a report document class this environment is a chapter. 99 | % 100 | % Note: \phantomsection is required for TeXLive 2009 101 | % http://tex.stackexchange.com/questions/44088/when-do-i-need-to-invoke-phantomsection#comment166081_44091 102 | \newenvironment{sphinxthebibliography}[1]{% 103 | \if@openright\cleardoublepage\else\clearpage\fi 104 | \phantomsection 105 | \begin{thebibliography}{1}% 106 | \addcontentsline{toc}{chapter}{\bibname}}{\end{thebibliography}} 107 | 108 | % Same for the indices. 109 | % The memoir class already does this, so we don't duplicate it in that case. 
110 | % 111 | \@ifclassloaded{memoir} 112 | {\newenvironment{sphinxtheindex}{\begin{theindex}}{\end{theindex}}} 113 | {\newenvironment{sphinxtheindex}{% 114 | \if@openright\cleardoublepage\else\clearpage\fi 115 | \phantomsection 116 | \begin{theindex}% 117 | \addcontentsline{toc}{chapter}{\indexname}}{\end{theindex}}} 118 | -------------------------------------------------------------------------------- /docs/environment.yml: -------------------------------------------------------------------------------- 1 | name: phonet 2 | 3 | channels: 4 | - anaconda 5 | 6 | dependencies: 7 | - python==3.7 8 | - tensorflow==2.7.0 9 | - pip: 10 | - pandas 11 | - pysptk 12 | - six 13 | - matplotlib 14 | - python_speech_features 15 | - tqdm 16 | - sphinx-gallery 17 | - sphinxcontrib-napoleon 18 | - sphinxcontrib-bibtex 19 | - sphinxcontrib-inlinesyntaxhighlight 20 | 21 | -------------------------------------------------------------------------------- /docs/make.bat: -------------------------------------------------------------------------------- 1 | @ECHO OFF 2 | 3 | pushd %~dp0 4 | 5 | REM Command file for Sphinx documentation 6 | 7 | if "%SPHINXBUILD%" == "" ( 8 | set SPHINXBUILD=sphinx-build 9 | ) 10 | set SOURCEDIR=source 11 | set BUILDDIR=build 12 | set SPHINXPROJ=phonet 13 | 14 | if "%1" == "" goto help 15 | 16 | %SPHINXBUILD% >NUL 2>NUL 17 | if errorlevel 9009 ( 18 | echo. 19 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx 20 | echo.installed, then set the SPHINXBUILD environment variable to point 21 | echo.to the full path of the 'sphinx-build' executable. Alternatively you 22 | echo.may add the Sphinx directory to PATH. 23 | echo. 
24 | echo.If you don't have Sphinx installed, grab it from 25 | echo.http://sphinx-doc.org/ 26 | exit /b 1 27 | ) 28 | 29 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 30 | goto end 31 | 32 | :help 33 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% 34 | 35 | :end 36 | popd 37 | -------------------------------------------------------------------------------- /docs/source/conf.py: -------------------------------------------------------------------------------- 1 | #!/usr/bin/env python3 2 | # -*- coding: utf-8 -*- 3 | # 4 | # phonet documentation build configuration file, created by 5 | # sphinx-quickstart on Sat Mar 9 04:39:38 2019. 6 | # 7 | # This file is execfile()d with the current directory set to its 8 | # containing dir. 9 | # 10 | # Note that not all possible configuration values are present in this 11 | # autogenerated file. 12 | # 13 | # All configuration values have a default; values that are commented out 14 | # serve to show the default. 15 | 16 | import os 17 | import sys 18 | 19 | from unittest.mock import MagicMock 20 | 21 | class Mock(MagicMock): 22 | @classmethod 23 | def __getattr__(cls, name): 24 | return MagicMock() 25 | 26 | MOCK_MODULES = [] 27 | sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) 28 | 29 | autodoc_mock_imports = ["tensorfow"] 30 | 31 | 32 | # If extensions (or modules to document with autodoc) are in another directory, 33 | # add these directories to sys.path here. If the directory is relative to the 34 | # documentation root, use os.path.abspath to make it absolute, like shown here. 35 | # 36 | 37 | sys.path.insert(0, os.path.abspath('.')) 38 | sys.path.insert(0,os.path.abspath('../../')) 39 | 40 | 41 | # -- General configuration ------------------------------------------------ 42 | 43 | # If your documentation needs a minimal Sphinx version, state it here. 44 | # 45 | # needs_sphinx = '1.0' 46 | 47 | # Add any Sphinx extension module names here, as strings. 
They can be 48 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom 49 | # ones. 50 | extensions = ['sphinx.ext.autodoc'] 51 | 52 | # Add any paths that contain templates here, relative to this directory. 53 | templates_path = ['_templates'] 54 | 55 | # The suffix(es) of source filenames. 56 | # You can specify multiple suffix as a list of string: 57 | # 58 | # source_suffix = ['.rst', '.md'] 59 | source_suffix = '.rst' 60 | 61 | # The master toctree document. 62 | master_doc = 'index' 63 | 64 | # General information about the project. 65 | project = 'phonet' 66 | copyright = '2019, Camilo Vasquez' 67 | author = 'Camilo Vasquez' 68 | 69 | # The version info for the project you're documenting, acts as replacement for 70 | # |version| and |release|, also used in various other places throughout the 71 | # built documents. 72 | # 73 | # The short X.Y version. 74 | version = '0.3' 75 | # The full version, including alpha/beta/rc tags. 76 | release = '0.3' 77 | 78 | # The language for content autogenerated by Sphinx. Refer to documentation 79 | # for a list of supported languages. 80 | # 81 | # This is also used if you do content translation via gettext catalogs. 82 | # Usually you set "language" from the command line for these cases. 83 | language = None 84 | 85 | # List of patterns, relative to source directory, that match files and 86 | # directories to ignore when looking for source files. 87 | # This patterns also effect to html_static_path and html_extra_path 88 | exclude_patterns = [] 89 | 90 | # The name of the Pygments (syntax highlighting) style to use. 91 | pygments_style = 'sphinx' 92 | 93 | # If true, `todo` and `todoList` produce output, else they produce nothing. 94 | todo_include_todos = False 95 | 96 | 97 | # -- Options for HTML output ---------------------------------------------- 98 | 99 | # The theme to use for HTML and HTML Help pages. See the documentation for 100 | # a list of builtin themes. 
101 | # 102 | html_theme = 'sphinx_rtd_theme' 103 | 104 | # Theme options are theme-specific and customize the look and feel of a theme 105 | # further. For a list of options available for each theme, see the 106 | # documentation. 107 | # 108 | # html_theme_options = {} 109 | 110 | # Add any paths that contain custom static files (such as style sheets) here, 111 | # relative to this directory. They are copied after the builtin static files, 112 | # so a file named "default.css" will overwrite the builtin "default.css". 113 | html_static_path = ['_static'] 114 | 115 | 116 | # -- Options for HTMLHelp output ------------------------------------------ 117 | 118 | # Output file base name for HTML help builder. 119 | htmlhelp_basename = 'phonetdoc' 120 | 121 | 122 | # -- Options for LaTeX output --------------------------------------------- 123 | 124 | latex_elements = { 125 | # The paper size ('letterpaper' or 'a4paper'). 126 | # 127 | # 'papersize': 'letterpaper', 128 | 129 | # The font size ('10pt', '11pt' or '12pt'). 130 | # 131 | # 'pointsize': '10pt', 132 | 133 | # Additional stuff for the LaTeX preamble. 134 | # 135 | # 'preamble': '', 136 | 137 | # Latex figure (float) alignment 138 | # 139 | # 'figure_align': 'htbp', 140 | } 141 | 142 | # Grouping the document tree into LaTeX files. List of tuples 143 | # (source start file, target name, title, 144 | # author, documentclass [howto, manual, or own class]). 145 | latex_documents = [ 146 | (master_doc, 'phonet.tex', 'phonet Documentation', 147 | 'Camilo Vasquez', 'manual'), 148 | ] 149 | 150 | 151 | # -- Options for manual page output --------------------------------------- 152 | 153 | # One entry per manual page. List of tuples 154 | # (source start file, name, description, authors, manual section). 
155 | man_pages = [ 156 | (master_doc, 'phonet', 'phonet Documentation', 157 | [author], 1) 158 | ] 159 | 160 | 161 | # -- Options for Texinfo output ------------------------------------------- 162 | 163 | # Grouping the document tree into Texinfo files. List of tuples 164 | # (source start file, target name, title, author, 165 | # dir menu entry, description, category) 166 | texinfo_documents = [ 167 | (master_doc, 'phonet', 'phonet Documentation', 168 | author, 'phonet', 'compute posteriors probabilities of phonological classes from an audio file for several groups of phonemes.', 169 | 'Miscellaneous'), 170 | ] 171 | -------------------------------------------------------------------------------- /docs/source/help.rst: -------------------------------------------------------------------------------- 1 | Need Help? 2 | ================================== 3 | 4 | If you have trouble with Phonet, please write to Camilo Vasquez at: juan.vasquez@fau.de 5 | 6 | -------------------------------------------------------------------------------- /docs/source/index.rst: -------------------------------------------------------------------------------- 1 | .. phonet documentation master file, created by 2 | sphinx-quickstart on Sat Mar 9 04:39:38 2019. 3 | You can adapt this file completely to your liking, but it should at least 4 | contain the root `toctree` directive. 5 | 6 | Welcome to Phonet's documentation! 7 | ================================== 8 | 9 | This toolkit computes posterior probabilities of phonological classes from audio files for several groups of phonemes according to the mode and manner of articulation. 10 | 11 | If you are not sure about what phonological classes are, have a look at this 12 | `Phonological classes tutorial `_ 13 | 14 | 15 | The code for this project is available at https://github.com/jcvasquezc/phonet .
16 | 17 | The list of the phonological classes available and the phonemes that are activated for each phonological class are observed in the following Table 18 | 19 | 20 | ================== ================================================================================ 21 | Phonological class Phonemes 22 | ================== ================================================================================ 23 | vocalic /a/, /e/, /i/, /o/, /u/ 24 | consonantal /b/, /tS/, /d/, /f/, /g/, /x/, /k/, /l/, /ʎ/, /m/, /n/, /p/, /ɾ/, /r/, /s/, /t/ 25 | back /a/, /o/, /u/ 26 | anterior /e/, /i/ 27 | open /a/, /e/, /o/ 28 | close /i/, /u/ 29 | nasal /m/, /n/ 30 | stop /p/, /b/, /t/, /k/, /g/, /tS/, /d/ 31 | continuant /f/, /b/, /tS/, /d/, /s/, /g/, /ʎ/, /x/ 32 | lateral /l/ 33 | flap /ɾ/ 34 | trill /r/ 35 | voiced /a/, /e/, /i/, /o/, /u/, /b/, /d/, /l/, /m/, /n/, /r/, /g/, /ʎ/ 36 | strident /f/, /s/, /tS/ 37 | labial /m/, /p/, /b/, /f/ 38 | dental /t/, /d/ 39 | velar /k/, /g/, /x/ 40 | pause /sil/ 41 | ================== ================================================================================ 42 | 43 | 44 | .. toctree:: 45 | :maxdepth: 3 46 | 47 | help 48 | reference 49 | 50 | 51 | 52 | Supported features: 53 | 54 | - :py:meth:`phonet.model` - This is the architecture used for the estimation of the phonological classes using a multitask learning strategy. It consists of a 2 Bidirectional GRU layers, followed by a time-distributed dense layer 55 | - :py:meth:`phonet.get_phon_wav` - Estimate the phonological classes using the BGRU models for an audio file (.wav) 56 | - :py:meth:`phonet.get_phon_path` - Estimate the phonological classes using the BGRU models for all the (.wav) audio files included inside a directory. 57 | - :py:meth:`phonet.get_posteriorgram` - Estimate the posteriorgram for an audio file (.wav) sampled at 16kHz. 58 | - :py:meth:`phonet.get_PLLR` - Estimate the phonological log-likelihood ratio (PLLR) features for an audio file (.wav) sampled at 16kHz. 
59 | 60 | Installation 61 | ------------------------------------- 62 | 63 | From the source file:: 64 | 65 | git clone https://github.com/jcvasquezc/phonet 66 | cd phonet 67 | python setup.py install 68 | 69 | Methods 70 | ------------------------------------- 71 | 72 | .. automodule:: phonet 73 | 74 | .. autoclass:: Phonet 75 | :members: 76 | 77 | 78 | Indices and tables 79 | ------------------------------------- 80 | * :ref:`genindex` 81 | * :ref:`modindex` 82 | * :ref:`search` 83 | 84 | 85 | 86 | Help 87 | ------------------------------------- 88 | If you have trouble with Phonet, please write to Camilo Vasquez at: juan.vasquez@fau.de 89 | -------------------------------------------------------------------------------- /docs/source/reference.rst: -------------------------------------------------------------------------------- 1 | References 2 | ================================== 3 | 4 | If you use Phonet for research purposes, please cite the following paper: 5 | 6 | Vásquez-Correa, J. C., Klumpp, P., Orozco-Arroyave, J. R., & Nöth, E. (2019). Phonet: a Tool Based on Gated Recurrent Neural Networks to Extract Phonological Posteriors from Speech. Proc. Interspeech 2019, 549-553. 7 | 8 | `Download paper `_ 9 | 10 | -------------------------------------------------------------------------------- /phonet/Phonological.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Created on Feb 28 2019 4 | @author: J. C. 
Vasquez-Correa 5 | Pattern recognition Lab, University of Erlangen-Nuremberg 6 | Faculty of Engineering, University of Antioquia, 7 | juan.vasquez@fau.de 8 | """ 9 | 10 | import numpy as np 11 | import pandas as pd 12 | 13 | class Phonological: 14 | 15 | def __init__(self): 16 | 17 | self.list_phonological={"vocalic" : ["a","e","i","o","u", "w", "j"], 18 | "consonantal" : ["b", "B","d", "D","f", "F","k","l","m","n", "N","p","r","rr","s", "Z", "T","t","g", "G","tS","S","x", "jj", "J", "L", "z"], 19 | "back" : ["a","o","u", "w"], 20 | "anterior" : ["e","i","j"], 21 | "open" : ["a","e","o"], 22 | "close" : ["j","i","u", "w"], 23 | "nasal" : ["m","n", "N"], 24 | "stop" : ["p","b", "B","t","k","g", "G","tS","d", "D"], 25 | "continuant" : ["f", "F","b", "B","tS","d", "D","s", "Z", "T","x", "jj", "J","g", "G","S","L","x", "jj", "J", "z"], 26 | "lateral" :["l"], 27 | "flap" :["r"], 28 | "trill" :["rr"], 29 | "voice" :["a","e","i","o","u", "w","b", "B","d", "D","l","m","n", "N","rr","g", "G","L", "j"], 30 | "strident" :["tS","f", "F","s", "Z", "T", "z", "S"], 31 | "labial" :["m","p","b", "B","f", "F"], 32 | "dental" :["t","d", "D"], 33 | "velar" :["k","g", "G"], 34 | "pause" : ["sil", ""]} 35 | 36 | def get_list_phonological(self): 37 | return self.list_phonological 38 | 39 | def get_list_phonological_keys(self): 40 | keys=self.list_phonological.keys() 41 | return list(keys) 42 | 43 | 44 | def get_d1(self): 45 | keys=self.get_list_phonological_keys() 46 | dict_1={"xmin":[],"xmax":[],"phoneme":[],"phoneme_code":[]} 47 | for k in keys: 48 | dict_1[k]=[] 49 | return dict_1 50 | 51 | def get_d2(self): 52 | keys=self.get_list_phonological_keys() 53 | dict_2={"n_frame":[],"phoneme":[],"phoneme_code":[]} 54 | for k in keys: 55 | dict_2[k]=[] 56 | return dict_2 57 | 58 | def get_list_phonemes(self): 59 | keys=self.get_list_phonological_keys() 60 | phon=[] 61 | for k in keys: 62 | phon.append(self.list_phonological[k]) 63 | phon=np.hstack(phon) 64 | 65 | return np.unique(phon) 66 | 
67 | 68 | def main(): 69 | phon=Phonological() 70 | keys=phon.get_list_phonological_keys() 71 | 72 | d1=phon.get_d1() 73 | d2=phon.get_d2() 74 | ph=phon.get_list_phonemes() 75 | 76 | if __name__=="__main__": 77 | main() 78 | 79 | 80 | -------------------------------------------------------------------------------- /phonet/__init__.py: -------------------------------------------------------------------------------- 1 | from phonet.phonet import Phonet 2 | from phonet.Phonological import Phonological -------------------------------------------------------------------------------- /phonet/audios/pataka.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/phonet/audios/pataka.wav -------------------------------------------------------------------------------- /phonet/audios/sentence.wav: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/phonet/audios/sentence.wav -------------------------------------------------------------------------------- /phonet/example.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 28 2019 4 | @author: J. C. 
Vasquez-Correa 5 | Pattern recognition Lab, University of Erlangen-Nuremberg 6 | Faculty of Engineering, University of Antioquia, 7 | juan.vasquez@fau.de 8 | """ 9 | 10 | from phonet import Phonet 11 | import os 12 | 13 | if __name__=="__main__": 14 | 15 | PATH=os.path.dirname(os.path.abspath(__file__)) 16 | 17 | path_results=os.path.join(PATH, "examples") 18 | 19 | if not os.path.exists(path_results): 20 | os.makedirs(path_results) 21 | ## get the "stop" phonological posterior from a single file 22 | file_audio=PATH+"/audios/pataka.wav" 23 | file_feat=path_results+"/pataka" 24 | phon=Phonet(["stop"]) 25 | phon.get_phon_wav(file_audio, file_feat, True) 26 | 27 | # get the "nasal" phonological posterior from a single file 28 | file_audio=PATH+"/audios/sentence.wav" 29 | file_feat=path_results+"/sentence_nasal" 30 | phon=Phonet(["nasal"]) 31 | phon.get_phon_wav(file_audio, file_feat, True) 32 | 33 | # get the "strident" phonological posterior from a single file 34 | file_feat=path_results+"/sentence_strident" 35 | phon=Phonet(["strident"]) 36 | phon.get_phon_wav(file_audio, file_feat, True) 37 | 38 | # get "strident, nasal, and back" phonological posteriors from a single file 39 | file_feat=path_results+"/sentence_all" 40 | phon=Phonet(["strident", "nasal", "back"]) 41 | phon.get_phon_wav(file_audio, file_feat, True) 42 | 43 | 44 | # compute the posteriorgram for an audio_file for different phonological posteriors 45 | phon=Phonet(["vocalic", "strident", "nasal", "back", "stop", "pause"]) 46 | phon.get_posteriorgram(file_audio) 47 | 48 | 49 | # get phonological posteriors from de audio files included in a directory 50 | directory=PATH+"/phonclasses/" 51 | phon=Phonet(["vocalic", "strident", "nasal", "back", "stop", "pause"]) 52 | phon.get_phon_path(PATH+"/audios/", path_results) 53 | 54 | ## get the PLLR features from an audio file 55 | phon=Phonet(["all"]) 56 | PLLR=phon.get_PLLR(file_audio, plot_flag=True) 57 | print(PLLR.head()) 
-------------------------------------------------------------------------------- /phonet/models/model.h5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/phonet/models/model.h5 -------------------------------------------------------------------------------- /phonet/models/mu.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/phonet/models/mu.npy -------------------------------------------------------------------------------- /phonet/models/phonemes.hdf5: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/phonet/models/phonemes.hdf5 -------------------------------------------------------------------------------- /phonet/models/std.npy: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/phonet/models/std.npy -------------------------------------------------------------------------------- /phonet/train/Phonological.py: -------------------------------------------------------------------------------- 1 | 2 | """ 3 | Created on Feb 28 2019 4 | @author: J. C. 
Vasquez-Correa 5 | Pattern recognition Lab, University of Erlangen-Nuremberg 6 | Faculty of Engineering, University of Antioquia, 7 | juan.vasquez@fau.de 8 | """ 9 | 10 | import numpy as np 11 | import pandas as pd 12 | 13 | class Phonological: 14 | 15 | def __init__(self): 16 | 17 | self.list_phonological={"vocalic" : ["a","e","i","o","u", "w", "j"], 18 | "consonantal" : ["b", "B","d", "D","f", "F","k","l","m","n", "N","p","r","rr","s", "Z", "T","t","g", "G","tS","S","x", "jj", "J", "L", "z"], 19 | "back" : ["a","o","u", "w"], 20 | "anterior" : ["e","i","j"], 21 | "open" : ["a","e","o"], 22 | "close" : ["j","i","u", "w"], 23 | "nasal" : ["m","n", "N"], 24 | "stop" : ["p","b", "B","t","k","g", "G","tS","d", "D"], 25 | "continuant" : ["f", "F","b", "B","tS","d", "D","s", "Z", "T","x", "jj", "J","g", "G","S","L","x", "jj", "J", "z"], 26 | "lateral" :["l"], 27 | "flap" :["r"], 28 | "trill" :["rr"], 29 | "voice" :["a","e","i","o","u", "w","b", "B","d", "D","l","m","n", "N","rr","g", "G","L", "j"], 30 | "strident" :["tS","f", "F","s", "Z", "T", "z", "S"], 31 | "labial" :["m","p","b", "B","f", "F"], 32 | "dental" :["t","d", "D"], 33 | "velar" :["k","g", "G"], 34 | "pause" : ["sil", ""]} 35 | 36 | def get_list_phonological(self): 37 | return self.list_phonological 38 | 39 | def get_list_phonological_keys(self): 40 | keys=self.list_phonological.keys() 41 | return list(keys) 42 | 43 | def get_d1(self): 44 | keys=self.get_list_phonological_keys() 45 | dict_1={"xmin":[],"xmax":[],"phoneme":[],"phoneme_code":[]} 46 | for k in keys: 47 | dict_1[k]=[] 48 | return dict_1 49 | 50 | def get_d2(self): 51 | keys=self.get_list_phonological_keys() 52 | dict_2={"n_frame":[],"phoneme":[],"phoneme_code":[]} 53 | for k in keys: 54 | dict_2[k]=[] 55 | return dict_2 56 | 57 | def get_list_phonemes(self): 58 | keys=self.get_list_phonological_keys() 59 | phon=[] 60 | for k in keys: 61 | phon.append(self.list_phonological[k]) 62 | phon=np.hstack(phon) 63 | 64 | return np.unique(phon) 65 | 66 | 
67 | def main(): 68 | phon=Phonological() 69 | keys=phon.get_list_phonological_keys() 70 | print(keys) 71 | d1=phon.get_d1() 72 | print(d1) 73 | d2=phon.get_d2() 74 | print(d2) 75 | ph=phon.get_list_phonemes() 76 | print(ph) 77 | 78 | if __name__=="__main__": 79 | main() 80 | 81 | 82 | -------------------------------------------------------------------------------- /phonet/train/README.md: -------------------------------------------------------------------------------- 1 | #Training phonet 2 | 3 | To train phonet with a different dataset in a different languages, follow the next steps. 4 | 5 | 1. Install dependencies 6 | 7 | ```pip install -r requirements.txt``` 8 | 9 | 2. Feature extraction 10 | 11 | Extract the Mel-filterbank energies from the training and test sets, using the script ```extract_feat.py``` as follows 12 | 13 | ```python extract_feat.py ``` 14 | 15 | Example with the CIEMPIESS database 16 | 17 | ```python extract_feat.py ../train_data/audio_same/ ../features/train/``` 18 | 19 | ```python extract_feat.py ../test_data/audio_same/ ../features/test/``` 20 | 21 | 22 | 3. Transform the phonological classes you want to train. Edit the file ```Phonological.py``` according to your classes. 23 | 24 | Edit variable ```list_phonological``` according to the phonological classes to train 25 | 26 | Phonemes should be written in XAMPA symbols 27 | 28 | 29 | 4. Read and process the textgrid labels ```read_textgrids.py``` as follows 30 | 31 | The ```textgrid``` files should be ```PRAAT``` labeled files with Xampa symbols. 32 | 33 | You can obtain them using the [WebMAUS forced alignemt tool](https://clarin.phonetik.uni-muenchen.de/BASWebServices/interface/WebMAUSBasic) 34 | 35 | Once you have the textgrid, you can use the following script. 
36 | 37 | ```python read_textgrids.py ``` 38 | 39 | 40 | Example with the CIEMPIESS database 41 | 42 | ```python read_textgrids.py ../train_data/textgrid/ ../labels/train/``` 43 | 44 | ```python read_textgrids.py ../test_data/textgrid/ ../labels/test/``` 45 | 46 | 5. Get the feature matrices for train and validation to train phonet with ```get_matrices_labels.py``` as follows 47 | 48 | ```python get_matrices_labels.py ``` 49 | 50 | From the Example 51 | 52 | ```python get_matrices_labels.py ../features/train/ ../labels/train/ ../seq_train/``` 53 | 54 | ```python get_matrices_labels.py ../features/test/ ../labels/test/ ../seq_test/``` 55 | 56 | 57 | 58 | 6. Train the model using a multi-task learning strategy. Instead of the bank of parallel RNNs, a single neural network is trained. 59 | The results are similar to those obtained in the original paper. However, this version converges faster. 60 | 61 | ```python main_train_RNN_MT.py ``` 62 | 63 | Example 64 | 65 | ```python main_train_RNN_MT.py ../seq_train/ ../seq_test/ ../results/MT_test/``` 66 | 67 | 68 | 7.
Additionally, you can train the model for phoneme recognition, using ```main_train_RNN_phoneme.py``` 69 | 70 | ```python main_train_RNN_phoneme.py ``` 71 | 72 | 73 | -------------------------------------------------------------------------------- /phonet/train/extract_feat.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import os 4 | from scipy.io.wavfile import read 5 | import numpy as np 6 | import pysptk.sptk as sptk 7 | from six.moves import cPickle as pickle 8 | 9 | import python_speech_features as pyfeat 10 | from tqdm import tqdm 11 | 12 | def extract_feat(signal,fs): 13 | size_frame=0.025 14 | time_shift=0.010 15 | order=13 16 | nfilt=33 17 | signal=signal-np.mean(signal) 18 | signal=signal/np.max(np.abs(signal)) 19 | Fbank, energy=pyfeat.fbank(signal,samplerate=fs,winlen=size_frame,winstep=time_shift, 20 | nfilt=nfilt,nfft=512,lowfreq=0,highfreq=None,preemph=0.97) 21 | energy= np.expand_dims(energy, axis=1) 22 | feat2=np.concatenate((Fbank,energy),axis=1) 23 | feat2=np.log10(feat2) 24 | 25 | return feat2 26 | 27 | 28 | if __name__=="__main__": 29 | if len(sys.argv)!=3: 30 | print("python extract_feat.py ") 31 | sys.exit() 32 | 33 | path_audios=sys.argv[1] 34 | path_features=sys.argv[2] 35 | 36 | hf=os.listdir(path_audios) 37 | hf.sort() 38 | pbar=tqdm(range(len(hf))) 39 | for j in pbar: 40 | pbar.set_description("Processing %s" % hf[j]) 41 | fs,data=read(path_audios+hf[j]) 42 | feat=extract_feat(data,fs) 43 | file_results=path_features+hf[j].replace(".wav", ".pickle") 44 | try: 45 | f = open(file_results, 'wb') 46 | pickle.dump(feat, f, pickle.HIGHEST_PROTOCOL) 47 | f.close() 48 | except Exception as e: 49 | print('Unable to save data to', file_results, ':', e) 50 | 51 | -------------------------------------------------------------------------------- /phonet/train/get_matrices_labels.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import 
os 4 | from six.moves import cPickle as pickle 5 | import numpy as np 6 | PATH=os.path.dirname(os.path.abspath(__file__)) 7 | sys.path.append(PATH) 8 | from Phonological import Phonological 9 | from tqdm import tqdm 10 | 11 | if __name__=="__main__": 12 | if len(sys.argv)!=4: 13 | print("python get_matrices_labels.py ") 14 | sys.exit() 15 | 16 | Phon=Phonological() 17 | path_feat=sys.argv[1] 18 | path_lab=sys.argv[2] 19 | path_seq=sys.argv[3] 20 | if not os.path.exists(path_seq): 21 | os.makedirs(path_seq) 22 | len_seq=40 23 | 24 | hf=os.listdir(path_lab) 25 | hf.sort() 26 | 27 | hf2=os.listdir(path_feat) 28 | hf2.sort() 29 | 30 | Feat=[] 31 | warnings=0 32 | pbar=tqdm(range(len(hf))) 33 | for j in pbar: 34 | pbar.set_description("Processing %s" % hf[j]) 35 | pickle_file1=path_feat+hf[j] 36 | if not (hf[j] in hf2): 37 | print("warning, file labels not found in audio", hf[j]) 38 | warnings=warnings+1 39 | continue 40 | pickle_file2=path_lab+hf[j] 41 | with open(pickle_file1, 'rb') as f: 42 | feat = pickle.load(f) 43 | f.close() 44 | with open(pickle_file2, 'rb') as f: 45 | dict1, dict2 = pickle.load(f) 46 | f.close() 47 | 48 | nf=int(feat.shape[0]/len_seq) 49 | start=0 50 | fin=len_seq 51 | for r in range(nf): 52 | Lab=Phon.get_d2() 53 | 54 | featmat_t=feat[start:fin,:] 55 | keyslab=Lab.keys() 56 | for k in keyslab: 57 | Lab[k]=dict2[k][start:fin] 58 | start=start+len_seq 59 | fin=fin+len_seq 60 | 61 | list_phonokeys=list(Phon.get_list_phonological()) 62 | list_phonokeys.append("phoneme_code") 63 | 64 | for k in list_phonokeys: 65 | Lab[k]=np.stack(Lab[k], axis=0) 66 | Lab[k]=np.expand_dims(Lab[k], axis=2) 67 | 68 | 69 | save={'features': featmat_t, 'labels':Lab} 70 | file_lab=path_seq+hf[j].replace('.pickle', '')+'_'+str(r)+'.pickle' 71 | 72 | try: 73 | f = open(file_lab, 'wb') 74 | pickle.dump(save, f, pickle.HIGHEST_PROTOCOL) 75 | f.close() 76 | except Exception as e: 77 | print('Unable to save data to', file_lab, ':', e) 
-------------------------------------------------------------------------------- /phonet/train/get_scaler.py: -------------------------------------------------------------------------------- https://raw.githubusercontent.com/jcvasquezc/phonet/49b7416c80393420f329ac1edc180c6cd3189a18/phonet/train/get_scaler.py -------------------------------------------------------------------------------- /phonet/train/main_train_RNN_MT.py: -------------------------------------------------------------------------------- 1 | import sys 2 | from six.moves import cPickle as pickle 3 | 4 | from keras.layers import Input, BatchNormalization, Bidirectional, GRU, Permute, Reshape, Lambda, Dense, RepeatVector, multiply, TimeDistributed, Dropout, LSTM 5 | from keras.utils import np_utils 6 | from keras.models import Model 7 | from keras import optimizers 8 | import keras.backend as K 9 | from keras.callbacks import EarlyStopping, ModelCheckpoint, tensorboard_v1 10 | 11 | import numpy as np 12 | import os 13 | from sklearn.metrics import classification_report, recall_score, precision_score, f1_score 14 | #import pandas as pd 15 | 16 | import matplotlib 17 | matplotlib.use('agg') 18 | import matplotlib.pyplot as plt 19 | from utils import plot_confusion_matrix 20 | 21 | from sklearn.utils import class_weight 22 | 23 | from utils import confusion_matrix, get_scaler, test_labels 24 | 25 | from Phonological import Phonological 26 | 27 | Phon=Phonological() 28 | 29 | 30 | 31 | 32 | def generate_data(directory, batch_size, mu, std): 33 | i = 0 34 | file_list = os.listdir(directory) 35 | file_list.sort() 36 | keys=Phon.get_list_phonological_keys() 37 | while True: 38 | seq_batch = [] 39 | 40 | y={} 41 | for k in keys: 42 | y[k]=[] 43 | 44 | class_weights=[] 45 | weights=[] 46 | y2=[] 47 | for b in range(batch_size): 48 | if i == len(file_list): 49 | i = 0 50 | np.random.shuffle(file_list) 51 | with open(directory+file_list[i], 'rb') as f: 52 | save = pickle.load(f) 53 | f.close() 54 | 
seq_batch.append((save['features']-mu)/std) 55 | 56 | for problem in keys: 57 | y[problem].append(save['labels'][problem]) 58 | i += 1 59 | 60 | for problem in keys: 61 | ystack=np.stack(y[problem], axis=0) 62 | 63 | ystack2=np.concatenate(ystack, axis=0) 64 | ystack2=np.hstack(ystack2) 65 | lab, count=np.unique(ystack2, return_counts=True) 66 | class_weights.append(class_weight.compute_class_weight('balanced', np.unique(y[problem]), ystack2)) 67 | weights_t=np.zeros((ystack.shape)) 68 | for j in range(len(lab)): 69 | p=np.where(ystack==lab[j]) 70 | weights_t[p]=class_weights[-1][j] 71 | weights.append(weights_t[:,:,0]) 72 | if len(lab)>1: 73 | y2.append(np_utils.to_categorical(ystack)) 74 | else: 75 | da=np.zeros((batch_size,ystack.shape[1], 2)) 76 | da[:,:,0]=1 77 | y2.append(da) 78 | 79 | seq_batch=np.stack(seq_batch, axis=0) 80 | yield seq_batch, y2, weights 81 | 82 | 83 | def generate_data_test(directory, batch_size, mu, std): 84 | i = 0 85 | file_list = os.listdir(directory) 86 | file_list.sort() 87 | while True: 88 | seq_batch = [] 89 | for b in range(batch_size): 90 | with open(directory+file_list[i], 'rb') as f: 91 | save = pickle.load(f) 92 | f.close() 93 | seq_batch.append((save['features']-mu)/std) 94 | i+=1 95 | seq_batch=np.stack(seq_batch, axis=0) 96 | yield seq_batch 97 | 98 | 99 | 100 | 101 | def get_test_labels(directory, batch_size): 102 | i = 0 103 | file_list = os.listdir(directory) 104 | file_list.sort() 105 | keys=Phon.get_list_phonological_keys() 106 | 107 | y={} 108 | for k in keys: 109 | y[k]=[] 110 | 111 | for i in range(len(file_list)): 112 | with open(directory+file_list[i], 'rb') as f: 113 | save = pickle.load(f) 114 | f.close() 115 | for problem in keys: 116 | y[problem].append(save['labels'][problem]) 117 | 118 | for problem in keys: 119 | y[problem]=np.stack(y[problem], axis=0) 120 | return y 121 | 122 | 123 | 124 | def DeepArch(input_size, GRU_size, hidden_size, num_labels, names, Learning_rate, recurrent_droput_prob): 125 | 
if __name__=="__main__":

    # Train the multi-task phonological RNN.
    # usage: python main_train_RNN_MT.py <dir_feat_train> <dir_feat_test> <dir_results>
    if len(sys.argv)!=4:
        # FIX: the usage message had its <placeholders> stripped; restored so the
        # error actually tells the user which arguments are expected.
        print("python main_train_RNN_MT.py <dir_feat_train> <dir_feat_test> <dir_results>")
        sys.exit()

    file_feat_train=sys.argv[1]
    file_feat_test=sys.argv[2]
    file_results=sys.argv[3]

    # one pickled (features, labels) file per utterance
    Nfiles_train=len(os.listdir(file_feat_train))
    Nfiles_test=len(os.listdir(file_feat_test))

    if not os.path.exists(file_results):
        os.makedirs(file_results)

    # keep only the weights of the best epoch (lowest val_loss)
    checkpointer = ModelCheckpoint(filepath=file_results+'weights.hdf5', verbose=1, save_best_only=True)

    # reuse the feature scaler when it was already computed in a previous run
    if os.path.exists(file_results+'mu.npy'):
        mu=np.load(file_results+"mu.npy")
        std=np.load(file_results+"std.npy")
    else:
        mu, std=get_scaler(file_feat_train)
        np.save(file_results+"mu.npy", mu)
        np.save(file_results+"std.npy", std)

    # hyper-parameters
    input_size=(40,34)      # (time-frames, features) per sequence
    GRU_size=128
    hidden=128
    keys=Phon.get_list_phonological_keys()
    num_labels=[2 for j in range(len(keys))]   # binary decision per phonological class
    Learning_rate=0.0001
    recurrent_droput_prob=0.0
    epochs=1000
    batch_size=64

    modelPH=DeepArch(input_size, GRU_size, hidden, num_labels, keys, Learning_rate, recurrent_droput_prob)
    print(modelPH.summary())

    steps_per_epoch=int(Nfiles_train/batch_size)
    validation_steps=int(Nfiles_test/batch_size)

    # resume from a previous checkpoint when available
    if os.path.exists(file_results+'weights.hdf5'):
        modelPH.load_weights(file_results+'weights.hdf5')

    earlystopper = EarlyStopping(monitor='val_loss', patience=15, verbose=0)
    history=modelPH.fit_generator(generate_data(file_feat_train, batch_size, mu, std), steps_per_epoch=steps_per_epoch, workers=4, use_multiprocessing=True,
                                  epochs=epochs, shuffle=True, validation_data=generate_data(file_feat_test, batch_size, mu, std),
                                  verbose=1, callbacks=[earlystopper, checkpointer], validation_steps=validation_steps)

    # training curves (log scale so the early epochs do not dominate the plot)
    plt.figure()
    plt.plot(np.log(history.history['loss']))
    plt.plot(np.log(history.history['val_loss']))
    plt.xlabel("epochs")
    plt.ylabel("log-Loss")
    plt.savefig(file_results+'Loss.png')
    plt.close('all')

    # persist the architecture and the final weights
    model_json = modelPH.to_json()
    with open(file_results+"model.json", "w") as json_file:
        json_file.write(model_json)
    try:
        modelPH.save_weights(file_results+'model.h5')
    except Exception as e:
        # FIX: was a bare `except:` printing only a decorative banner; report
        # the actual reason the weights could not be written.
        print('FILE '+file_results+'model.h5 could not be saved:', e)

    # ---- frame-level evaluation on the test set ----
    np.set_printoptions(precision=4)
    batch_size_val=1
    validation_steps=int(Nfiles_test/batch_size_val)

    ypred=modelPH.predict_generator(generate_data_test(file_feat_test, batch_size_val, mu, std), steps=validation_steps)

    # NOTE(review): predictions are generated with batch_size_val=1 but the
    # reference labels are collected with batch_size=64 -- confirm that both
    # generators walk the test files in the same order and count.
    yt=get_test_labels(file_feat_test, batch_size)

    # FIX: write params.csv through a context manager so the file is closed
    # even if a metric computation raises.
    with open(file_results+"params.csv", "w") as F:
        F.write("class, acc_train, acc_dev, loss, val_loss, epochs_run, Fscore, precision, recall\n")
        for e, problem in enumerate(keys):
            # flatten (sequences, frames) -> frames for frame-level metrics
            ypredv=np.argmax(ypred[e], axis=2)
            ypredv=np.concatenate(ypredv, axis=0)
            ytv=np.concatenate(yt[problem],0)

            print(ytv.shape, ypredv.shape)
            dfclass=classification_report(ytv, ypredv, digits=4)
            print(dfclass)

            class_names=["not "+problem, problem]
            ax2=plot_confusion_matrix(ytv, ypredv, file_res=file_results+problem+"cm.png", classes=class_names, normalize=True,
                                      title='Normalized confusion matrix')

            prec=precision_score(ytv, ypredv, average='weighted')
            rec=recall_score(ytv, ypredv, average='weighted')
            f1=f1_score(ytv, ypredv, average='weighted')

            # one CSV row per phonological class
            content=problem+", "+str(history.history[problem+"_categorical_accuracy"][-1])+", "+str(history.history["val_"+problem+"_categorical_accuracy"][-1])+", "
            content+=str(history.history[problem+'_loss'][-1])+", "+str(history.history['val_'+problem+'_loss'][-1])+", "
            content+=str(len(history.history["loss"]))+", "+str(f1)+", "+str(prec)+", "+str(rec)+'\n'
            F.write(content)
/phonet/train/main_train_RNN_phoneme.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | from six.moves import cPickle as pickle 4 | 5 | from keras.layers import Input, BatchNormalization, Bidirectional, GRU, Permute, Reshape, Lambda, Dense, RepeatVector, multiply, TimeDistributed, Dropout, LSTM 6 | from keras.utils import np_utils 7 | from keras.models import Model 8 | from keras import optimizers 9 | import keras.backend as K 10 | from keras.callbacks import EarlyStopping, ModelCheckpoint 11 | 12 | import numpy as np 13 | import os 14 | from utils import plot_confusion_matrix 15 | from sklearn.metrics import classification_report, recall_score, precision_score, f1_score 16 | #import pandas as pd 17 | 18 | import matplotlib 19 | matplotlib.use('agg') 20 | import matplotlib.pyplot as plt 21 | from sklearn.utils import class_weight 22 | 23 | from utils import confusion_matrix, get_scaler 24 | from Phonological import Phonological 25 | 26 | Phon=Phonological() 27 | 28 | 29 | def generate_data(directory, batch_size, problem, mu, std, num_labels): 30 | i = 0 31 | file_list = os.listdir(directory) 32 | file_list.sort() 33 | while True: 34 | seq_batch = [] 35 | y=[] 36 | for b in range(batch_size): 37 | if i == len(file_list): 38 | i = 0 39 | np.random.shuffle(file_list) 40 | with open(directory+file_list[i], 'rb') as f: 41 | save = pickle.load(f) 42 | f.close() 43 | seq_batch.append((save['features']-mu)/std) 44 | y.append(save['labels'][problem]) 45 | i += 1 46 | y=np.stack(y, axis=0) 47 | 48 | y2=np_utils.to_categorical(y) 49 | ystack=np.concatenate(y, axis=0) 50 | ystack=np.hstack(ystack) 51 | lab, count=np.unique(ystack, return_counts=True) 52 | class_weights=class_weight.compute_class_weight('balanced', np.unique(y), ystack[0,:]) 53 | weights=np.zeros((y.shape)) 54 | 55 | for j in range(len(lab)): 56 | p=np.where(y==lab[j]) 57 | weights[p]=class_weights[j] 58 | 59 | #print(num_labels, len(lab), 
y2[:,:,0,:].shape, np.unique(y)) 60 | # if len(lab) ") 126 | sys.exit() 127 | 128 | file_feat_train=sys.argv[1] 129 | file_feat_test=sys.argv[2] 130 | file_results=sys.argv[3] 131 | problem="phoneme_code" 132 | 133 | 134 | Nfiles_train=len(os.listdir(file_feat_train)) 135 | Nfiles_test=len(os.listdir(file_feat_test)) 136 | 137 | 138 | if not os.path.exists(file_results): 139 | os.makedirs(file_results) 140 | 141 | 142 | 143 | checkpointer = ModelCheckpoint(filepath=file_results+'phonemes_weights.hdf5', verbose=1, save_best_only=True) 144 | 145 | #perc=test_labels(file_feat_test) 146 | #print("perc_classes=", perc) 147 | if os.path.exists(file_results+"mu.npy"): 148 | 149 | mu=np.load(file_results+"mu.npy") 150 | std=np.load(file_results+"std.npy") 151 | else: 152 | mu, std=get_scaler(file_feat_train) 153 | 154 | np.save(file_results+"mu.npy", mu) 155 | np.save(file_results+"std.npy", std) 156 | 157 | 158 | phonemes=Phon.get_list_phonemes() 159 | input_size=(40,34) 160 | GRU_size=128 161 | hidden=128 162 | num_labels=len(phonemes) 163 | Learning_rate=0.0005 164 | recurrent_droput_prob=0.0 165 | epochs=1000 166 | batch_size=64 167 | 168 | modelPH=DeepArch(input_size, GRU_size, hidden, num_labels, Learning_rate, recurrent_droput_prob) 169 | print(modelPH.summary()) 170 | 171 | steps_per_epoch=int(Nfiles_train/batch_size) 172 | validation_steps=int(Nfiles_test/batch_size) 173 | 174 | 175 | if os.path.exists(file_results+'phonemes_weights.hdf5'): 176 | modelPH.load_weights(file_results+'phonemes_weights.hdf5') 177 | 178 | earlystopper = EarlyStopping(monitor='val_loss', patience=3, verbose=0) 179 | history=modelPH.fit_generator(generate_data(file_feat_train, batch_size, problem, mu, std, num_labels), steps_per_epoch=steps_per_epoch, #workers=4, use_multiprocessing=False, 180 | epochs=epochs, shuffle=True, validation_data=generate_data(file_feat_test, batch_size, problem, mu, std, num_labels), 181 | verbose=1, callbacks=[earlystopper, checkpointer], 
validation_steps=validation_steps) 182 | 183 | plt.figure() 184 | plt.plot(np.log(history.history['loss'])) 185 | plt.plot(np.log(history.history['val_loss'])) 186 | plt.xlabel("epochs") 187 | plt.ylabel("log-Loss") 188 | plt.savefig(file_results+'Loss.png') 189 | #plt.show() 190 | plt.close('all') 191 | 192 | 193 | model_json = modelPH.to_json() 194 | with open(file_results+problem+".json", "w") as json_file: 195 | json_file.write(model_json) 196 | try: 197 | modelPH.save_weights(file_results+problem+'.h5') 198 | except: 199 | print('------------------------------------------------------------------------------------------------------------') 200 | print('┌───────────────────────────────────────────────────────────────────────────────────────────────────────────┐') 201 | print('| |') 202 | print('| FILE '+file_results+problem+'.h5'+' |') 203 | print('| could not be saved |') 204 | print('| |') 205 | print('└────────────────────────────────────────────────────────────────────────────────────────────────────────────┘') 206 | print('------------------------------------------------------------------------------------------------------------') 207 | 208 | np.set_printoptions(precision=4) 209 | batch_size_val=1 210 | validation_steps=int(Nfiles_test/batch_size_val) 211 | 212 | ypred=modelPH.predict_generator(generate_data_test(file_feat_test, batch_size_val, mu, std), steps=validation_steps) 213 | 214 | ypredv=np.argmax(ypred, axis=2) 215 | ypredv=np.concatenate(ypredv, axis=0) 216 | 217 | yt=get_test_labels(file_feat_test, batch_size, problem) 218 | ytv=np.concatenate(yt,0) 219 | print(ytv.shape, ypredv.shape, ypred.shape) 220 | dfclass=classification_report(ytv, ypredv,digits=4) 221 | 222 | 223 | print(dfclass) 224 | 225 | class_names=["not "+problem, problem] 226 | 227 | # Plot non-normalized confusion matrix 228 | #ax1=plot_confusion_matrix(yt, ypredv, classes=class_names, 229 | # title='Confusion matrix, without normalization') 230 | 231 | # Plot normalized 
confusion matrix 232 | ax2=plot_confusion_matrix(ytv, ypredv, file_res=file_results+"/cm.png", classes=class_names, normalize=True, 233 | title='Normalized confusion matrix') 234 | 235 | prec=precision_score(ytv, ypredv, average='weighted') 236 | rec=recall_score(ytv, ypredv, average='weighted') 237 | f1=f1_score(ytv, ypredv, average='weighted') 238 | 239 | F=open(file_results+"params.csv", "w") 240 | header="acc_train, acc_dev, loss, val_loss, epochs_run, Fscore, precision, recall\n" 241 | content=str(history.history["categorical_accuracy"][-1])+", "+str(history.history["val_categorical_accuracy"][-1])+", " 242 | content+=str(history.history["loss"][-1])+", "+str(history.history["val_loss"][-1])+", " 243 | content+=str(len(history.history["loss"]))+", "+str(f1)+", "+str(prec)+", "+str(rec) 244 | 245 | F.write(header) 246 | F.write(content) 247 | F.close() 248 | 249 | 250 | -------------------------------------------------------------------------------- /phonet/train/read_textgrids.py: -------------------------------------------------------------------------------- 1 | 2 | import sys 3 | import os 4 | from scipy.io.wavfile import read 5 | import numpy as np 6 | import pysptk.sptk as sptk 7 | from six.moves import cPickle as pickle 8 | import pandas as pd 9 | PATH=os.path.dirname(os.path.abspath(__file__)) 10 | sys.path.append(PATH) 11 | from Phonological import Phonological 12 | from tqdm import tqdm 13 | 14 | 15 | def phoneme2list_phonological(phoneme): 16 | Phon=Phonological() 17 | list_phonological=Phon.get_list_phonological() 18 | list_phon_values=np.zeros(len(list_phonological)) 19 | keys=list(list_phonological.keys()) 20 | for j in range(len(list_phonological.keys())): 21 | if phoneme in list_phonological[keys[j]]: 22 | list_phon_values[j]=1 23 | return list_phon_values,list_phonological 24 | 25 | 26 | def phoneme2number(phoneme): 27 | Phon=Phonological() 28 | list_phonemes=Phon.get_list_phonemes() 29 | number=np.where(list_phonemes==phoneme)[0] 30 | if 
len(number)>0: 31 | return number 32 | else: 33 | print("phoneme:*"+ phoneme+"*is not in the list") 34 | sys.exit() 35 | return np.nan 36 | 37 | 38 | def read_textgrid(list_textgrid): 39 | time_shift=0.01 40 | pos_start_phonemes=list_textgrid.index("item[3]:") 41 | n_phonemes=int(list_textgrid[pos_start_phonemes+5].replace("intervals:size=","")) 42 | list_phonemes=[list_textgrid[j] for j in range(pos_start_phonemes+6,len(list_textgrid))] 43 | Phon=Phonological() 44 | dict_1=Phon.get_d1() 45 | dict_2=Phon.get_d2() 46 | 47 | for j in range(1,n_phonemes+1): 48 | pos_phoneme=list_phonemes.index("intervals["+str(j)+"]:") 49 | xmin_line=list_phonemes[pos_phoneme+1] 50 | dict_1["xmin"].append(float(xmin_line.replace("xmin=",""))) 51 | xmax_line=list_phonemes[pos_phoneme+2] 52 | dict_1["xmax"].append(float(xmax_line.replace("xmax=",""))) 53 | phoneme_line=list_phonemes[pos_phoneme+3] 54 | phoneme_=phoneme_line.replace("text=","") 55 | phoneme=phoneme_.replace('"','') 56 | 57 | dict_1["phoneme"].append(phoneme) 58 | dict_1["phoneme_code"].append(phoneme2number(phoneme)) 59 | list_phonological,list_keys=phoneme2list_phonological(phoneme) 60 | 61 | keys=list(list_keys.keys()) 62 | for k in range(len(keys)): 63 | dict_1[keys[k]].append(list_phonological[k]) 64 | 65 | start=0. 
def test_labels(directory):
    """Return, per phonological class, the mean fraction of frames labeled 1.

    Parameters
    ----------
    directory: path with the pickled label files (one dict per utterance,
        whose 'labels' entry maps class name -> binary frame sequence).

    Returns
    -------
    1-D numpy array aligned with Phon.get_list_phonological_keys().
    """
    file_list = os.listdir(directory)
    file_list.sort()
    keys=Phon.get_list_phonological_keys()
    percall=np.zeros(len(keys))
    # FIX: removed a dead initial read of file_list[0] (its result was
    # immediately overwritten) and the unused `percall2` accumulator.
    pbar=tqdm(range(len(file_list)))
    for j in pbar:
        pbar.set_description("Processing %s" % file_list[j])
        with open(directory+file_list[j], 'rb') as f:
            save = pickle.load(f)
        seq=save['labels']
        # fraction of positive frames for each phonological class in this file
        perc1=np.zeros(len(keys))
        for e, k in enumerate(keys):
            perc1[e]=np.mean(seq[k])
        percall+=perc1
    # average over files
    percall=percall/len(file_list)
    return percall


def get_scaler(directory):
    """Compute per-bin mean and standard deviation over all feature files.

    Two passes over the pickled files in *directory*: the first accumulates
    the mean of the (40, 34) 'features' matrices, the second the squared
    deviations from that mean.

    FIX: the previous version returned the *variance* (np.sqrt was missing),
    so (x - mu)/std over-normalized the features.  NOTE(review): std.npy
    files saved by the old code therefore hold variances and should be
    regenerated before being reused.

    Returns
    -------
    mu, std: (40, 34) numpy arrays.
    """
    file_list = os.listdir(directory)
    file_list.sort()
    seq_sum=np.zeros((40,34))
    seq_sq=np.zeros((40,34))
    nans=0
    infs=0
    pbar=tqdm(range(len(file_list)))
    for j in pbar:
        pbar.set_description("Processing %s" % file_list[j])
        with open(directory+file_list[j], 'rb') as f:
            save = pickle.load(f)
        seq=save['features']
        seq_sum+=seq
        # count (but do not drop) corrupted feature matrices, for diagnostics
        if np.sum(np.isnan(seq))>0:
            nans+=1
        if np.sum(np.isinf(seq))>0:
            infs+=1
    N=len(file_list)
    mu=seq_sum/N

    print("--------------------------")
    print("NAN", nans)
    print("INF", infs)

    # second pass: squared deviations from the mean
    pbar2=tqdm(range(len(file_list)))
    for j in pbar2:
        pbar2.set_description("Processing %s" % file_list[j])
        with open(directory+file_list[j], 'rb') as f:
            save = pickle.load(f)
        seq=save['features']
        seq_sq+=(seq-mu)**2

    std=np.sqrt(seq_sq/N)   # FIX: take the square root of the variance

    # bins with zero deviation would make (x - mu)/std divide by zero;
    # FIX: report the true count of zero entries, not the number of rows
    # np.where happened to return for a 2-D array.
    print("std0", np.sum(std==0))

    return mu, std


def plot_confusion_matrix(y_true, y_pred, classes, file_res,
                          normalize=False,
                          title=None,
                          cmap=plt.cm.Blues):
    """
    Print the confusion matrix, plot it, and save the figure to *file_res*.
    Normalization (per true class) can be applied by setting `normalize=True`.
    Returns the matplotlib Axes with the plot.
    """
    if not title:
        title = 'Normalized confusion matrix' if normalize else 'Confusion matrix, without normalization'

    # Compute confusion matrix
    cm = confusion_matrix(y_true, y_pred)
    if normalize:
        # row-normalize so each true class sums to 1
        cm = cm.astype('float') / cm.sum(axis=1)[:, np.newaxis]
        print("Normalized confusion matrix")
    else:
        print('Confusion matrix, without normalization')
    np.set_printoptions()
    print(cm)

    fig, ax = plt.subplots()
    im = ax.imshow(cm, interpolation='nearest', cmap=cmap)
    ax.figure.colorbar(im, ax=ax)
    ax.set(xticks=np.arange(cm.shape[1]),
           yticks=np.arange(cm.shape[0]),
           xticklabels=classes, yticklabels=classes,
           title=title,
           ylabel='True phoneme',
           xlabel='Predicted phoneme')

    # Rotate the tick labels and set their alignment.
    plt.setp(ax.get_xticklabels(), rotation=45, ha="right",
             rotation_mode="anchor")

    # cell annotations: percentages when normalized, raw counts otherwise
    fmt = '.0f' if normalize else 'd'
    thresh = cm.max() / 2.
    for i in range(cm.shape[0]):
        for j in range(cm.shape[1]):
            # FIX: raw counts were multiplied by 100 even when normalize=False
            val = 100*cm[i, j] if normalize else cm[i, j]
            ax.text(j, i, format(val, fmt),
                    ha="center", va="center",
                    color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.savefig(file_res)
    return ax
author='J. C. Vasquez-Correa', 26 | author_email='juan.vasquez@fau.de', 27 | url='https://github.com/jcvasquezc/phonet', 28 | download_url='https://github.com/jcvasquezc/phonet/archive/0.3.7.tar.gz', 29 | license='MIT', 30 | install_requires=install_requires, 31 | packages=['phonet'], 32 | package_data={'': ['audios/*', 'models/*']}, 33 | keywords = ['phonological', 'speech', 'speech features', 'articulatory features', 'phoneme recognition'], 34 | dependency_links=['git+git://github.com/jameslyons/python_speech_features'], 35 | classifiers=[ 36 | 'Development Status :: 4 - Beta', # Chose either "3 - Alpha", "4 - Beta" or "5 - Production/Stable" as the current state of your package 37 | 'Intended Audience :: Developers', # Define that your audience are developers 38 | 'Topic :: Software Development :: Build Tools', 39 | 'License :: OSI Approved :: MIT License', # Again, pick a license 40 | 'Programming Language :: Python :: 3', #Specify which pyhton versions that you want to support 41 | 'Programming Language :: Python :: 3.4', 42 | 'Programming Language :: Python :: 3.5', 43 | 'Programming Language :: Python :: 3.6', 44 | ], 45 | 46 | ) 47 | 48 | 49 | 50 | 51 | 52 | 53 | 54 | 55 | 56 | 57 | -------------------------------------------------------------------------------- /tests/test_phonet.py: -------------------------------------------------------------------------------- 1 | # -*- coding: utf-8 -*- 2 | """ 3 | Created on Feb 28 2019 4 | @author: J. C. 
if __name__=="__main__":

    PATH=os.path.dirname(os.path.abspath(__file__))

    # all figures/CSV outputs produced by these examples go here
    path_results=os.path.join(PATH, "examples")

    if not os.path.exists(path_results):
        os.makedirs(path_results)

    ## get the "stop" phonological posterior from a single file
    file_audio=PATH+"/audios/pataka.wav"
    file_feat=path_results+"/pataka"
    phon=Phonet(["stop"])
    phon.get_phon_wav(file_audio, file_feat, True)

    # get the "nasal" phonological posterior from a single file
    file_audio=PATH+"/audios/sentence.wav"
    file_feat=path_results+"/sentence_nasal"
    phon=Phonet(["nasal"])
    phon.get_phon_wav(file_audio, file_feat, True)

    # get the "strident" phonological posterior from a single file
    file_feat=path_results+"/sentence_strident"
    phon=Phonet(["strident"])
    phon.get_phon_wav(file_audio, file_feat, True)

    # get "strident, nasal, and back" phonological posteriors from a single file
    file_feat=path_results+"/sentence_all"
    phon=Phonet(["strident", "nasal", "back"])
    phon.get_phon_wav(file_audio, file_feat, True)

    # compute the posteriorgram of an audio file for several phonological classes
    phon=Phonet(["vocalic", "strident", "nasal", "back", "stop", "pause"])
    phon.get_posteriorgram(file_audio)

    # get phonological posteriors for every audio file in a directory
    # FIX: removed the unused `directory=PATH+"/phonclasses/"` assignment;
    # get_phon_path already receives the audio folder directly.
    phon=Phonet(["vocalic", "strident", "nasal", "back", "stop", "pause"])
    phon.get_phon_path(PATH+"/audios/", path_results)

    ## get the PLLR features from an audio file
    phon=Phonet(["all"])
    PLLR=phon.get_PLLR(file_audio, plot_flag=True)
    print(PLLR.head())
--------------------------------------------------------------------------------