├── .Rhistory
├── .gitignore
├── .readthedocs.yaml
├── LICENSE.md
├── README.md
├── docs
├── .readthedocs.yml
├── Makefile
├── _static
│ ├── basic.css
│ ├── css
│ │ ├── badge_only.css
│ │ └── theme.css
│ ├── doctools.js
│ ├── documentation_options.js
│ ├── file.png
│ ├── fonts
│ │ ├── Inconsolata-Bold.ttf
│ │ ├── Inconsolata-Regular.ttf
│ │ ├── Inconsolata.ttf
│ │ ├── Lato-Bold.ttf
│ │ ├── Lato-Regular.ttf
│ │ ├── Lato
│ │ │ ├── lato-bold.eot
│ │ │ ├── lato-bold.ttf
│ │ │ ├── lato-bold.woff
│ │ │ ├── lato-bold.woff2
│ │ │ ├── lato-bolditalic.eot
│ │ │ ├── lato-bolditalic.ttf
│ │ │ ├── lato-bolditalic.woff
│ │ │ ├── lato-bolditalic.woff2
│ │ │ ├── lato-italic.eot
│ │ │ ├── lato-italic.ttf
│ │ │ ├── lato-italic.woff
│ │ │ ├── lato-italic.woff2
│ │ │ ├── lato-regular.eot
│ │ │ ├── lato-regular.ttf
│ │ │ ├── lato-regular.woff
│ │ │ └── lato-regular.woff2
│ │ ├── RobotoSlab-Bold.ttf
│ │ ├── RobotoSlab-Regular.ttf
│ │ ├── RobotoSlab
│ │ │ ├── roboto-slab-v7-bold.eot
│ │ │ ├── roboto-slab-v7-bold.ttf
│ │ │ ├── roboto-slab-v7-bold.woff
│ │ │ ├── roboto-slab-v7-bold.woff2
│ │ │ ├── roboto-slab-v7-regular.eot
│ │ │ ├── roboto-slab-v7-regular.ttf
│ │ │ ├── roboto-slab-v7-regular.woff
│ │ │ └── roboto-slab-v7-regular.woff2
│ │ ├── fontawesome-webfont.eot
│ │ ├── fontawesome-webfont.svg
│ │ ├── fontawesome-webfont.ttf
│ │ ├── fontawesome-webfont.woff
│ │ └── fontawesome-webfont.woff2
│ ├── jquery-3.2.1.js
│ ├── jquery.js
│ ├── js
│ │ ├── modernizr.min.js
│ │ └── theme.js
│ ├── language_data.js
│ ├── minus.png
│ ├── plus.png
│ ├── pygments.css
│ ├── searchtools.js
│ ├── underscore-1.3.1.js
│ └── underscore.js
├── changelog.rst
├── conf.py
├── index.rst
├── intro.rst
├── make.bat
├── mneflow_api.rst
├── models.rst
└── requirements.txt
├── examples
├── .ipynb
├── continuous_example.py
├── mneflow_example_tf2.ipynb
├── mneflow_save_restore.ipynb
├── own_graph_example.ipynb
├── regression_example.ipynb
└── sequence_data_example.ipynb
├── mneflow
├── __init__.py
├── data.py
├── layers.py
├── models.py
└── utils.py
├── py3ml.yml
├── requirements.txt
├── scripts
└── basic_example.py
└── setup.py
/.Rhistory:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/.Rhistory
--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
1 | *.pyc
2 | *.pyo
3 | scripts/*
4 | docs/_build/*
5 | examples/*.py
6 | examples/.ipynb_checkpoints/
7 | build
8 | datasets
9 | dist
10 | tests
11 | mneflow.egg-info
12 | tfr
13 |
--------------------------------------------------------------------------------
/.readthedocs.yaml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | version: 2
6 |
7 | build:
8 | os: "ubuntu-22.04"
9 | tools:
10 | python: "3.8"
11 |
12 | python:
13 | install:
14 | - requirements: docs/requirements.txt
15 | - method: pip
16 | path: .
17 |
18 | sphinx:
19 | configuration: docs/conf.py
--------------------------------------------------------------------------------
/LICENSE.md:
--------------------------------------------------------------------------------
1 | BSD 3-Clause License
2 |
3 | Copyright (c) 2019, Ivan Zubarev
4 | All rights reserved.
5 |
6 | Redistribution and use in source and binary forms, with or without
7 | modification, are permitted provided that the following conditions are met:
8 |
9 | 1. Redistributions of source code must retain the above copyright notice, this
10 | list of conditions and the following disclaimer.
11 |
12 | 2. Redistributions in binary form must reproduce the above copyright notice,
13 | this list of conditions and the following disclaimer in the documentation
14 | and/or other materials provided with the distribution.
15 |
16 | 3. Neither the name of the copyright holder nor the names of its
17 | contributors may be used to endorse or promote products derived from
18 | this software without specific prior written permission.
19 |
20 | THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
21 | AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
22 | IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
23 | DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
24 | FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
25 | DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
26 | SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
27 | CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
28 | OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
29 | OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
30 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # MNEflow
 2 | Neural networks for EEG-MEG decoding with MNE-Python and TensorFlow.
3 |
4 | # Installation
5 | ```
6 | pip install mneflow
7 | ```
8 |
9 | # Dependencies
10 |
11 | - tensorflow >= 2.1.0
12 | - mne > 0.24.0
13 |
14 | # Documentation
15 |
16 | API reference is available in the [Documentation](https://mneflow.readthedocs.io/en/latest/).
17 |
18 | Also check the [example notebooks](https://mneflow.readthedocs.io/en/latest/intro.html#examples).
19 |
20 | # References
21 | Zubarev I, Vranou G, Parkkonen L. MNEflow: Neural networks for EEG/MEG decoding and interpretation [link](https://www.sciencedirect.com/science/article/pii/S2352711021001795)
22 |
23 | When using the implemented models please cite:
24 |
25 | ### for LF-CNN or VAR-CNN
26 | Zubarev I, Zetter R, Halme HL, Parkkonen L. Adaptive neural network classifier for decoding MEG signals. Neuroimage. 2019 May 4;197:425-434. [link](https://www.sciencedirect.com/science/article/pii/S1053811919303544?via%3Dihub)
27 |
28 | ```
29 | @article{Zubarev2019AdaptiveSignals.,
30 | title = {{Adaptive neural network classifier for decoding MEG signals.}},
31 | year = {2019},
32 | journal = {NeuroImage},
33 | author = {Zubarev, Ivan and Zetter, Rasmus and Halme, Hanna-Leena and Parkkonen, Lauri},
34 | month = {5},
35 | pages = {425--434},
36 | volume = {197},
37 | url = {https://linkinghub.elsevier.com/retrieve/pii/S1053811919303544 http://www.ncbi.nlm.nih.gov/pubmed/31059799},
38 | doi = {10.1016/j.neuroimage.2019.04.068},
39 | issn = {1095-9572},
40 | pmid = {31059799},
41 | keywords = {Brain–computer interface, Convolutional neural network, Magnetoencephalography}
42 | }
43 | ```
44 |
45 | ### for EEGNet
46 | ```
47 | @article{Lawhern2018,
48 | author={Vernon J Lawhern and Amelia J Solon and Nicholas R Waytowich and Stephen M Gordon and Chou P Hung and Brent J Lance},
49 | title={EEGNet: a compact convolutional neural network for EEG-based brain–computer interfaces},
50 | journal={Journal of Neural Engineering},
51 | volume={15},
52 | number={5},
53 | pages={056013},
54 | url={http://stacks.iop.org/1741-2552/15/i=5/a=056013},
55 | year={2018}
56 | }
57 | ```
58 |
59 |
60 | ### for FBCSP-ShallowNet and Deep4
61 | ```
62 | @article{Schirrmeister2017DeepVisualization,
63 | title = {{Deep learning with convolutional neural networks for EEG decoding and visualization}},
64 | year = {2017},
65 | journal = {Human Brain Mapping},
66 | author = {Schirrmeister, Robin Tibor and Springenberg, Jost Tobias and Fiederer, Lukas Dominique Josef and Glasstetter, Martin and Eggensperger, Katharina and Tangermann, Michael and Hutter, Frank and Burgard, Wolfram and Ball, Tonio},
67 | number = {11},
68 | month = {11},
69 | pages = {5391--5420},
70 | volume = {38},
71 | url = {http://doi.wiley.com/10.1002/hbm.23730},
72 | doi = {10.1002/hbm.23730},
73 | issn = {10659471},
74 | keywords = {EEG analysis, brain, brain mapping, computer interface, electroencephalography, end‐to‐end learning, machine interface, machine learning, model interpretability}
75 | }
76 | ```
77 |
--------------------------------------------------------------------------------
/docs/.readthedocs.yml:
--------------------------------------------------------------------------------
1 | # .readthedocs.yml
2 | # Read the Docs configuration file
3 | # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
4 |
5 | # Required
6 | version: 2
7 |
8 | # Build documentation in the docs/ directory with Sphinx
9 | sphinx:
10 | configuration: docs/conf.py
11 |
12 | # Optionally build your docs in additional formats such as PDF
13 | formats:
14 | - pdf
15 |
16 | # Optionally set the version of Python and requirements required to build your docs
17 | python:
18 | version: 3.7
19 | install:
20 | - requirements: docs/requirements.txt
21 |
--------------------------------------------------------------------------------
/docs/Makefile:
--------------------------------------------------------------------------------
1 | # Minimal makefile for Sphinx documentation
2 | #
3 |
4 | # You can set these variables from the command line.
5 | SPHINXOPTS =
6 | SPHINXBUILD = sphinx-build
7 | SOURCEDIR = .
8 | BUILDDIR = _build
9 |
10 | # Put it first so that "make" without argument is like "make help".
11 | help:
12 | @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
13 |
14 | .PHONY: help Makefile
15 |
16 | # Catch-all target: route all unknown targets to Sphinx using the new
17 | # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
18 | %: Makefile
19 | @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
--------------------------------------------------------------------------------
/docs/_static/basic.css:
--------------------------------------------------------------------------------
1 | /*
2 | * basic.css
3 | * ~~~~~~~~~
4 | *
5 | * Sphinx stylesheet -- basic theme.
6 | *
7 | * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
8 | * :license: BSD, see LICENSE for details.
9 | *
10 | */
11 |
12 | /* -- main layout ----------------------------------------------------------- */
13 |
14 | div.clearer {
15 | clear: both;
16 | }
17 |
18 | /* -- relbar ---------------------------------------------------------------- */
19 |
20 | div.related {
21 | width: 100%;
22 | font-size: 90%;
23 | }
24 |
25 | div.related h3 {
26 | display: none;
27 | }
28 |
29 | div.related ul {
30 | margin: 0;
31 | padding: 0 0 0 10px;
32 | list-style: none;
33 | }
34 |
35 | div.related li {
36 | display: inline;
37 | }
38 |
39 | div.related li.right {
40 | float: right;
41 | margin-right: 5px;
42 | }
43 |
44 | /* -- sidebar --------------------------------------------------------------- */
45 |
46 | div.sphinxsidebarwrapper {
47 | padding: 10px 5px 0 10px;
48 | }
49 |
50 | div.sphinxsidebar {
51 | float: left;
52 | width: 230px;
53 | margin-left: -100%;
54 | font-size: 90%;
55 | word-wrap: break-word;
56 | overflow-wrap : break-word;
57 | }
58 |
59 | div.sphinxsidebar ul {
60 | list-style: none;
61 | }
62 |
63 | div.sphinxsidebar ul ul,
64 | div.sphinxsidebar ul.want-points {
65 | margin-left: 20px;
66 | list-style: square;
67 | }
68 |
69 | div.sphinxsidebar ul ul {
70 | margin-top: 0;
71 | margin-bottom: 0;
72 | }
73 |
74 | div.sphinxsidebar form {
75 | margin-top: 10px;
76 | }
77 |
78 | div.sphinxsidebar input {
79 | border: 1px solid #98dbcc;
80 | font-family: sans-serif;
81 | font-size: 1em;
82 | }
83 |
84 | div.sphinxsidebar #searchbox form.search {
85 | overflow: hidden;
86 | }
87 |
88 | div.sphinxsidebar #searchbox input[type="text"] {
89 | float: left;
90 | width: 80%;
91 | padding: 0.25em;
92 | box-sizing: border-box;
93 | }
94 |
95 | div.sphinxsidebar #searchbox input[type="submit"] {
96 | float: left;
97 | width: 20%;
98 | border-left: none;
99 | padding: 0.25em;
100 | box-sizing: border-box;
101 | }
102 |
103 |
104 | img {
105 | border: 0;
106 | max-width: 100%;
107 | }
108 |
109 | /* -- search page ----------------------------------------------------------- */
110 |
111 | ul.search {
112 | margin: 10px 0 0 20px;
113 | padding: 0;
114 | }
115 |
116 | ul.search li {
117 | padding: 5px 0 5px 20px;
118 | background-image: url(file.png);
119 | background-repeat: no-repeat;
120 | background-position: 0 7px;
121 | }
122 |
123 | ul.search li a {
124 | font-weight: bold;
125 | }
126 |
127 | ul.search li div.context {
128 | color: #888;
129 | margin: 2px 0 0 30px;
130 | text-align: left;
131 | }
132 |
133 | ul.keywordmatches li.goodmatch a {
134 | font-weight: bold;
135 | }
136 |
137 | /* -- index page ------------------------------------------------------------ */
138 |
139 | table.contentstable {
140 | width: 90%;
141 | margin-left: auto;
142 | margin-right: auto;
143 | }
144 |
145 | table.contentstable p.biglink {
146 | line-height: 150%;
147 | }
148 |
149 | a.biglink {
150 | font-size: 1.3em;
151 | }
152 |
153 | span.linkdescr {
154 | font-style: italic;
155 | padding-top: 5px;
156 | font-size: 90%;
157 | }
158 |
159 | /* -- general index --------------------------------------------------------- */
160 |
161 | table.indextable {
162 | width: 100%;
163 | }
164 |
165 | table.indextable td {
166 | text-align: left;
167 | vertical-align: top;
168 | }
169 |
170 | table.indextable ul {
171 | margin-top: 0;
172 | margin-bottom: 0;
173 | list-style-type: none;
174 | }
175 |
176 | table.indextable > tbody > tr > td > ul {
177 | padding-left: 0em;
178 | }
179 |
180 | table.indextable tr.pcap {
181 | height: 10px;
182 | }
183 |
184 | table.indextable tr.cap {
185 | margin-top: 10px;
186 | background-color: #f2f2f2;
187 | }
188 |
189 | img.toggler {
190 | margin-right: 3px;
191 | margin-top: 3px;
192 | cursor: pointer;
193 | }
194 |
195 | div.modindex-jumpbox {
196 | border-top: 1px solid #ddd;
197 | border-bottom: 1px solid #ddd;
198 | margin: 1em 0 1em 0;
199 | padding: 0.4em;
200 | }
201 |
202 | div.genindex-jumpbox {
203 | border-top: 1px solid #ddd;
204 | border-bottom: 1px solid #ddd;
205 | margin: 1em 0 1em 0;
206 | padding: 0.4em;
207 | }
208 |
209 | /* -- domain module index --------------------------------------------------- */
210 |
211 | table.modindextable td {
212 | padding: 2px;
213 | border-collapse: collapse;
214 | }
215 |
216 | /* -- general body styles --------------------------------------------------- */
217 |
218 | div.body {
219 | min-width: 450px;
220 | max-width: 800px;
221 | }
222 |
223 | div.body p, div.body dd, div.body li, div.body blockquote {
224 | -moz-hyphens: auto;
225 | -ms-hyphens: auto;
226 | -webkit-hyphens: auto;
227 | hyphens: auto;
228 | }
229 |
230 | a.headerlink {
231 | visibility: hidden;
232 | }
233 |
234 | a.brackets:before,
235 | span.brackets > a:before{
236 | content: "[";
237 | }
238 |
239 | a.brackets:after,
240 | span.brackets > a:after {
241 | content: "]";
242 | }
243 |
244 | h1:hover > a.headerlink,
245 | h2:hover > a.headerlink,
246 | h3:hover > a.headerlink,
247 | h4:hover > a.headerlink,
248 | h5:hover > a.headerlink,
249 | h6:hover > a.headerlink,
250 | dt:hover > a.headerlink,
251 | caption:hover > a.headerlink,
252 | p.caption:hover > a.headerlink,
253 | div.code-block-caption:hover > a.headerlink {
254 | visibility: visible;
255 | }
256 |
257 | div.body p.caption {
258 | text-align: inherit;
259 | }
260 |
261 | div.body td {
262 | text-align: left;
263 | }
264 |
265 | .first {
266 | margin-top: 0 !important;
267 | }
268 |
269 | p.rubric {
270 | margin-top: 30px;
271 | font-weight: bold;
272 | }
273 |
274 | img.align-left, .figure.align-left, object.align-left {
275 | clear: left;
276 | float: left;
277 | margin-right: 1em;
278 | }
279 |
280 | img.align-right, .figure.align-right, object.align-right {
281 | clear: right;
282 | float: right;
283 | margin-left: 1em;
284 | }
285 |
286 | img.align-center, .figure.align-center, object.align-center {
287 | display: block;
288 | margin-left: auto;
289 | margin-right: auto;
290 | }
291 |
292 | .align-left {
293 | text-align: left;
294 | }
295 |
296 | .align-center {
297 | text-align: center;
298 | }
299 |
300 | .align-right {
301 | text-align: right;
302 | }
303 |
304 | /* -- sidebars -------------------------------------------------------------- */
305 |
306 | div.sidebar {
307 | margin: 0 0 0.5em 1em;
308 | border: 1px solid #ddb;
309 | padding: 7px 7px 0 7px;
310 | background-color: #ffe;
311 | width: 40%;
312 | float: right;
313 | }
314 |
315 | p.sidebar-title {
316 | font-weight: bold;
317 | }
318 |
319 | /* -- topics ---------------------------------------------------------------- */
320 |
321 | div.topic {
322 | border: 1px solid #ccc;
323 | padding: 7px 7px 0 7px;
324 | margin: 10px 0 10px 0;
325 | }
326 |
327 | p.topic-title {
328 | font-size: 1.1em;
329 | font-weight: bold;
330 | margin-top: 10px;
331 | }
332 |
333 | /* -- admonitions ----------------------------------------------------------- */
334 |
335 | div.admonition {
336 | margin-top: 10px;
337 | margin-bottom: 10px;
338 | padding: 7px;
339 | }
340 |
341 | div.admonition dt {
342 | font-weight: bold;
343 | }
344 |
345 | div.admonition dl {
346 | margin-bottom: 0;
347 | }
348 |
349 | p.admonition-title {
350 | margin: 0px 10px 5px 0px;
351 | font-weight: bold;
352 | }
353 |
354 | div.body p.centered {
355 | text-align: center;
356 | margin-top: 25px;
357 | }
358 |
359 | /* -- tables ---------------------------------------------------------------- */
360 |
361 | table.docutils {
362 | border: 0;
363 | border-collapse: collapse;
364 | }
365 |
366 | table.align-center {
367 | margin-left: auto;
368 | margin-right: auto;
369 | }
370 |
371 | table caption span.caption-number {
372 | font-style: italic;
373 | }
374 |
375 | table caption span.caption-text {
376 | }
377 |
378 | table.docutils td, table.docutils th {
379 | padding: 1px 8px 1px 5px;
380 | border-top: 0;
381 | border-left: 0;
382 | border-right: 0;
383 | border-bottom: 1px solid #aaa;
384 | }
385 |
386 | table.footnote td, table.footnote th {
387 | border: 0 !important;
388 | }
389 |
390 | th {
391 | text-align: left;
392 | padding-right: 5px;
393 | }
394 |
395 | table.citation {
396 | border-left: solid 1px gray;
397 | margin-left: 1px;
398 | }
399 |
400 | table.citation td {
401 | border-bottom: none;
402 | }
403 |
404 | th > p:first-child,
405 | td > p:first-child {
406 | margin-top: 0px;
407 | }
408 |
409 | th > p:last-child,
410 | td > p:last-child {
411 | margin-bottom: 0px;
412 | }
413 |
414 | /* -- figures --------------------------------------------------------------- */
415 |
416 | div.figure {
417 | margin: 0.5em;
418 | padding: 0.5em;
419 | }
420 |
421 | div.figure p.caption {
422 | padding: 0.3em;
423 | }
424 |
425 | div.figure p.caption span.caption-number {
426 | font-style: italic;
427 | }
428 |
429 | div.figure p.caption span.caption-text {
430 | }
431 |
432 | /* -- field list styles ----------------------------------------------------- */
433 |
434 | table.field-list td, table.field-list th {
435 | border: 0 !important;
436 | }
437 |
438 | .field-list ul {
439 | margin: 0;
440 | padding-left: 1em;
441 | }
442 |
443 | .field-list p {
444 | margin: 0;
445 | }
446 |
447 | .field-name {
448 | -moz-hyphens: manual;
449 | -ms-hyphens: manual;
450 | -webkit-hyphens: manual;
451 | hyphens: manual;
452 | }
453 |
454 | /* -- hlist styles ---------------------------------------------------------- */
455 |
456 | table.hlist td {
457 | vertical-align: top;
458 | }
459 |
460 |
461 | /* -- other body styles ----------------------------------------------------- */
462 |
463 | ol.arabic {
464 | list-style: decimal;
465 | }
466 |
467 | ol.loweralpha {
468 | list-style: lower-alpha;
469 | }
470 |
471 | ol.upperalpha {
472 | list-style: upper-alpha;
473 | }
474 |
475 | ol.lowerroman {
476 | list-style: lower-roman;
477 | }
478 |
479 | ol.upperroman {
480 | list-style: upper-roman;
481 | }
482 |
483 | li > p:first-child {
484 | margin-top: 0px;
485 | }
486 |
487 | li > p:last-child {
488 | margin-bottom: 0px;
489 | }
490 |
491 | dl.footnote > dt,
492 | dl.citation > dt {
493 | float: left;
494 | }
495 |
496 | dl.footnote > dd,
497 | dl.citation > dd {
498 | margin-bottom: 0em;
499 | }
500 |
501 | dl.footnote > dd:after,
502 | dl.citation > dd:after {
503 | content: "";
504 | clear: both;
505 | }
506 |
507 | dl.field-list {
508 | display: flex;
509 | flex-wrap: wrap;
510 | }
511 |
512 | dl.field-list > dt {
513 | flex-basis: 20%;
514 | font-weight: bold;
515 | word-break: break-word;
516 | }
517 |
518 | dl.field-list > dt:after {
519 | content: ":";
520 | }
521 |
522 | dl.field-list > dd {
523 | flex-basis: 70%;
524 | padding-left: 1em;
525 | margin-left: 0em;
526 | margin-bottom: 0em;
527 | }
528 |
529 | dl {
530 | margin-bottom: 15px;
531 | }
532 |
533 | dd > p:first-child {
534 | margin-top: 0px;
535 | }
536 |
537 | dd ul, dd table {
538 | margin-bottom: 10px;
539 | }
540 |
541 | dd {
542 | margin-top: 3px;
543 | margin-bottom: 10px;
544 | margin-left: 30px;
545 | }
546 |
547 | dt:target, span.highlighted {
548 | background-color: #fbe54e;
549 | }
550 |
551 | rect.highlighted {
552 | fill: #fbe54e;
553 | }
554 |
555 | dl.glossary dt {
556 | font-weight: bold;
557 | font-size: 1.1em;
558 | }
559 |
560 | .optional {
561 | font-size: 1.3em;
562 | }
563 |
564 | .sig-paren {
565 | font-size: larger;
566 | }
567 |
568 | .versionmodified {
569 | font-style: italic;
570 | }
571 |
572 | .system-message {
573 | background-color: #fda;
574 | padding: 5px;
575 | border: 3px solid red;
576 | }
577 |
578 | .footnote:target {
579 | background-color: #ffa;
580 | }
581 |
582 | .line-block {
583 | display: block;
584 | margin-top: 1em;
585 | margin-bottom: 1em;
586 | }
587 |
588 | .line-block .line-block {
589 | margin-top: 0;
590 | margin-bottom: 0;
591 | margin-left: 1.5em;
592 | }
593 |
594 | .guilabel, .menuselection {
595 | font-family: sans-serif;
596 | }
597 |
598 | .accelerator {
599 | text-decoration: underline;
600 | }
601 |
602 | .classifier {
603 | font-style: oblique;
604 | }
605 |
606 | .classifier:before {
607 | font-style: normal;
608 | margin: 0.5em;
609 | content: ":";
610 | }
611 |
612 | abbr, acronym {
613 | border-bottom: dotted 1px;
614 | cursor: help;
615 | }
616 |
617 | /* -- code displays --------------------------------------------------------- */
618 |
619 | pre {
620 | overflow: auto;
621 | overflow-y: hidden; /* fixes display issues on Chrome browsers */
622 | }
623 |
624 | span.pre {
625 | -moz-hyphens: none;
626 | -ms-hyphens: none;
627 | -webkit-hyphens: none;
628 | hyphens: none;
629 | }
630 |
631 | td.linenos pre {
632 | padding: 5px 0px;
633 | border: 0;
634 | background-color: transparent;
635 | color: #aaa;
636 | }
637 |
638 | table.highlighttable {
639 | margin-left: 0.5em;
640 | }
641 |
642 | table.highlighttable td {
643 | padding: 0 0.5em 0 0.5em;
644 | }
645 |
646 | div.code-block-caption {
647 | padding: 2px 5px;
648 | font-size: small;
649 | }
650 |
651 | div.code-block-caption code {
652 | background-color: transparent;
653 | }
654 |
655 | div.code-block-caption + div > div.highlight > pre {
656 | margin-top: 0;
657 | }
658 |
659 | div.code-block-caption span.caption-number {
660 | padding: 0.1em 0.3em;
661 | font-style: italic;
662 | }
663 |
664 | div.code-block-caption span.caption-text {
665 | }
666 |
667 | div.literal-block-wrapper {
668 | padding: 1em 1em 0;
669 | }
670 |
671 | div.literal-block-wrapper div.highlight {
672 | margin: 0;
673 | }
674 |
675 | code.descname {
676 | background-color: transparent;
677 | font-weight: bold;
678 | font-size: 1.2em;
679 | }
680 |
681 | code.descclassname {
682 | background-color: transparent;
683 | }
684 |
685 | code.xref, a code {
686 | background-color: transparent;
687 | font-weight: bold;
688 | }
689 |
690 | h1 code, h2 code, h3 code, h4 code, h5 code, h6 code {
691 | background-color: transparent;
692 | }
693 |
694 | .viewcode-link {
695 | float: right;
696 | }
697 |
698 | .viewcode-back {
699 | float: right;
700 | font-family: sans-serif;
701 | }
702 |
703 | div.viewcode-block:target {
704 | margin: -1px -10px;
705 | padding: 0 10px;
706 | }
707 |
708 | /* -- math display ---------------------------------------------------------- */
709 |
710 | img.math {
711 | vertical-align: middle;
712 | }
713 |
714 | div.body div.math p {
715 | text-align: center;
716 | }
717 |
718 | span.eqno {
719 | float: right;
720 | }
721 |
722 | span.eqno a.headerlink {
723 | position: relative;
724 | left: 0px;
725 | z-index: 1;
726 | }
727 |
728 | div.math:hover a.headerlink {
729 | visibility: visible;
730 | }
731 |
732 | /* -- printout stylesheet --------------------------------------------------- */
733 |
734 | @media print {
735 | div.document,
736 | div.documentwrapper,
737 | div.bodywrapper {
738 | margin: 0 !important;
739 | width: 100%;
740 | }
741 |
742 | div.sphinxsidebar,
743 | div.related,
744 | div.footer,
745 | #top-link {
746 | display: none;
747 | }
748 | }
--------------------------------------------------------------------------------
/docs/_static/css/badge_only.css:
--------------------------------------------------------------------------------
1 | .fa:before{-webkit-font-smoothing:antialiased}.clearfix{*zoom:1}.clearfix:before,.clearfix:after{display:table;content:""}.clearfix:after{clear:both}@font-face{font-family:FontAwesome;font-weight:normal;font-style:normal;src:url("../fonts/fontawesome-webfont.eot");src:url("../fonts/fontawesome-webfont.eot?#iefix") format("embedded-opentype"),url("../fonts/fontawesome-webfont.woff") format("woff"),url("../fonts/fontawesome-webfont.ttf") format("truetype"),url("../fonts/fontawesome-webfont.svg#FontAwesome") format("svg")}.fa:before{display:inline-block;font-family:FontAwesome;font-style:normal;font-weight:normal;line-height:1;text-decoration:inherit}a .fa{display:inline-block;text-decoration:inherit}li .fa{display:inline-block}li .fa-large:before,li .fa-large:before{width:1.875em}ul.fas{list-style-type:none;margin-left:2em;text-indent:-0.8em}ul.fas li .fa{width:.8em}ul.fas li .fa-large:before,ul.fas li .fa-large:before{vertical-align:baseline}.fa-book:before{content:""}.icon-book:before{content:""}.fa-caret-down:before{content:""}.icon-caret-down:before{content:""}.fa-caret-up:before{content:""}.icon-caret-up:before{content:""}.fa-caret-left:before{content:""}.icon-caret-left:before{content:""}.fa-caret-right:before{content:""}.icon-caret-right:before{content:""}.rst-versions{position:fixed;bottom:0;left:0;width:300px;color:#fcfcfc;background:#1f1d1d;font-family:"Lato","proxima-nova","Helvetica Neue",Arial,sans-serif;z-index:400}.rst-versions a{color:#2980B9;text-decoration:none}.rst-versions .rst-badge-small{display:none}.rst-versions .rst-current-version{padding:12px;background-color:#272525;display:block;text-align:right;font-size:90%;cursor:pointer;color:#27AE60;*zoom:1}.rst-versions .rst-current-version:before,.rst-versions .rst-current-version:after{display:table;content:""}.rst-versions .rst-current-version:after{clear:both}.rst-versions .rst-current-version .fa{color:#fcfcfc}.rst-versions .rst-current-version .fa-book{float:left}.rst-versions 
.rst-current-version .icon-book{float:left}.rst-versions .rst-current-version.rst-out-of-date{background-color:#E74C3C;color:#fff}.rst-versions .rst-current-version.rst-active-old-version{background-color:#F1C40F;color:#000}.rst-versions.shift-up{height:auto;max-height:100%;overflow-y:scroll}.rst-versions.shift-up .rst-other-versions{display:block}.rst-versions .rst-other-versions{font-size:90%;padding:12px;color:gray;display:none}.rst-versions .rst-other-versions hr{display:block;height:1px;border:0;margin:20px 0;padding:0;border-top:solid 1px #413d3d}.rst-versions .rst-other-versions dd{display:inline-block;margin:0}.rst-versions .rst-other-versions dd a{display:inline-block;padding:6px;color:#fcfcfc}.rst-versions.rst-badge{width:auto;bottom:20px;right:20px;left:auto;border:none;max-width:300px;max-height:90%}.rst-versions.rst-badge .icon-book{float:none}.rst-versions.rst-badge .fa-book{float:none}.rst-versions.rst-badge.shift-up .rst-current-version{text-align:right}.rst-versions.rst-badge.shift-up .rst-current-version .fa-book{float:left}.rst-versions.rst-badge.shift-up .rst-current-version .icon-book{float:left}.rst-versions.rst-badge .rst-current-version{width:auto;height:30px;line-height:30px;padding:0 6px;display:block;text-align:center}@media screen and (max-width: 768px){.rst-versions{width:85%;display:none}.rst-versions.shift{display:block}}
2 |
--------------------------------------------------------------------------------
/docs/_static/doctools.js:
--------------------------------------------------------------------------------
1 | /*
2 | * doctools.js
3 | * ~~~~~~~~~~~
4 | *
5 | * Sphinx JavaScript utilities for all documentation.
6 | *
7 | * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
8 | * :license: BSD, see LICENSE for details.
9 | *
10 | */
11 |
12 | /**
13 | * select a different prefix for underscore
14 | */
15 | $u = _.noConflict();
16 |
17 | /**
18 | * make the code below compatible with browsers without
19 | * an installed firebug like debugger
20 | if (!window.console || !console.firebug) {
21 | var names = ["log", "debug", "info", "warn", "error", "assert", "dir",
22 | "dirxml", "group", "groupEnd", "time", "timeEnd", "count", "trace",
23 | "profile", "profileEnd"];
24 | window.console = {};
25 | for (var i = 0; i < names.length; ++i)
26 | window.console[names[i]] = function() {};
27 | }
28 | */
29 |
30 | /**
31 | * small helper function to urldecode strings
32 | */
33 | jQuery.urldecode = function(x) {
34 | return decodeURIComponent(x).replace(/\+/g, ' ');
35 | };
36 |
37 | /**
38 | * small helper function to urlencode strings
39 | */
40 | jQuery.urlencode = encodeURIComponent;
41 |
42 | /**
43 | * This function returns the parsed url parameters of the
44 | * current request. Multiple values per key are supported,
45 | * it will always return arrays of strings for the value parts.
46 | */
47 | jQuery.getQueryParameters = function(s) {
48 | if (typeof s === 'undefined')
49 | s = document.location.search;
50 | var parts = s.substr(s.indexOf('?') + 1).split('&');
51 | var result = {};
52 | for (var i = 0; i < parts.length; i++) {
53 | var tmp = parts[i].split('=', 2);
54 | var key = jQuery.urldecode(tmp[0]);
55 | var value = jQuery.urldecode(tmp[1]);
56 | if (key in result)
57 | result[key].push(value);
58 | else
59 | result[key] = [value];
60 | }
61 | return result;
62 | };
63 |
/**
 * highlight a given string on a jquery object by wrapping it in
 * span elements with the given class name.
 *
 * `text` is matched case-insensitively (callers pass it lowercased).
 * Matches inside an SVG subtree get a <tspan> plus a backing <rect>
 * (collected in addItems and inserted after the traversal) instead of
 * an HTML <span>, since HTML elements do not render inside SVG.
 */
jQuery.fn.highlightText = function(text, className) {
  function highlight(node, addItems) {
    if (node.nodeType === 3) {  // text node: look for a match in its value
      var val = node.nodeValue;
      var pos = val.toLowerCase().indexOf(text);
      // skip text that is already highlighted or explicitly opted out
      if (pos >= 0 &&
          !jQuery(node.parentNode).hasClass(className) &&
          !jQuery(node.parentNode).hasClass("nohighlight")) {
        var span;
        // closest() walks up to the first of body/svg/foreignObject;
        // if that is an <svg>, we are in SVG rendering context
        var isInSVG = jQuery(node).closest("body, svg, foreignObject").is("svg");
        if (isInSVG) {
          span = document.createElementNS("http://www.w3.org/2000/svg", "tspan");
        } else {
          span = document.createElement("span");
          span.className = className;
        }
        // wrapper holds only the matched substring
        span.appendChild(document.createTextNode(val.substr(pos, text.length)));
        // Split the text node in place: the inner insertBefore first places
        // the tail (text after the match) before node.nextSibling, then the
        // outer one places the highlight span before that tail.
        node.parentNode.insertBefore(span, node.parentNode.insertBefore(
          document.createTextNode(val.substr(pos + text.length)),
          node.nextSibling));
        // original node keeps only the text before the match
        node.nodeValue = val.substr(0, pos);
        if (isInSVG) {
          // SVG <tspan> cannot carry a background, so draw a <rect>
          // sized to the parent's bounding box behind it
          var rect = document.createElementNS("http://www.w3.org/2000/svg", "rect");
          var bbox = node.parentElement.getBBox();
          rect.x.baseVal.value = bbox.x;
          rect.y.baseVal.value = bbox.y;
          rect.width.baseVal.value = bbox.width;
          rect.height.baseVal.value = bbox.height;
          rect.setAttribute('class', className);
          // defer insertion: mutating here would disturb the traversal
          addItems.push({
            "parent": node.parentNode,
            "target": rect});
        }
      }
    }
    // recurse into children, but never into form controls
    else if (!jQuery(node).is("button, select, textarea")) {
      jQuery.each(node.childNodes, function() {
        highlight(this, addItems);
      });
    }
  }
  var addItems = [];
  var result = this.each(function() {
    highlight(this, addItems);
  });
  // insert the collected SVG backing rects after traversal is done
  for (var i = 0; i < addItems.length; ++i) {
    jQuery(addItems[i].parent).before(addItems[i].target);
  }
  return result;
};
118 |
119 | /*
120 | * backward compatibility for jQuery.browser
121 | * This will be supported until firefox bug is fixed.
122 | */
if (!jQuery.browser) {
  /**
   * Re-create jQuery.uaMatch / jQuery.browser for jQuery builds that
   * dropped them: parse the user-agent string and expose the detected
   * browser name as a truthy flag on jQuery.browser.
   */
  jQuery.uaMatch = function(ua) {
    ua = ua.toLowerCase();

    // try the engine-specific patterns in priority order
    var patterns = [
      /(chrome)[ \/]([\w.]+)/,
      /(webkit)[ \/]([\w.]+)/,
      /(opera)(?:.*version|)[ \/]([\w.]+)/,
      /(msie) ([\w.]+)/
    ];
    var hit = null;
    for (var i = 0; i < patterns.length && !hit; i++) {
      hit = patterns[i].exec(ua);
    }
    // "mozilla" only counts when the UA doesn't claim IE compatibility
    if (!hit && ua.indexOf("compatible") < 0) {
      hit = /(mozilla)(?:.*? rv:([\w.]+)|)/.exec(ua);
    }
    hit = hit || [];

    return {
      browser: hit[1] || "",
      version: hit[2] || "0"
    };
  };
  jQuery.browser = {};
  jQuery.browser[jQuery.uaMatch(navigator.userAgent).browser] = true;
}
142 |
/**
 * Small JavaScript module for the documentation.
 */
var Documentation = {

  /**
   * Entry point, run on DOM ready: apply Firefox anchor workaround,
   * highlight search terms from the URL, wire up the index togglers,
   * and (optionally) enable keyboard navigation.
   */
  init : function() {
    this.fixFirefoxAnchorBug();
    this.highlightSearchWords();
    this.initIndexTable();
    if (DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS) {
      this.initOnKeyListeners();
    }
  },

  /**
   * i18n support
   */
  TRANSLATIONS : {},
  PLURAL_EXPR : function(n) { return n === 1 ? 0 : 1; },
  LOCALE : 'unknown',

  // gettext and ngettext don't access this so that the functions
  // can safely bound to a different name (_ = Documentation.gettext)
  gettext : function(string) {
    var translated = Documentation.TRANSLATIONS[string];
    if (typeof translated === 'undefined')
      return string;
    return (typeof translated === 'string') ? translated : translated[0];
  },

  ngettext : function(singular, plural, n) {
    var translated = Documentation.TRANSLATIONS[singular];
    if (typeof translated === 'undefined')
      return (n == 1) ? singular : plural;
    // BUG FIX: was Documentation.PLURALEXPR(n) — that property does not
    // exist (it is declared as PLURAL_EXPR above and assigned in
    // addTranslations), so any plural lookup threw a TypeError.
    return translated[Documentation.PLURAL_EXPR(n)];
  },

  /**
   * Merge a message catalog (as emitted by Sphinx) into the module:
   * copies the messages and installs the catalog's plural rule/locale.
   */
  addTranslations : function(catalog) {
    for (var key in catalog.messages)
      this.TRANSLATIONS[key] = catalog.messages[key];
    this.PLURAL_EXPR = new Function('n', 'return +(' + catalog.plural_expr + ')');
    this.LOCALE = catalog.locale;
  },

  /**
   * add context elements like header anchor links
   */
  addContextElements : function() {
    $('div[id] > :header:first').each(function() {
      // NOTE(review): the anchor markup below was stripped in this copy of
      // the file; restored from upstream Sphinx doctools.js — confirm
      // against the pinned Sphinx version.
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this headline')).
      appendTo(this);
    });
    $('dt[id]').each(function() {
      $('<a class="headerlink">\u00B6</a>').
      attr('href', '#' + this.id).
      attr('title', _('Permalink to this definition')).
      appendTo(this);
    });
  },

  /**
   * workaround a firefox stupidity
   * see: https://bugzilla.mozilla.org/show_bug.cgi?id=645075
   */
  fixFirefoxAnchorBug : function() {
    // forcing a re-assignment of location.href makes Firefox re-scroll
    // to the anchor after the page has finished loading
    if (document.location.hash && $.browser.mozilla)
      window.setTimeout(function() {
        document.location.href += '';
      }, 10);
  },

  /**
   * highlight the search words provided in the url in the text
   */
  highlightSearchWords : function() {
    var params = $.getQueryParameters();
    var terms = (params.highlight) ? params.highlight[0].split(/\s+/) : [];
    if (terms.length) {
      var body = $('div.body');
      if (!body.length) {
        body = $('body');
      }
      // defer so highlighting happens after the page has rendered
      window.setTimeout(function() {
        $.each(terms, function() {
          body.highlightText(this.toLowerCase(), 'highlighted');
        });
      }, 10);
      // NOTE(review): this link markup was stripped in this copy of the
      // file; restored from upstream Sphinx doctools.js.
      $('<p class="highlight-link"><a href="javascript:Documentation.' +
        'hideSearchWords()">' + _('Hide Search Matches') + '</a></p>')
        .appendTo($('#searchbox'));
    }
  },

  /**
   * init the domain index toggle buttons
   */
  initIndexTable : function() {
    var togglers = $('img.toggler').click(function() {
      var src = $(this).attr('src');
      // toggler ids are "toggle-<N>"; the matching rows have class cg-<N>
      var idnum = $(this).attr('id').substr(7);
      $('tr.cg-' + idnum).toggle();
      // swap the +/- icon by rewriting the image filename suffix
      if (src.substr(-9) === 'minus.png')
        $(this).attr('src', src.substr(0, src.length-9) + 'plus.png');
      else
        $(this).attr('src', src.substr(0, src.length-8) + 'minus.png');
    }).css('display', '');
    if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) {
      togglers.click();
    }
  },

  /**
   * helper function to hide the search marks again
   */
  hideSearchWords : function() {
    $('#searchbox .highlight-link').fadeOut(300);
    $('span.highlighted').removeClass('highlighted');
  },

  /**
   * make the url absolute
   */
  makeURL : function(relativeURL) {
    return DOCUMENTATION_OPTIONS.URL_ROOT + '/' + relativeURL;
  },

  /**
   * get the current relative url
   */
  getCurrentURL : function() {
    var path = document.location.pathname;
    var parts = path.split(/\//);
    // strip one path component for each '..' in URL_ROOT
    $.each(DOCUMENTATION_OPTIONS.URL_ROOT.split(/\//), function() {
      if (this === '..')
        parts.pop();
    });
    var url = parts.join('/');
    return path.substring(url.lastIndexOf('/') + 1, path.length - 1);
  },

  /**
   * keyboard navigation: left/right arrows follow the rel=prev/next links,
   * unless focus is in a form field.
   */
  initOnKeyListeners: function() {
    $(document).keyup(function(event) {
      var activeElementType = document.activeElement.tagName;
      // don't navigate when in search box or textarea
      if (activeElementType !== 'TEXTAREA' && activeElementType !== 'INPUT' && activeElementType !== 'SELECT') {
        switch (event.keyCode) {
          case 37: // left
            var prevHref = $('link[rel="prev"]').prop('href');
            if (prevHref) {
              window.location.href = prevHref;
              return false;
            }
            // BUG FIX: missing break — with no prev link, pressing the
            // LEFT arrow fell through and navigated to the NEXT page.
            break;
          case 39: // right
            var nextHref = $('link[rel="next"]').prop('href');
            if (nextHref) {
              window.location.href = nextHref;
              return false;
            }
            break;
        }
      }
    });
  }
};
308 |
// quick alias for translations
_ = Documentation.gettext;

// run Documentation.init once the DOM is ready
// ($(fn) is jQuery's shorthand for $(document).ready(fn))
$(function() {
  Documentation.init();
});
315 |
--------------------------------------------------------------------------------
/docs/_static/documentation_options.js:
--------------------------------------------------------------------------------
// Build-time options emitted by Sphinx; read by doctools.js/searchtools.js.
var DOCUMENTATION_OPTIONS = {
    // relative path to the documentation root, taken from the
    // <script id="documentation_options" data-url_root=...> tag
    URL_ROOT: document.getElementById("documentation_options").getAttribute('data-url_root'),
    VERSION: '0.2.0',
    LANGUAGE: 'None',
    // when true, domain index tables start collapsed (see initIndexTable)
    COLLAPSE_INDEX: false,
    FILE_SUFFIX: '.html',
    HAS_SOURCE: true,
    SOURCELINK_SUFFIX: '.txt',
    // when true, left/right arrow keys follow rel=prev/next links
    NAVIGATION_WITH_KEYS: false
};
--------------------------------------------------------------------------------
/docs/_static/file.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/file.png
--------------------------------------------------------------------------------
/docs/_static/fonts/Inconsolata-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Inconsolata-Bold.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Inconsolata-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Inconsolata-Regular.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Inconsolata.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Inconsolata.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato-Bold.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato-Regular.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bold.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bold.eot
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bold.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bold.woff
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bold.woff2
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bolditalic.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bolditalic.eot
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bolditalic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bolditalic.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bolditalic.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bolditalic.woff
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-bolditalic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-bolditalic.woff2
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-italic.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-italic.eot
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-italic.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-italic.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-italic.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-italic.woff
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-italic.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-italic.woff2
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-regular.eot
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-regular.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-regular.woff
--------------------------------------------------------------------------------
/docs/_static/fonts/Lato/lato-regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/Lato/lato-regular.woff2
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab-Bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab-Bold.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab-Regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab-Regular.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.eot
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-bold.woff2
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.eot
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff
--------------------------------------------------------------------------------
/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/RobotoSlab/roboto-slab-v7-regular.woff2
--------------------------------------------------------------------------------
/docs/_static/fonts/fontawesome-webfont.eot:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/fontawesome-webfont.eot
--------------------------------------------------------------------------------
/docs/_static/fonts/fontawesome-webfont.ttf:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/fontawesome-webfont.ttf
--------------------------------------------------------------------------------
/docs/_static/fonts/fontawesome-webfont.woff:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/fontawesome-webfont.woff
--------------------------------------------------------------------------------
/docs/_static/fonts/fontawesome-webfont.woff2:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/fonts/fontawesome-webfont.woff2
--------------------------------------------------------------------------------
/docs/_static/js/modernizr.min.js:
--------------------------------------------------------------------------------
1 | /* Modernizr 2.6.2 (Custom Build) | MIT & BSD
2 | * Build: http://modernizr.com/download/#-fontface-backgroundsize-borderimage-borderradius-boxshadow-flexbox-hsla-multiplebgs-opacity-rgba-textshadow-cssanimations-csscolumns-generatedcontent-cssgradients-cssreflections-csstransforms-csstransforms3d-csstransitions-applicationcache-canvas-canvastext-draganddrop-hashchange-history-audio-video-indexeddb-input-inputtypes-localstorage-postmessage-sessionstorage-websockets-websqldatabase-webworkers-geolocation-inlinesvg-smil-svg-svgclippaths-touch-webgl-shiv-mq-cssclasses-addtest-prefixed-teststyles-testprop-testallprops-hasevent-prefixes-domprefixes-load
3 | */
4 | ;window.Modernizr=function(a,b,c){function D(a){j.cssText=a}function E(a,b){return D(n.join(a+";")+(b||""))}function F(a,b){return typeof a===b}function G(a,b){return!!~(""+a).indexOf(b)}function H(a,b){for(var d in a){var e=a[d];if(!G(e,"-")&&j[e]!==c)return b=="pfx"?e:!0}return!1}function I(a,b,d){for(var e in a){var f=b[a[e]];if(f!==c)return d===!1?a[e]:F(f,"function")?f.bind(d||b):f}return!1}function J(a,b,c){var d=a.charAt(0).toUpperCase()+a.slice(1),e=(a+" "+p.join(d+" ")+d).split(" ");return F(b,"string")||F(b,"undefined")?H(e,b):(e=(a+" "+q.join(d+" ")+d).split(" "),I(e,b,c))}function K(){e.input=function(c){for(var d=0,e=c.length;d',a,""].join(""),l.id=h,(m?l:n).innerHTML+=f,n.appendChild(l),m||(n.style.background="",n.style.overflow="hidden",k=g.style.overflow,g.style.overflow="hidden",g.appendChild(n)),i=c(l,a),m?l.parentNode.removeChild(l):(n.parentNode.removeChild(n),g.style.overflow=k),!!i},z=function(b){var c=a.matchMedia||a.msMatchMedia;if(c)return c(b).matches;var d;return y("@media "+b+" { #"+h+" { position: absolute; } }",function(b){d=(a.getComputedStyle?getComputedStyle(b,null):b.currentStyle)["position"]=="absolute"}),d},A=function(){function d(d,e){e=e||b.createElement(a[d]||"div"),d="on"+d;var f=d in e;return f||(e.setAttribute||(e=b.createElement("div")),e.setAttribute&&e.removeAttribute&&(e.setAttribute(d,""),f=F(e[d],"function"),F(e[d],"undefined")||(e[d]=c),e.removeAttribute(d))),e=null,f}var a={select:"input",change:"input",submit:"form",reset:"form",error:"img",load:"img",abort:"img"};return d}(),B={}.hasOwnProperty,C;!F(B,"undefined")&&!F(B.call,"undefined")?C=function(a,b){return B.call(a,b)}:C=function(a,b){return b in a&&F(a.constructor.prototype[b],"undefined")},Function.prototype.bind||(Function.prototype.bind=function(b){var c=this;if(typeof c!="function")throw new TypeError;var d=w.call(arguments,1),e=function(){if(this instanceof e){var a=function(){};a.prototype=c.prototype;var f=new 
a,g=c.apply(f,d.concat(w.call(arguments)));return Object(g)===g?g:f}return c.apply(b,d.concat(w.call(arguments)))};return e}),s.flexbox=function(){return J("flexWrap")},s.canvas=function(){var a=b.createElement("canvas");return!!a.getContext&&!!a.getContext("2d")},s.canvastext=function(){return!!e.canvas&&!!F(b.createElement("canvas").getContext("2d").fillText,"function")},s.webgl=function(){return!!a.WebGLRenderingContext},s.touch=function(){var c;return"ontouchstart"in a||a.DocumentTouch&&b instanceof DocumentTouch?c=!0:y(["@media (",n.join("touch-enabled),("),h,")","{#modernizr{top:9px;position:absolute}}"].join(""),function(a){c=a.offsetTop===9}),c},s.geolocation=function(){return"geolocation"in navigator},s.postmessage=function(){return!!a.postMessage},s.websqldatabase=function(){return!!a.openDatabase},s.indexedDB=function(){return!!J("indexedDB",a)},s.hashchange=function(){return A("hashchange",a)&&(b.documentMode===c||b.documentMode>7)},s.history=function(){return!!a.history&&!!history.pushState},s.draganddrop=function(){var a=b.createElement("div");return"draggable"in a||"ondragstart"in a&&"ondrop"in a},s.websockets=function(){return"WebSocket"in a||"MozWebSocket"in a},s.rgba=function(){return D("background-color:rgba(150,255,150,.5)"),G(j.backgroundColor,"rgba")},s.hsla=function(){return D("background-color:hsla(120,40%,100%,.5)"),G(j.backgroundColor,"rgba")||G(j.backgroundColor,"hsla")},s.multiplebgs=function(){return D("background:url(https://),url(https://),red url(https://)"),/(url\s*\(.*?){3}/.test(j.background)},s.backgroundsize=function(){return J("backgroundSize")},s.borderimage=function(){return J("borderImage")},s.borderradius=function(){return J("borderRadius")},s.boxshadow=function(){return J("boxShadow")},s.textshadow=function(){return b.createElement("div").style.textShadow===""},s.opacity=function(){return E("opacity:.55"),/^0.55$/.test(j.opacity)},s.cssanimations=function(){return J("animationName")},s.csscolumns=function(){return 
J("columnCount")},s.cssgradients=function(){var a="background-image:",b="gradient(linear,left top,right bottom,from(#9f9),to(white));",c="linear-gradient(left top,#9f9, white);";return D((a+"-webkit- ".split(" ").join(b+a)+n.join(c+a)).slice(0,-a.length)),G(j.backgroundImage,"gradient")},s.cssreflections=function(){return J("boxReflect")},s.csstransforms=function(){return!!J("transform")},s.csstransforms3d=function(){var a=!!J("perspective");return a&&"webkitPerspective"in g.style&&y("@media (transform-3d),(-webkit-transform-3d){#modernizr{left:9px;position:absolute;height:3px;}}",function(b,c){a=b.offsetLeft===9&&b.offsetHeight===3}),a},s.csstransitions=function(){return J("transition")},s.fontface=function(){var a;return y('@font-face {font-family:"font";src:url("https://")}',function(c,d){var e=b.getElementById("smodernizr"),f=e.sheet||e.styleSheet,g=f?f.cssRules&&f.cssRules[0]?f.cssRules[0].cssText:f.cssText||"":"";a=/src/i.test(g)&&g.indexOf(d.split(" ")[0])===0}),a},s.generatedcontent=function(){var a;return y(["#",h,"{font:0/0 a}#",h,':after{content:"',l,'";visibility:hidden;font:3px/1 a}'].join(""),function(b){a=b.offsetHeight>=3}),a},s.video=function(){var a=b.createElement("video"),c=!1;try{if(c=!!a.canPlayType)c=new Boolean(c),c.ogg=a.canPlayType('video/ogg; codecs="theora"').replace(/^no$/,""),c.h264=a.canPlayType('video/mp4; codecs="avc1.42E01E"').replace(/^no$/,""),c.webm=a.canPlayType('video/webm; codecs="vp8, vorbis"').replace(/^no$/,"")}catch(d){}return c},s.audio=function(){var a=b.createElement("audio"),c=!1;try{if(c=!!a.canPlayType)c=new Boolean(c),c.ogg=a.canPlayType('audio/ogg; codecs="vorbis"').replace(/^no$/,""),c.mp3=a.canPlayType("audio/mpeg;").replace(/^no$/,""),c.wav=a.canPlayType('audio/wav; codecs="1"').replace(/^no$/,""),c.m4a=(a.canPlayType("audio/x-m4a;")||a.canPlayType("audio/aac;")).replace(/^no$/,"")}catch(d){}return c},s.localstorage=function(){try{return 
localStorage.setItem(h,h),localStorage.removeItem(h),!0}catch(a){return!1}},s.sessionstorage=function(){try{return sessionStorage.setItem(h,h),sessionStorage.removeItem(h),!0}catch(a){return!1}},s.webworkers=function(){return!!a.Worker},s.applicationcache=function(){return!!a.applicationCache},s.svg=function(){return!!b.createElementNS&&!!b.createElementNS(r.svg,"svg").createSVGRect},s.inlinesvg=function(){var a=b.createElement("div");return a.innerHTML="",(a.firstChild&&a.firstChild.namespaceURI)==r.svg},s.smil=function(){return!!b.createElementNS&&/SVGAnimate/.test(m.call(b.createElementNS(r.svg,"animate")))},s.svgclippaths=function(){return!!b.createElementNS&&/SVGClipPath/.test(m.call(b.createElementNS(r.svg,"clipPath")))};for(var L in s)C(s,L)&&(x=L.toLowerCase(),e[x]=s[L](),v.push((e[x]?"":"no-")+x));return e.input||K(),e.addTest=function(a,b){if(typeof a=="object")for(var d in a)C(a,d)&&e.addTest(d,a[d]);else{a=a.toLowerCase();if(e[a]!==c)return e;b=typeof b=="function"?b():b,typeof f!="undefined"&&f&&(g.className+=" "+(b?"":"no-")+a),e[a]=b}return e},D(""),i=k=null,function(a,b){function k(a,b){var c=a.createElement("p"),d=a.getElementsByTagName("head")[0]||a.documentElement;return c.innerHTML="x",d.insertBefore(c.lastChild,d.firstChild)}function l(){var a=r.elements;return typeof a=="string"?a.split(" "):a}function m(a){var b=i[a[g]];return b||(b={},h++,a[g]=h,i[h]=b),b}function n(a,c,f){c||(c=b);if(j)return c.createElement(a);f||(f=m(c));var g;return f.cache[a]?g=f.cache[a].cloneNode():e.test(a)?g=(f.cache[a]=f.createElem(a)).cloneNode():g=f.createElem(a),g.canHaveChildren&&!d.test(a)?f.frag.appendChild(g):g}function o(a,c){a||(a=b);if(j)return a.createDocumentFragment();c=c||m(a);var d=c.frag.cloneNode(),e=0,f=l(),g=f.length;for(;e",f="hidden"in a,j=a.childNodes.length==1||function(){b.createElement("a");var a=b.createDocumentFragment();return typeof a.cloneNode=="undefined"||typeof a.createDocumentFragment=="undefined"||typeof 
a.createElement=="undefined"}()}catch(c){f=!0,j=!0}})();var r={elements:c.elements||"abbr article aside audio bdi canvas data datalist details figcaption figure footer header hgroup mark meter nav output progress section summary time video",shivCSS:c.shivCSS!==!1,supportsUnknownElements:j,shivMethods:c.shivMethods!==!1,type:"default",shivDocument:q,createElement:n,createDocumentFragment:o};a.html5=r,q(b)}(this,b),e._version=d,e._prefixes=n,e._domPrefixes=q,e._cssomPrefixes=p,e.mq=z,e.hasEvent=A,e.testProp=function(a){return H([a])},e.testAllProps=J,e.testStyles=y,e.prefixed=function(a,b,c){return b?J(a,b,c):J(a,"pfx")},g.className=g.className.replace(/(^|\s)no-js(\s|$)/,"$1$2")+(f?" js "+v.join(" "):""),e}(this,this.document),function(a,b,c){function d(a){return"[object Function]"==o.call(a)}function e(a){return"string"==typeof a}function f(){}function g(a){return!a||"loaded"==a||"complete"==a||"uninitialized"==a}function h(){var a=p.shift();q=1,a?a.t?m(function(){("c"==a.t?B.injectCss:B.injectJs)(a.s,0,a.a,a.x,a.e,1)},0):(a(),h()):q=0}function i(a,c,d,e,f,i,j){function k(b){if(!o&&g(l.readyState)&&(u.r=o=1,!q&&h(),l.onload=l.onreadystatechange=null,b)){"img"!=a&&m(function(){t.removeChild(l)},50);for(var d in y[c])y[c].hasOwnProperty(d)&&y[c][d].onload()}}var j=j||B.errorTimeout,l=b.createElement(a),o=0,r=0,u={t:d,s:c,e:f,a:i,x:j};1===y[c]&&(r=1,y[c]=[]),"object"==a?l.data=c:(l.src=c,l.type=a),l.width=l.height="0",l.onerror=l.onload=l.onreadystatechange=function(){k.call(this,r)},p.splice(e,0,u),"img"!=a&&(r||2===y[c]?(t.insertBefore(l,s?null:n),m(k,j)):y[c].push(l))}function j(a,b,c,d,f){return q=0,b=b||"j",e(a)?i("c"==b?v:u,a,b,this.i++,c,d,f):(p.splice(this.i++,0,a),1==p.length&&h()),this}function k(){var a=B;return a.loader={load:j,i:0},a}var l=b.documentElement,m=a.setTimeout,n=b.getElementsByTagName("script")[0],o={}.toString,p=[],q=0,r="MozAppearance"in l.style,s=r&&!!b.createRange().compareNode,t=s?l:n.parentNode,l=a.opera&&"[object 
Opera]"==o.call(a.opera),l=!!b.attachEvent&&!l,u=r?"object":l?"script":"img",v=l?"script":u,w=Array.isArray||function(a){return"[object Array]"==o.call(a)},x=[],y={},z={timeout:function(a,b){return b.length&&(a.timeout=b[0]),a}},A,B;B=function(a){function b(a){var a=a.split("!"),b=x.length,c=a.pop(),d=a.length,c={url:c,origUrl:c,prefixes:a},e,f,g;for(f=0;f"),i("table.docutils.footnote").wrap(""),i("table.docutils.citation").wrap(""),i(".wy-menu-vertical ul").not(".simple").siblings("a").each(function(){var e=i(this);expand=i(''),expand.on("click",function(n){return t.toggleCurrent(e),n.stopPropagation(),!1}),e.prepend(expand)})},reset:function(){var n=encodeURI(window.location.hash)||"#";try{var e=$(".wy-menu-vertical"),i=e.find('[href="'+n+'"]');if(0===i.length){var t=$('.document [id="'+n.substring(1)+'"]').closest("div.section");0===(i=e.find('[href="#'+t.attr("id")+'"]')).length&&(i=e.find('[href="#"]'))}0this.docHeight||(this.navBar.scrollTop(i),this.winPosition=n)},onResize:function(){this.winResize=!1,this.winHeight=this.win.height(),this.docHeight=$(document).height()},hashChange:function(){this.linkScroll=!0,this.win.one("hashchange",function(){this.linkScroll=!1})},toggleCurrent:function(n){var e=n.closest("li");e.siblings("li.current").removeClass("current"),e.siblings().find("li.current").removeClass("current"),e.find("> ul li.current").removeClass("current"),e.toggleClass("current")}},"undefined"!=typeof window&&(window.SphinxRtdTheme={Navigation:e.exports.ThemeNav,StickyNav:e.exports.ThemeNav}),function(){for(var r=0,n=["ms","moz","webkit","o"],e=0;e0
62 | var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
63 | var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
64 | var s_v = "^(" + C + ")?" + v; // vowel in stem
65 |
// Porter stemmer (Porter 1980, "An algorithm for suffix stripping"):
// reduces an English word to its stem by applying five ordered suffix rules.
// mgr0 / meq1 / mgr1 / s_v are the measure regexes built by the enclosing
// Stemmer() constructor; step2list / step3list map suffixes to replacements.
// NOTE(review): 'origword' below is assigned but never read.
66 | this.stemWord = function (w) {
67 | var stem;
68 | var suffix;
69 | var firstch;
70 | var origword = w;
71 |
// Words shorter than 3 characters are returned unstemmed.
72 | if (w.length < 3)
73 | return w;
74 |
75 | var re;
76 | var re2;
77 | var re3;
78 | var re4;
79 |
// Uppercase a leading 'y' so the vowel/consonant regexes treat it as a
// consonant; it is lowercased again at the end.
80 | firstch = w.substr(0,1);
81 | if (firstch == "y")
82 | w = firstch.toUpperCase() + w.substr(1);
83 |
84 | // Step 1a
85 | re = /^(.+?)(ss|i)es$/;
86 | re2 = /^(.+?)([^s])s$/;
87 |
88 | if (re.test(w))
89 | w = w.replace(re,"$1$2");
90 | else if (re2.test(w))
91 | w = w.replace(re2,"$1$2");
92 |
93 | // Step 1b
94 | re = /^(.+?)eed$/;
95 | re2 = /^(.+?)(ed|ing)$/;
96 | if (re.test(w)) {
97 | var fp = re.exec(w);
98 | re = new RegExp(mgr0);
99 | if (re.test(fp[1])) {
100 | re = /.$/;
101 | w = w.replace(re,"");
102 | }
103 | }
104 | else if (re2.test(w)) {
105 | var fp = re2.exec(w);
106 | stem = fp[1];
107 | re2 = new RegExp(s_v);
108 | if (re2.test(stem)) {
109 | w = stem;
// After removing -ed/-ing: restore a trailing 'e' (at/bl/iz), undo a
// doubled consonant, or add 'e' to a short CVC stem.
110 | re2 = /(at|bl|iz)$/;
111 | re3 = new RegExp("([^aeiouylsz])\\1$");
112 | re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
113 | if (re2.test(w))
114 | w = w + "e";
115 | else if (re3.test(w)) {
116 | re = /.$/;
117 | w = w.replace(re,"");
118 | }
119 | else if (re4.test(w))
120 | w = w + "e";
121 | }
122 | }
123 |
124 | // Step 1c
125 | re = /^(.+?)y$/;
126 | if (re.test(w)) {
127 | var fp = re.exec(w);
128 | stem = fp[1];
129 | re = new RegExp(s_v);
130 | if (re.test(stem))
131 | w = stem + "i";
132 | }
133 |
134 | // Step 2
135 | re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
136 | if (re.test(w)) {
137 | var fp = re.exec(w);
138 | stem = fp[1];
139 | suffix = fp[2];
140 | re = new RegExp(mgr0);
141 | if (re.test(stem))
142 | w = stem + step2list[suffix];
143 | }
144 |
145 | // Step 3
146 | re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
147 | if (re.test(w)) {
148 | var fp = re.exec(w);
149 | stem = fp[1];
150 | suffix = fp[2];
151 | re = new RegExp(mgr0);
152 | if (re.test(stem))
153 | w = stem + step3list[suffix];
154 | }
155 |
156 | // Step 4
157 | re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|iti|ous|ive|ize)$/;
158 | re2 = /^(.+?)(s|t)(ion)$/;
159 | if (re.test(w)) {
160 | var fp = re.exec(w);
161 | stem = fp[1];
162 | re = new RegExp(mgr1);
163 | if (re.test(stem))
164 | w = stem;
165 | }
166 | else if (re2.test(w)) {
167 | var fp = re2.exec(w);
168 | stem = fp[1] + fp[2];
169 | re2 = new RegExp(mgr1);
170 | if (re2.test(stem))
171 | w = stem;
172 | }
173 |
174 | // Step 5
175 | re = /^(.+?)e$/;
176 | if (re.test(w)) {
177 | var fp = re.exec(w);
178 | stem = fp[1];
179 | re = new RegExp(mgr1);
180 | re2 = new RegExp(meq1);
181 | re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
182 | if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
183 | w = stem;
184 | }
// Reduce a trailing double 'l' when the measure is > 1.
185 | re = /ll$/;
186 | re2 = new RegExp(mgr1);
187 | if (re.test(w) && re2.test(w)) {
188 | re = /.$/;
189 | w = w.replace(re,"");
190 | }
191 |
192 | // and turn initial Y back to y
193 | if (firstch == "y")
194 | w = firstch.toLowerCase() + w.substr(1);
195 | return w;
196 | }
197 | }
198 |
199 |
200 |
201 |
202 |
// Lookup table used by splitQuery() below: maps a character code to true
// when that character acts as a word delimiter. Built once at load time from
// a list of single code points plus inclusive [start, end] ranges.
// NOTE(review): generated data (Sphinx language_data.js) — do not hand-edit;
// regenerate from the Sphinx source instead.
203 | var splitChars = (function() {
204 | var result = {};
205 | var singles = [96, 180, 187, 191, 215, 247, 749, 885, 903, 907, 909, 930, 1014, 1648,
206 | 1748, 1809, 2416, 2473, 2481, 2526, 2601, 2609, 2612, 2615, 2653, 2702,
207 | 2706, 2729, 2737, 2740, 2857, 2865, 2868, 2910, 2928, 2948, 2961, 2971,
208 | 2973, 3085, 3089, 3113, 3124, 3213, 3217, 3241, 3252, 3295, 3341, 3345,
209 | 3369, 3506, 3516, 3633, 3715, 3721, 3736, 3744, 3748, 3750, 3756, 3761,
210 | 3781, 3912, 4239, 4347, 4681, 4695, 4697, 4745, 4785, 4799, 4801, 4823,
211 | 4881, 5760, 5901, 5997, 6313, 7405, 8024, 8026, 8028, 8030, 8117, 8125,
212 | 8133, 8181, 8468, 8485, 8487, 8489, 8494, 8527, 11311, 11359, 11687, 11695,
213 | 11703, 11711, 11719, 11727, 11735, 12448, 12539, 43010, 43014, 43019, 43587,
214 | 43696, 43713, 64286, 64297, 64311, 64317, 64319, 64322, 64325, 65141];
215 | var i, j, start, end;
// Mark every standalone delimiter code point.
216 | for (i = 0; i < singles.length; i++) {
217 | result[singles[i]] = true;
218 | }
219 | var ranges = [[0, 47], [58, 64], [91, 94], [123, 169], [171, 177], [182, 184], [706, 709],
220 | [722, 735], [741, 747], [751, 879], [888, 889], [894, 901], [1154, 1161],
221 | [1318, 1328], [1367, 1368], [1370, 1376], [1416, 1487], [1515, 1519], [1523, 1568],
222 | [1611, 1631], [1642, 1645], [1750, 1764], [1767, 1773], [1789, 1790], [1792, 1807],
223 | [1840, 1868], [1958, 1968], [1970, 1983], [2027, 2035], [2038, 2041], [2043, 2047],
224 | [2070, 2073], [2075, 2083], [2085, 2087], [2089, 2307], [2362, 2364], [2366, 2383],
225 | [2385, 2391], [2402, 2405], [2419, 2424], [2432, 2436], [2445, 2446], [2449, 2450],
226 | [2483, 2485], [2490, 2492], [2494, 2509], [2511, 2523], [2530, 2533], [2546, 2547],
227 | [2554, 2564], [2571, 2574], [2577, 2578], [2618, 2648], [2655, 2661], [2672, 2673],
228 | [2677, 2692], [2746, 2748], [2750, 2767], [2769, 2783], [2786, 2789], [2800, 2820],
229 | [2829, 2830], [2833, 2834], [2874, 2876], [2878, 2907], [2914, 2917], [2930, 2946],
230 | [2955, 2957], [2966, 2968], [2976, 2978], [2981, 2983], [2987, 2989], [3002, 3023],
231 | [3025, 3045], [3059, 3076], [3130, 3132], [3134, 3159], [3162, 3167], [3170, 3173],
232 | [3184, 3191], [3199, 3204], [3258, 3260], [3262, 3293], [3298, 3301], [3312, 3332],
233 | [3386, 3388], [3390, 3423], [3426, 3429], [3446, 3449], [3456, 3460], [3479, 3481],
234 | [3518, 3519], [3527, 3584], [3636, 3647], [3655, 3663], [3674, 3712], [3717, 3718],
235 | [3723, 3724], [3726, 3731], [3752, 3753], [3764, 3772], [3774, 3775], [3783, 3791],
236 | [3802, 3803], [3806, 3839], [3841, 3871], [3892, 3903], [3949, 3975], [3980, 4095],
237 | [4139, 4158], [4170, 4175], [4182, 4185], [4190, 4192], [4194, 4196], [4199, 4205],
238 | [4209, 4212], [4226, 4237], [4250, 4255], [4294, 4303], [4349, 4351], [4686, 4687],
239 | [4702, 4703], [4750, 4751], [4790, 4791], [4806, 4807], [4886, 4887], [4955, 4968],
240 | [4989, 4991], [5008, 5023], [5109, 5120], [5741, 5742], [5787, 5791], [5867, 5869],
241 | [5873, 5887], [5906, 5919], [5938, 5951], [5970, 5983], [6001, 6015], [6068, 6102],
242 | [6104, 6107], [6109, 6111], [6122, 6127], [6138, 6159], [6170, 6175], [6264, 6271],
243 | [6315, 6319], [6390, 6399], [6429, 6469], [6510, 6511], [6517, 6527], [6572, 6592],
244 | [6600, 6607], [6619, 6655], [6679, 6687], [6741, 6783], [6794, 6799], [6810, 6822],
245 | [6824, 6916], [6964, 6980], [6988, 6991], [7002, 7042], [7073, 7085], [7098, 7167],
246 | [7204, 7231], [7242, 7244], [7294, 7400], [7410, 7423], [7616, 7679], [7958, 7959],
247 | [7966, 7967], [8006, 8007], [8014, 8015], [8062, 8063], [8127, 8129], [8141, 8143],
248 | [8148, 8149], [8156, 8159], [8173, 8177], [8189, 8303], [8306, 8307], [8314, 8318],
249 | [8330, 8335], [8341, 8449], [8451, 8454], [8456, 8457], [8470, 8472], [8478, 8483],
250 | [8506, 8507], [8512, 8516], [8522, 8525], [8586, 9311], [9372, 9449], [9472, 10101],
251 | [10132, 11263], [11493, 11498], [11503, 11516], [11518, 11519], [11558, 11567],
252 | [11622, 11630], [11632, 11647], [11671, 11679], [11743, 11822], [11824, 12292],
253 | [12296, 12320], [12330, 12336], [12342, 12343], [12349, 12352], [12439, 12444],
254 | [12544, 12548], [12590, 12592], [12687, 12689], [12694, 12703], [12728, 12783],
255 | [12800, 12831], [12842, 12880], [12896, 12927], [12938, 12976], [12992, 13311],
256 | [19894, 19967], [40908, 40959], [42125, 42191], [42238, 42239], [42509, 42511],
257 | [42540, 42559], [42592, 42593], [42607, 42622], [42648, 42655], [42736, 42774],
258 | [42784, 42785], [42889, 42890], [42893, 43002], [43043, 43055], [43062, 43071],
259 | [43124, 43137], [43188, 43215], [43226, 43249], [43256, 43258], [43260, 43263],
260 | [43302, 43311], [43335, 43359], [43389, 43395], [43443, 43470], [43482, 43519],
261 | [43561, 43583], [43596, 43599], [43610, 43615], [43639, 43641], [43643, 43647],
262 | [43698, 43700], [43703, 43704], [43710, 43711], [43715, 43738], [43742, 43967],
263 | [44003, 44015], [44026, 44031], [55204, 55215], [55239, 55242], [55292, 55295],
264 | [57344, 63743], [64046, 64047], [64110, 64111], [64218, 64255], [64263, 64274],
265 | [64280, 64284], [64434, 64466], [64830, 64847], [64912, 64913], [64968, 65007],
266 | [65020, 65135], [65277, 65295], [65306, 65312], [65339, 65344], [65371, 65381],
267 | [65471, 65473], [65480, 65481], [65488, 65489], [65496, 65497]];
// Expand each inclusive range into individual entries.
268 | for (i = 0; i < ranges.length; i++) {
269 | start = ranges[i][0];
270 | end = ranges[i][1];
271 | for (j = start; j <= end; j++) {
272 | result[j] = true;
273 | }
274 | }
275 | return result;
276 | })();
277 |
// Tokenize a raw search query: scan the string once and collect every
// maximal run of characters that are NOT marked as delimiters in the
// splitChars table above. Returns an array of word substrings (never
// includes empty strings).
function splitQuery(query) {
  var words = [];
  var length = query.length;
  var wordStart = null;  // index where the current word began, or null
  for (var pos = 0; pos < length; pos++) {
    if (splitChars[query.charCodeAt(pos)]) {
      // Delimiter: close the word in progress, if any.
      if (wordStart !== null) {
        words.push(query.slice(wordStart, pos));
        wordStart = null;
      }
    } else if (wordStart === null) {
      // First character of a new word.
      wordStart = pos;
    }
  }
  if (wordStart !== null) {
    words.push(query.slice(wordStart));
  }
  return words;
}
296 |
297 |
298 |
--------------------------------------------------------------------------------
/docs/_static/minus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/minus.png
--------------------------------------------------------------------------------
/docs/_static/plus.png:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/zubara/mneflow/92ee4f85c27427c869116ab6a1c9693b5da8089e/docs/_static/plus.png
--------------------------------------------------------------------------------
/docs/_static/pygments.css:
--------------------------------------------------------------------------------
/* Pygments token stylesheet for Sphinx code blocks (.highlight spans).
   One rule per short Pygments token class (.k keyword, .s string, .c
   comment, ...); the trailing comment on each line names the token.
   NOTE(review): this file looks machine-generated — prefer regenerating it
   (e.g. `pygmentize -S <style> -f html`) over hand edits. */
1 | pre { line-height: 125%; margin: 0; }
2 | td.linenos pre { color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px; }
3 | span.linenos { color: #000000; background-color: #f0f0f0; padding: 0 5px 0 5px; }
4 | td.linenos pre.special { color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px; }
5 | span.linenos.special { color: #000000; background-color: #ffffc0; padding: 0 5px 0 5px; }
6 | .highlight .hll { background-color: #ffffcc }
7 | .highlight { background: #f8f8f8; }
8 | .highlight .c { color: #408080; font-style: italic } /* Comment */
9 | .highlight .err { border: 1px solid #FF0000 } /* Error */
10 | .highlight .k { color: #008000; font-weight: bold } /* Keyword */
11 | .highlight .o { color: #666666 } /* Operator */
12 | .highlight .ch { color: #408080; font-style: italic } /* Comment.Hashbang */
13 | .highlight .cm { color: #408080; font-style: italic } /* Comment.Multiline */
14 | .highlight .cp { color: #BC7A00 } /* Comment.Preproc */
15 | .highlight .cpf { color: #408080; font-style: italic } /* Comment.PreprocFile */
16 | .highlight .c1 { color: #408080; font-style: italic } /* Comment.Single */
17 | .highlight .cs { color: #408080; font-style: italic } /* Comment.Special */
18 | .highlight .gd { color: #A00000 } /* Generic.Deleted */
19 | .highlight .ge { font-style: italic } /* Generic.Emph */
20 | .highlight .gr { color: #FF0000 } /* Generic.Error */
21 | .highlight .gh { color: #000080; font-weight: bold } /* Generic.Heading */
22 | .highlight .gi { color: #00A000 } /* Generic.Inserted */
23 | .highlight .go { color: #888888 } /* Generic.Output */
24 | .highlight .gp { color: #000080; font-weight: bold } /* Generic.Prompt */
25 | .highlight .gs { font-weight: bold } /* Generic.Strong */
26 | .highlight .gu { color: #800080; font-weight: bold } /* Generic.Subheading */
27 | .highlight .gt { color: #0044DD } /* Generic.Traceback */
28 | .highlight .kc { color: #008000; font-weight: bold } /* Keyword.Constant */
29 | .highlight .kd { color: #008000; font-weight: bold } /* Keyword.Declaration */
30 | .highlight .kn { color: #008000; font-weight: bold } /* Keyword.Namespace */
31 | .highlight .kp { color: #008000 } /* Keyword.Pseudo */
32 | .highlight .kr { color: #008000; font-weight: bold } /* Keyword.Reserved */
33 | .highlight .kt { color: #B00040 } /* Keyword.Type */
34 | .highlight .m { color: #666666 } /* Literal.Number */
35 | .highlight .s { color: #BA2121 } /* Literal.String */
36 | .highlight .na { color: #7D9029 } /* Name.Attribute */
37 | .highlight .nb { color: #008000 } /* Name.Builtin */
38 | .highlight .nc { color: #0000FF; font-weight: bold } /* Name.Class */
39 | .highlight .no { color: #880000 } /* Name.Constant */
40 | .highlight .nd { color: #AA22FF } /* Name.Decorator */
41 | .highlight .ni { color: #999999; font-weight: bold } /* Name.Entity */
42 | .highlight .ne { color: #D2413A; font-weight: bold } /* Name.Exception */
43 | .highlight .nf { color: #0000FF } /* Name.Function */
44 | .highlight .nl { color: #A0A000 } /* Name.Label */
45 | .highlight .nn { color: #0000FF; font-weight: bold } /* Name.Namespace */
46 | .highlight .nt { color: #008000; font-weight: bold } /* Name.Tag */
47 | .highlight .nv { color: #19177C } /* Name.Variable */
48 | .highlight .ow { color: #AA22FF; font-weight: bold } /* Operator.Word */
49 | .highlight .w { color: #bbbbbb } /* Text.Whitespace */
50 | .highlight .mb { color: #666666 } /* Literal.Number.Bin */
51 | .highlight .mf { color: #666666 } /* Literal.Number.Float */
52 | .highlight .mh { color: #666666 } /* Literal.Number.Hex */
53 | .highlight .mi { color: #666666 } /* Literal.Number.Integer */
54 | .highlight .mo { color: #666666 } /* Literal.Number.Oct */
55 | .highlight .sa { color: #BA2121 } /* Literal.String.Affix */
56 | .highlight .sb { color: #BA2121 } /* Literal.String.Backtick */
57 | .highlight .sc { color: #BA2121 } /* Literal.String.Char */
58 | .highlight .dl { color: #BA2121 } /* Literal.String.Delimiter */
59 | .highlight .sd { color: #BA2121; font-style: italic } /* Literal.String.Doc */
60 | .highlight .s2 { color: #BA2121 } /* Literal.String.Double */
61 | .highlight .se { color: #BB6622; font-weight: bold } /* Literal.String.Escape */
62 | .highlight .sh { color: #BA2121 } /* Literal.String.Heredoc */
63 | .highlight .si { color: #BB6688; font-weight: bold } /* Literal.String.Interpol */
64 | .highlight .sx { color: #008000 } /* Literal.String.Other */
65 | .highlight .sr { color: #BB6688 } /* Literal.String.Regex */
66 | .highlight .s1 { color: #BA2121 } /* Literal.String.Single */
67 | .highlight .ss { color: #19177C } /* Literal.String.Symbol */
68 | .highlight .bp { color: #008000 } /* Name.Builtin.Pseudo */
69 | .highlight .fm { color: #0000FF } /* Name.Function.Magic */
70 | .highlight .vc { color: #19177C } /* Name.Variable.Class */
71 | .highlight .vg { color: #19177C } /* Name.Variable.Global */
72 | .highlight .vi { color: #19177C } /* Name.Variable.Instance */
73 | .highlight .vm { color: #19177C } /* Name.Variable.Magic */
74 | .highlight .il { color: #666666 } /* Literal.Number.Integer.Long */
--------------------------------------------------------------------------------
/docs/_static/searchtools.js:
--------------------------------------------------------------------------------
1 | /*
2 | * searchtools.js
3 | * ~~~~~~~~~~~~~~~~
4 | *
5 | * Sphinx JavaScript utilities for the full-text search.
6 | *
7 | * :copyright: Copyright 2007-2019 by the Sphinx team, see AUTHORS.
8 | * :license: BSD, see LICENSE for details.
9 | *
10 | */
11 |
// Default relevance weights for the Search module below; a theme may define
// its own Scorer before this file loads to override them.
// NOTE(review): the bare `if (!Scorer)` only avoids a ReferenceError because
// the `var Scorer` declaration inside the block is hoisted to file scope,
// so an undeclared Scorer reads as undefined.
12 | if (!Scorer) {
13 | /**
14 | * Simple result scoring code.
15 | */
16 | var Scorer = {
17 | // Implement the following function to further tweak the score for each result
18 | // The function takes a result array [filename, title, anchor, descr, score]
19 | // and returns the new score.
20 | /*
21 | score: function(result) {
22 | return result[4];
23 | },
24 | */
25 |
26 | // query matches the full name of an object
27 | objNameMatch: 11,
28 | // or matches in the last dotted part of the object name
29 | objPartialMatch: 6,
30 | // Additive scores depending on the priority of the object
31 | objPrio: {0: 15, // used to be importantResults
32 | 1: 5, // used to be objectResults
33 | 2: -5}, // used to be unimportantResults
34 | // Used when the priority is not in the mapping.
35 | objPrioDefault: 0,
36 |
37 | // query found in title
38 | title: 15,
39 | partialTitle: 7,
40 | // query found in terms
41 | term: 5,
42 | partialTerm: 2
43 | };
44 | }
45 |
46 | if (!splitQuery) {
47 | function splitQuery(query) {
48 | return query.split(/\s+/);
49 | }
50 | }
51 |
/**
 * Search Module — client-side full-text search over the Sphinx-generated
 * index (loaded asynchronously via loadIndex/setIndex). Depends on jQuery
 * ($), underscore ($u), Stemmer/stopwords/splitQuery from language_data.js
 * and DOCUMENTATION_OPTIONS from documentation_options.js.
 *
 * NOTE(review): several jQuery string literals in this copy look truncated —
 * e.g. $('' + _('Searching') + ...), $(''), $.ajax selectors — presumably
 * the HTML tag markup (<h2>, <span>, <ul>, <li>, <div class="context">) was
 * stripped when this file was exported; restore from upstream Sphinx
 * searchtools.js before relying on this text.
 */
55 | var Search = {
56 |
// Lazily-populated state: the loaded search index, a query queued while the
// index is still loading, and the "Searching..." pulse animation phase
// (-1 = not running).
57 | _index : null,
58 | _queued_query : null,
59 | _pulse_status : -1,
60 |
// Strip an HTML page down to the text of its [role=main] region, with
// headerlink anchors removed.
61 | htmlToText : function(htmlString) {
62 | var htmlElement = document.createElement('span');
63 | htmlElement.innerHTML = htmlString;
64 | $(htmlElement).find('.headerlink').remove();
// NOTE(review): 'docContent' is assigned without var/let — leaks an
// implicit global (and throws in strict mode).
65 | docContent = $(htmlElement).find('[role=main]')[0];
66 | return docContent.textContent || docContent.innerText;
67 | },
68 |
// Kick off a search from the ?q= URL parameter, if present.
69 | init : function() {
70 | var params = $.getQueryParameters();
71 | if (params.q) {
72 | var query = params.q[0];
73 | $('input[name="q"]')[0].value = query;
74 | this.performSearch(query);
75 | }
76 | },
77 |
// Load searchindex.js as a script; on failure fall back to an iframe-style
// loader element with id "searchindexloader".
78 | loadIndex : function(url) {
79 | $.ajax({type: "GET", url: url, data: null,
80 | dataType: "script", cache: true,
81 | complete: function(jqxhr, textstatus) {
82 | if (textstatus != "success") {
83 | document.getElementById("searchindexloader").src = url;
84 | }
85 | }});
86 | },
87 |
// Called by the loaded searchindex.js; runs any query that was deferred
// while the index was loading.
88 | setIndex : function(index) {
89 | var q;
90 | this._index = index;
91 | if ((q = this._queued_query) !== null) {
92 | this._queued_query = null;
93 | Search.query(q);
94 | }
95 | },
96 |
97 | hasIndex : function() {
98 | return this._index !== null;
99 | },
100 |
101 | deferQuery : function(query) {
102 | this._queued_query = query;
103 | },
104 |
105 | stopPulse : function() {
106 | this._pulse_status = 0;
107 | },
108 |
// Animate a 0-3 dot "Searching..." indicator every 500ms until stopPulse()
// resets _pulse_status to 0... note the loop keeps running while status > -1.
109 | startPulse : function() {
110 | if (this._pulse_status >= 0)
111 | return;
112 | function pulse() {
113 | var i;
114 | Search._pulse_status = (Search._pulse_status + 1) % 4;
115 | var dotString = '';
116 | for (i = 0; i < Search._pulse_status; i++)
117 | dotString += '.';
118 | Search.dots.text(dotString);
119 | if (Search._pulse_status > -1)
120 | window.setTimeout(pulse, 500);
121 | }
122 | pulse();
123 | },
124 |
125 | /**
126 | * perform a search for something (or wait until index is loaded)
127 | */
128 | performSearch : function(query) {
129 | // create the required interface elements
// NOTE(review): the $('') literals below are the stripped-markup artifacts
// described in the module header — title/status/output should be created
// from real HTML tag strings.
130 | this.out = $('#search-results');
131 | this.title = $('' + _('Searching') + '
').appendTo(this.out);
132 | this.dots = $('').appendTo(this.title);
133 | this.status = $('
').appendTo(this.out);
134 | this.output = $('').appendTo(this.out);
135 |
136 | $('#search-progress').text(_('Preparing search...'));
137 | this.startPulse();
138 |
139 | // index already loaded, the browser was quick!
140 | if (this.hasIndex())
141 | this.query(query);
142 | else
143 | this.deferQuery(query);
144 | },
145 |
146 | /**
147 | * execute search (requires search index to be loaded)
148 | */
149 | query : function(query) {
150 | var i;
151 |
152 | // stem the searchterms and add them to the correct list
153 | var stemmer = new Stemmer();
154 | var searchterms = [];
155 | var excluded = [];
156 | var hlterms = [];
157 | var tmp = splitQuery(query);
158 | var objectterms = [];
159 | for (i = 0; i < tmp.length; i++) {
160 | if (tmp[i] !== "") {
161 | objectterms.push(tmp[i].toLowerCase());
162 | }
163 |
// Drop stopwords, pure numbers and empty tokens from the fulltext terms
// (they are still kept in objectterms above).
164 | if ($u.indexOf(stopwords, tmp[i].toLowerCase()) != -1 || tmp[i].match(/^\d+$/) ||
165 | tmp[i] === "") {
166 | // skip this "word"
167 | continue;
168 | }
169 | // stem the word
170 | var word = stemmer.stemWord(tmp[i].toLowerCase());
171 | // prevent stemmer from cutting word smaller than two chars
172 | if(word.length < 3 && tmp[i].length >= 3) {
173 | word = tmp[i];
174 | }
175 | var toAppend;
176 | // select the correct list
// A leading '-' marks an excluded term.
177 | if (word[0] == '-') {
178 | toAppend = excluded;
179 | word = word.substr(1);
180 | }
181 | else {
182 | toAppend = searchterms;
183 | hlterms.push(tmp[i].toLowerCase());
184 | }
185 | // only add if not already in the list
186 | if (!$u.contains(toAppend, word))
187 | toAppend.push(word);
188 | }
189 | var highlightstring = '?highlight=' + $.urlencode(hlterms.join(" "));
190 |
191 | // console.debug('SEARCH: searching for:');
192 | // console.info('required: ', searchterms);
193 | // console.info('excluded: ', excluded);
194 |
195 | // prepare search
196 | var terms = this._index.terms;
197 | var titleterms = this._index.titleterms;
198 |
199 | // array of [filename, title, anchor, descr, score]
200 | var results = [];
201 | $('#search-progress').empty();
202 |
203 | // lookup as object
204 | for (i = 0; i < objectterms.length; i++) {
205 | var others = [].concat(objectterms.slice(0, i),
206 | objectterms.slice(i+1, objectterms.length));
207 | results = results.concat(this.performObjectSearch(objectterms[i], others));
208 | }
209 |
210 | // lookup as search terms in fulltext
211 | results = results.concat(this.performTermsSearch(searchterms, excluded, terms, titleterms));
212 |
213 | // let the scorer override scores with a custom scoring function
214 | if (Scorer.score) {
215 | for (i = 0; i < results.length; i++)
216 | results[i][4] = Scorer.score(results[i]);
217 | }
218 |
219 | // now sort the results by score (in opposite order of appearance, since the
220 | // display function below uses pop() to retrieve items) and then
221 | // alphabetically
222 | results.sort(function(a, b) {
223 | var left = a[4];
224 | var right = b[4];
225 | if (left > right) {
226 | return 1;
227 | } else if (left < right) {
228 | return -1;
229 | } else {
230 | // same score: sort alphabetically
231 | left = a[1].toLowerCase();
232 | right = b[1].toLowerCase();
233 | return (left > right) ? -1 : ((left < right) ? 1 : 0);
234 | }
235 | });
236 |
237 | // for debugging
238 | //Search.lastresults = results.slice(); // a copy
239 | //console.info('search results:', Search.lastresults);
240 |
241 | // print the results
242 | var resultCount = results.length;
// Recursively pop and render one result at a time so each list item slides
// in after the previous one finishes (and any summary ajax completes).
243 | function displayNextItem() {
244 | // results left, load the summary and display it
245 | if (results.length) {
246 | var item = results.pop();
247 | var listItem = $('');
248 | if (DOCUMENTATION_OPTIONS.FILE_SUFFIX === '') {
249 | // dirhtml builder
250 | var dirname = item[0] + '/';
251 | if (dirname.match(/\/index\/$/)) {
252 | dirname = dirname.substring(0, dirname.length-6);
253 | } else if (dirname == 'index/') {
254 | dirname = '';
255 | }
256 | listItem.append($('').attr('href',
257 | DOCUMENTATION_OPTIONS.URL_ROOT + dirname +
258 | highlightstring + item[2]).html(item[1]));
259 | } else {
260 | // normal html builders
261 | listItem.append($('').attr('href',
262 | item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX +
263 | highlightstring + item[2]).html(item[1]));
264 | }
265 | if (item[3]) {
266 | listItem.append($(' (' + item[3] + ')'));
267 | Search.output.append(listItem);
268 | listItem.slideDown(5, function() {
269 | displayNextItem();
270 | });
271 | } else if (DOCUMENTATION_OPTIONS.HAS_SOURCE) {
272 | $.ajax({url: DOCUMENTATION_OPTIONS.URL_ROOT + item[0] + DOCUMENTATION_OPTIONS.FILE_SUFFIX,
273 | dataType: "text",
274 | complete: function(jqxhr, textstatus) {
275 | var data = jqxhr.responseText;
276 | if (data !== '' && data !== undefined) {
277 | listItem.append(Search.makeSearchSummary(data, searchterms, hlterms));
278 | }
279 | Search.output.append(listItem);
280 | listItem.slideDown(5, function() {
281 | displayNextItem();
282 | });
283 | }});
284 | } else {
285 | // no source available, just display title
286 | Search.output.append(listItem);
287 | listItem.slideDown(5, function() {
288 | displayNextItem();
289 | });
290 | }
291 | }
292 | // search finished, update title and status message
293 | else {
294 | Search.stopPulse();
295 | Search.title.text(_('Search Results'));
296 | if (!resultCount)
297 | Search.status.text(_('Your search did not match any documents. Please make sure that all words are spelled correctly and that you\'ve selected enough categories.'));
298 | else
299 | Search.status.text(_('Search finished, found %s page(s) matching the search query.').replace('%s', resultCount));
300 | Search.status.fadeIn(500);
301 | }
302 | }
303 | displayNextItem();
304 | },
305 |
306 | /**
307 | * search for object names
308 | */
// Substring-match `object` against every indexed (prefix.)name, score by
// match quality and object priority; `otherterms` must all appear somewhere
// in the name/title/description for a multi-word query.
309 | performObjectSearch : function(object, otherterms) {
310 | var filenames = this._index.filenames;
311 | var docnames = this._index.docnames;
312 | var objects = this._index.objects;
313 | var objnames = this._index.objnames;
314 | var titles = this._index.titles;
315 |
316 | var i;
317 | var results = [];
318 |
319 | for (var prefix in objects) {
320 | for (var name in objects[prefix]) {
321 | var fullname = (prefix ? prefix + '.' : '') + name;
322 | if (fullname.toLowerCase().indexOf(object) > -1) {
323 | var score = 0;
324 | var parts = fullname.split('.');
325 | // check for different match types: exact matches of full name or
326 | // "last name" (i.e. last dotted part)
327 | if (fullname == object || parts[parts.length - 1] == object) {
328 | score += Scorer.objNameMatch;
329 | // matches in last name
330 | } else if (parts[parts.length - 1].indexOf(object) > -1) {
331 | score += Scorer.objPartialMatch;
332 | }
333 | var match = objects[prefix][name];
334 | var objname = objnames[match[1]][2];
335 | var title = titles[match[0]];
336 | // If more than one term searched for, we require other words to be
337 | // found in the name/title/description
338 | if (otherterms.length > 0) {
339 | var haystack = (prefix + ' ' + name + ' ' +
340 | objname + ' ' + title).toLowerCase();
341 | var allfound = true;
342 | for (i = 0; i < otherterms.length; i++) {
343 | if (haystack.indexOf(otherterms[i]) == -1) {
344 | allfound = false;
345 | break;
346 | }
347 | }
348 | if (!allfound) {
349 | continue;
350 | }
351 | }
352 | var descr = objname + _(', in ') + title;
353 |
// anchor conventions from the index: '' means "use the full name",
// '-' means "objtype-prefixed full name".
354 | var anchor = match[3];
355 | if (anchor === '')
356 | anchor = fullname;
357 | else if (anchor == '-')
358 | anchor = objnames[match[1]][1] + '-' + fullname;
359 | // add custom score for some objects according to scorer
360 | if (Scorer.objPrio.hasOwnProperty(match[2])) {
361 | score += Scorer.objPrio[match[2]];
362 | } else {
363 | score += Scorer.objPrioDefault;
364 | }
365 | results.push([docnames[match[0]], fullname, '#'+anchor, descr, score, filenames[match[0]]]);
366 | }
367 | }
368 | }
369 |
370 | return results;
371 | },
372 |
373 | /**
374 | * search for full-text terms in the index
375 | */
// AND-search the stemmed `searchterms` (with partial-match fallback for
// words longer than 2 chars), then drop any file containing an excluded
// term; each hit keeps the max per-word score.
376 | performTermsSearch : function(searchterms, excluded, terms, titleterms) {
377 | var docnames = this._index.docnames;
378 | var filenames = this._index.filenames;
379 | var titles = this._index.titles;
380 |
381 | var i, j, file;
382 | var fileMap = {};
383 | var scoreMap = {};
384 | var results = [];
385 |
386 | // perform the search on the required terms
387 | for (i = 0; i < searchterms.length; i++) {
388 | var word = searchterms[i];
389 | var files = [];
390 | var _o = [
391 | {files: terms[word], score: Scorer.term},
392 | {files: titleterms[word], score: Scorer.title}
393 | ];
394 | // add support for partial matches
395 | if (word.length > 2) {
396 | for (var w in terms) {
397 | if (w.match(word) && !terms[word]) {
398 | _o.push({files: terms[w], score: Scorer.partialTerm})
399 | }
400 | }
401 | for (var w in titleterms) {
402 | if (w.match(word) && !titleterms[word]) {
403 | _o.push({files: titleterms[w], score: Scorer.partialTitle})
404 | }
405 | }
406 | }
407 |
408 | // no match but word was a required one
409 | if ($u.every(_o, function(o){return o.files === undefined;})) {
410 | break;
411 | }
412 | // found search word in contents
413 | $u.each(_o, function(o) {
414 | var _files = o.files;
415 | if (_files === undefined)
416 | return
417 |
// the index stores a single file as a bare value, not a 1-element array
418 | if (_files.length === undefined)
419 | _files = [_files];
420 | files = files.concat(_files);
421 |
422 | // set score for the word in each file to Scorer.term
423 | for (j = 0; j < _files.length; j++) {
424 | file = _files[j];
425 | if (!(file in scoreMap))
426 | scoreMap[file] = {}
427 | scoreMap[file][word] = o.score;
428 | }
429 | });
430 |
431 | // create the mapping
432 | for (j = 0; j < files.length; j++) {
433 | file = files[j];
434 | if (file in fileMap)
435 | fileMap[file].push(word);
436 | else
437 | fileMap[file] = [word];
438 | }
439 | }
440 |
441 | // now check if the files don't contain excluded terms
442 | for (file in fileMap) {
443 | var valid = true;
444 |
445 | // check if all requirements are matched
446 | var filteredTermCount = // as search terms with length < 3 are discarded: ignore
447 | searchterms.filter(function(term){return term.length > 2}).length
448 | if (
449 | fileMap[file].length != searchterms.length &&
450 | fileMap[file].length != filteredTermCount
451 | ) continue;
452 |
453 | // ensure that none of the excluded terms is in the search result
454 | for (i = 0; i < excluded.length; i++) {
455 | if (terms[excluded[i]] == file ||
456 | titleterms[excluded[i]] == file ||
457 | $u.contains(terms[excluded[i]] || [], file) ||
458 | $u.contains(titleterms[excluded[i]] || [], file)) {
459 | valid = false;
460 | break;
461 | }
462 | }
463 |
464 | // if we have still a valid result we can add it to the result list
465 | if (valid) {
466 | // select one (max) score for the file.
467 | // for better ranking, we should calculate ranking by using words statistics like basic tf-idf...
468 | var score = $u.max($u.map(fileMap[file], function(w){return scoreMap[file][w]}));
469 | results.push([docnames[file], titles[file], '', null, score, filenames[file]]);
470 | }
471 | }
472 | return results;
473 | },
474 |
475 | /**
476 | * helper function to return a node containing the
477 | * search summary for a given text. keywords is a list
478 | * of stemmed words, hlwords is the list of normal, unstemmed
479 | * words. the first one is used to find the occurrence, the
480 | * latter for highlighting it.
481 | */
482 | makeSearchSummary : function(htmlText, keywords, hlwords) {
483 | var text = Search.htmlToText(htmlText);
484 | var textLower = text.toLowerCase();
485 | var start = 0;
486 | $.each(keywords, function() {
487 | var i = textLower.indexOf(this.toLowerCase());
488 | if (i > -1)
489 | start = i;
490 | });
491 | start = Math.max(start - 120, 0);
// NOTE(review): the trailing ternary tests a nonzero number, not a
// comparison — it appends '...' whenever start+240 != text.length, even
// when the excerpt already reaches the end of the text; the intended
// condition is presumably (start + 240 < text.length).
492 | var excerpt = ((start > 0) ? '...' : '') +
493 | $.trim(text.substr(start, 240)) +
494 | ((start + 240 - text.length) ? '...' : '');
495 | var rv = $('').text(excerpt);
496 | $.each(hlwords, function() {
497 | rv = rv.highlightText(this, 'highlighted');
498 | });
499 | return rv;
500 | }
501 | };
502 |
// Bootstrap: start the search module once the DOM is ready
// (jQuery shorthand for $(document).ready(...)).
$(function() {
  Search.init();
});
506 |
--------------------------------------------------------------------------------
/docs/_static/underscore.js:
--------------------------------------------------------------------------------
1 | // Underscore.js 1.3.1
2 | // (c) 2009-2012 Jeremy Ashkenas, DocumentCloud Inc.
3 | // Underscore is freely distributable under the MIT license.
4 | // Portions of Underscore are inspired or borrowed from Prototype,
5 | // Oliver Steele's Functional, and John Resig's Micro-Templating.
6 | // For all details and documentation:
7 | // http://documentcloud.github.com/underscore
8 | (function(){function q(a,c,d){if(a===c)return a!==0||1/a==1/c;if(a==null||c==null)return a===c;if(a._chain)a=a._wrapped;if(c._chain)c=c._wrapped;if(a.isEqual&&b.isFunction(a.isEqual))return a.isEqual(c);if(c.isEqual&&b.isFunction(c.isEqual))return c.isEqual(a);var e=l.call(a);if(e!=l.call(c))return false;switch(e){case "[object String]":return a==String(c);case "[object Number]":return a!=+a?c!=+c:a==0?1/a==1/c:a==+c;case "[object Date]":case "[object Boolean]":return+a==+c;case "[object RegExp]":return a.source==
9 | c.source&&a.global==c.global&&a.multiline==c.multiline&&a.ignoreCase==c.ignoreCase}if(typeof a!="object"||typeof c!="object")return false;for(var f=d.length;f--;)if(d[f]==a)return true;d.push(a);var f=0,g=true;if(e=="[object Array]"){if(f=a.length,g=f==c.length)for(;f--;)if(!(g=f in a==f in c&&q(a[f],c[f],d)))break}else{if("constructor"in a!="constructor"in c||a.constructor!=c.constructor)return false;for(var h in a)if(b.has(a,h)&&(f++,!(g=b.has(c,h)&&q(a[h],c[h],d))))break;if(g){for(h in c)if(b.has(c,
10 | h)&&!f--)break;g=!f}}d.pop();return g}var r=this,G=r._,n={},k=Array.prototype,o=Object.prototype,i=k.slice,H=k.unshift,l=o.toString,I=o.hasOwnProperty,w=k.forEach,x=k.map,y=k.reduce,z=k.reduceRight,A=k.filter,B=k.every,C=k.some,p=k.indexOf,D=k.lastIndexOf,o=Array.isArray,J=Object.keys,s=Function.prototype.bind,b=function(a){return new m(a)};if(typeof exports!=="undefined"){if(typeof module!=="undefined"&&module.exports)exports=module.exports=b;exports._=b}else r._=b;b.VERSION="1.3.1";var j=b.each=
11 | b.forEach=function(a,c,d){if(a!=null)if(w&&a.forEach===w)a.forEach(c,d);else if(a.length===+a.length)for(var e=0,f=a.length;e2;a==
12 | null&&(a=[]);if(y&&a.reduce===y)return e&&(c=b.bind(c,e)),f?a.reduce(c,d):a.reduce(c);j(a,function(a,b,i){f?d=c.call(e,d,a,b,i):(d=a,f=true)});if(!f)throw new TypeError("Reduce of empty array with no initial value");return d};b.reduceRight=b.foldr=function(a,c,d,e){var f=arguments.length>2;a==null&&(a=[]);if(z&&a.reduceRight===z)return e&&(c=b.bind(c,e)),f?a.reduceRight(c,d):a.reduceRight(c);var g=b.toArray(a).reverse();e&&!f&&(c=b.bind(c,e));return f?b.reduce(g,c,d,e):b.reduce(g,c)};b.find=b.detect=
13 | function(a,c,b){var e;E(a,function(a,g,h){if(c.call(b,a,g,h))return e=a,true});return e};b.filter=b.select=function(a,c,b){var e=[];if(a==null)return e;if(A&&a.filter===A)return a.filter(c,b);j(a,function(a,g,h){c.call(b,a,g,h)&&(e[e.length]=a)});return e};b.reject=function(a,c,b){var e=[];if(a==null)return e;j(a,function(a,g,h){c.call(b,a,g,h)||(e[e.length]=a)});return e};b.every=b.all=function(a,c,b){var e=true;if(a==null)return e;if(B&&a.every===B)return a.every(c,b);j(a,function(a,g,h){if(!(e=
14 | e&&c.call(b,a,g,h)))return n});return e};var E=b.some=b.any=function(a,c,d){c||(c=b.identity);var e=false;if(a==null)return e;if(C&&a.some===C)return a.some(c,d);j(a,function(a,b,h){if(e||(e=c.call(d,a,b,h)))return n});return!!e};b.include=b.contains=function(a,c){var b=false;if(a==null)return b;return p&&a.indexOf===p?a.indexOf(c)!=-1:b=E(a,function(a){return a===c})};b.invoke=function(a,c){var d=i.call(arguments,2);return b.map(a,function(a){return(b.isFunction(c)?c||a:a[c]).apply(a,d)})};b.pluck=
15 | function(a,c){return b.map(a,function(a){return a[c]})};b.max=function(a,c,d){if(!c&&b.isArray(a))return Math.max.apply(Math,a);if(!c&&b.isEmpty(a))return-Infinity;var e={computed:-Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;b>=e.computed&&(e={value:a,computed:b})});return e.value};b.min=function(a,c,d){if(!c&&b.isArray(a))return Math.min.apply(Math,a);if(!c&&b.isEmpty(a))return Infinity;var e={computed:Infinity};j(a,function(a,b,h){b=c?c.call(d,a,b,h):a;bd?1:0}),"value")};b.groupBy=function(a,c){var d={},e=b.isFunction(c)?c:function(a){return a[c]};j(a,function(a,b){var c=e(a,b);(d[c]||(d[c]=[])).push(a)});return d};b.sortedIndex=function(a,
17 | c,d){d||(d=b.identity);for(var e=0,f=a.length;e>1;d(a[g])=0})})};b.difference=function(a){var c=b.flatten(i.call(arguments,1));return b.filter(a,function(a){return!b.include(c,a)})};b.zip=function(){for(var a=i.call(arguments),c=b.max(b.pluck(a,"length")),d=Array(c),e=0;e=0;d--)b=[a[d].apply(this,b)];return b[0]}};
24 | b.after=function(a,b){return a<=0?b():function(){if(--a<1)return b.apply(this,arguments)}};b.keys=J||function(a){if(a!==Object(a))throw new TypeError("Invalid object");var c=[],d;for(d in a)b.has(a,d)&&(c[c.length]=d);return c};b.values=function(a){return b.map(a,b.identity)};b.functions=b.methods=function(a){var c=[],d;for(d in a)b.isFunction(a[d])&&c.push(d);return c.sort()};b.extend=function(a){j(i.call(arguments,1),function(b){for(var d in b)a[d]=b[d]});return a};b.defaults=function(a){j(i.call(arguments,
25 | 1),function(b){for(var d in b)a[d]==null&&(a[d]=b[d])});return a};b.clone=function(a){return!b.isObject(a)?a:b.isArray(a)?a.slice():b.extend({},a)};b.tap=function(a,b){b(a);return a};b.isEqual=function(a,b){return q(a,b,[])};b.isEmpty=function(a){if(b.isArray(a)||b.isString(a))return a.length===0;for(var c in a)if(b.has(a,c))return false;return true};b.isElement=function(a){return!!(a&&a.nodeType==1)};b.isArray=o||function(a){return l.call(a)=="[object Array]"};b.isObject=function(a){return a===Object(a)};
26 | b.isArguments=function(a){return l.call(a)=="[object Arguments]"};if(!b.isArguments(arguments))b.isArguments=function(a){return!(!a||!b.has(a,"callee"))};b.isFunction=function(a){return l.call(a)=="[object Function]"};b.isString=function(a){return l.call(a)=="[object String]"};b.isNumber=function(a){return l.call(a)=="[object Number]"};b.isNaN=function(a){return a!==a};b.isBoolean=function(a){return a===true||a===false||l.call(a)=="[object Boolean]"};b.isDate=function(a){return l.call(a)=="[object Date]"};
27 | b.isRegExp=function(a){return l.call(a)=="[object RegExp]"};b.isNull=function(a){return a===null};b.isUndefined=function(a){return a===void 0};b.has=function(a,b){return I.call(a,b)};b.noConflict=function(){r._=G;return this};b.identity=function(a){return a};b.times=function(a,b,d){for(var e=0;e/g,">").replace(/"/g,""").replace(/'/g,"'").replace(/\//g,"/")};b.mixin=function(a){j(b.functions(a),
28 | function(c){K(c,b[c]=a[c])})};var L=0;b.uniqueId=function(a){var b=L++;return a?a+b:b};b.templateSettings={evaluate:/<%([\s\S]+?)%>/g,interpolate:/<%=([\s\S]+?)%>/g,escape:/<%-([\s\S]+?)%>/g};var t=/.^/,u=function(a){return a.replace(/\\\\/g,"\\").replace(/\\'/g,"'")};b.template=function(a,c){var d=b.templateSettings,d="var __p=[],print=function(){__p.push.apply(__p,arguments);};with(obj||{}){__p.push('"+a.replace(/\\/g,"\\\\").replace(/'/g,"\\'").replace(d.escape||t,function(a,b){return"',_.escape("+
29 | u(b)+"),'"}).replace(d.interpolate||t,function(a,b){return"',"+u(b)+",'"}).replace(d.evaluate||t,function(a,b){return"');"+u(b).replace(/[\r\n\t]/g," ")+";__p.push('"}).replace(/\r/g,"\\r").replace(/\n/g,"\\n").replace(/\t/g,"\\t")+"');}return __p.join('');",e=new Function("obj","_",d);return c?e(c,b):function(a){return e.call(this,a,b)}};b.chain=function(a){return b(a).chain()};var m=function(a){this._wrapped=a};b.prototype=m.prototype;var v=function(a,c){return c?b(a).chain():a},K=function(a,c){m.prototype[a]=
30 | function(){var a=i.call(arguments);H.call(a,this._wrapped);return v(c.apply(b,a),this._chain)}};b.mixin(b);j("pop,push,reverse,shift,sort,splice,unshift".split(","),function(a){var b=k[a];m.prototype[a]=function(){var d=this._wrapped;b.apply(d,arguments);var e=d.length;(a=="shift"||a=="splice")&&e===0&&delete d[0];return v(d,this._chain)}});j(["concat","join","slice"],function(a){var b=k[a];m.prototype[a]=function(){return v(b.apply(this._wrapped,arguments),this._chain)}});m.prototype.chain=function(){this._chain=
31 | true;return this};m.prototype.value=function(){return this._wrapped}}).call(this);
32 |
--------------------------------------------------------------------------------
/docs/changelog.rst:
--------------------------------------------------------------------------------
1 | History
2 | =======
3 |
4 | 0.2.0-beta (24-09-2020)
5 | -----------------------
6 | * updated API and documentation for compatibility with Tensorflow 2.0
7 | * added Implementations of FBCSP-ShallowNet and Deep4
8 | * added sorting components for interpretation by recursive elimination to LFCNN
9 |
10 |
11 |
12 | 0.1.1-beta (24-06-2019)
13 | -----------------------
14 | * massively updated API and examples.
15 | * added basic documentation.
16 | * compatibility with previous version broken.
17 |
18 |
19 | 0.1.0-alpha (20-05-2019)
20 | ------------------------
21 | * initial version
22 |
--------------------------------------------------------------------------------
/docs/conf.py:
--------------------------------------------------------------------------------
1 | # Configuration file for the Sphinx documentation builder.
2 | #
3 | # This file only contains a selection of the most common options. For a full
4 | # list see the documentation:
5 | # http://www.sphinx-doc.org/en/master/config
6 |
7 | # -- Path setup --------------------------------------------------------------
8 |
9 | # If extensions (or modules to document with autodoc) are in another directory,
10 | # add these directories to sys.path here. If the directory is relative to the
11 | # documentation root, use os.path.abspath to make it absolute, like shown here.
12 | #
13 | import os
14 | import sys
15 | sys.path.insert(0, os.path.abspath('..'))
16 |
17 |
18 | # -- Project information -----------------------------------------------------
19 |
20 | project = 'MNEflow'
21 | copyright = '2019, Ivan Zubarev'
22 | author = 'Ivan Zubarev'
23 |
24 | # The full version, including alpha/beta/rc tags
25 | release = '0.5.3'
26 |
def skip(app, what, name, obj, would_skip, options):
    """autodoc-skip-member hook: always document ``__init__``.

    Sphinx skips ``__init__`` methods by default; returning False here
    forces them into the generated docs. Every other member keeps the
    default decision computed by autodoc (``would_skip``).
    """
    return False if name == "__init__" else would_skip
31 |
32 |
def setup(app):
    """Sphinx extension entry point.

    Registers the ``skip`` hook so that ``__init__`` members are
    documented despite autodoc's default behaviour.
    """
    app.connect("autodoc-skip-member", skip)
35 |
36 | # -- General configuration ---------------------------------------------------
37 |
38 | # Add any Sphinx extension module names here, as strings. They can be
39 | # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
40 | # ones.
41 | extensions = ['sphinx.ext.autodoc', 'sphinx.ext.napoleon']
42 |
43 | # Add any paths that contain templates here, relative to this directory.
44 | templates_path = ['_templates']
45 |
46 | # List of patterns, relative to source directory, that match files and
47 | # directories to ignore when looking for source files.
48 | # This pattern also affects html_static_path and html_extra_path.
49 | exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
50 |
51 |
52 | # -- Options for HTML output -------------------------------------------------
53 |
54 | # The theme to use for HTML and HTML Help pages. See the documentation for
55 | # a list of builtin themes.
56 | #
57 | html_theme = 'sphinx_rtd_theme'
58 |
59 | # Add any paths that contain custom static files (such as style sheets) here,
60 | # relative to this directory. They are copied after the builtin static files,
61 | # so a file named "default.css" will overwrite the builtin "default.css".
62 | html_static_path = ['_static']
63 |
--------------------------------------------------------------------------------
/docs/index.rst:
--------------------------------------------------------------------------------
1 | .. MNEflow documentation master file, created by
2 | sphinx-quickstart on Thu Jun 20 14:43:58 2019.
3 | You can adapt this file completely to your liking, but it should at least
4 | contain the root `toctree` directive.
5 |
6 | Welcome to MNEflow's documentation!
7 | ===================================
8 |
9 | .. toctree::
10 | :maxdepth: 2
11 | :caption: Contents:
12 |
13 | intro
14 | models
15 | mneflow_api
16 | changelog
17 |
18 |
19 |
20 |
21 | Indices and tables
22 | ==================
23 | * :ref:`genindex`
24 | * :ref:`modindex`
25 | * :ref:`search`
26 |
--------------------------------------------------------------------------------
/docs/intro.rst:
--------------------------------------------------------------------------------
1 | MNEflow
2 | =======
3 | MNEflow provides a convenient way to apply neural networks implemented in Tensorflow to EEG/MEG data.
4 |
5 |
6 | Installation
7 | ============
8 | >>> pip install mneflow
9 |
10 | Dependencies
11 | ============
12 | * tensorflow > 2.12.0
13 | * mne > 0.19
14 |
15 |
16 | Examples
17 | ========
18 | * Data Import and `basic MNEflow pipeline `_.
19 | * `How to build your own custom network `_.
20 | * `Loading and saving mneflow models `_.
21 |
22 |
23 |
24 | References
25 | ==========
26 | When using the implemented models please cite:
27 |
28 | *for LF-CNN or VAR-CNN*
29 |
30 | Zubarev I, Zetter R, Halme HL, Parkkonen L. Adaptive neural network classifier for decoding MEG signals. Neuroimage. 2019 May 4;197:425-434.
31 | `[link] `_::
32 |
33 | ``@article{Zubarev2019AdaptiveSignals.,
34 | title = {{Adaptive neural network classifier for decoding MEG signals.}},
35 | year = {2019},
36 | journal = {NeuroImage},
37 | author = {Zubarev, Ivan and Zetter, Rasmus and Halme, Hanna-Leena and Parkkonen, Lauri},
38 | month = {5},
39 | pages = {425--434},
40 | volume = {197},
41 | url = {https://linkinghub.elsevier.com/retrieve/pii/S1053811919303544 http://www.ncbi.nlm.nih.gov/pubmed/31059799},
42 | doi = {10.1016/j.neuroimage.2019.04.068},
43 | issn = {1095-9572},
44 | pmid = {31059799},
45 | keywords = {Brain–computer interface, Convolutional neural network, Magnetoencephalography}}``
46 |
47 |
48 | *for EEGNet*::
49 |
50 | ``@article{Lawhern2018,
51 | author={Vernon J Lawhern and Amelia J Solon and Nicholas R Waytowich and Stephen M Gordon and Chou P Hung and Brent J Lance},
52 | title={EEGNet: a compact convolutional neural network for EEG-based brain–computer interfaces},
53 | journal={Journal of Neural Engineering},
54 | volume={15},
55 | number={5},
56 | pages={056013},
57 | url={http://stacks.iop.org/1741-2552/15/i=5/a=056013},
58 | year={2018}}``
59 |
60 |
61 | *for FBCSP-ShallowNet and Deep4*::
62 |
63 | ``@article{Schirrmeister2017DeepVisualization,
64 | title = {{Deep learning with convolutional neural networks for EEG decoding and visualization}},
65 | year = {2017},
66 | journal = {Human Brain Mapping},
67 | author = {Schirrmeister, Robin Tibor and Springenberg, Jost Tobias and Fiederer, Lukas Dominique Josef and Glasstetter, Martin and Eggensperger, Katharina and Tangermann, Michael and Hutter, Frank and Burgard, Wolfram and Ball, Tonio},
68 | number = {11},
69 | month = {11},
70 | pages = {5391--5420},
71 | volume = {38},
72 | url = {http://doi.wiley.com/10.1002/hbm.23730},
73 | doi = {10.1002/hbm.23730},
74 | issn = {10659471},
75 | keywords = {EEG analysis, brain, brain mapping, computer interface, electroencephalography, end‐to‐end learning, machine interface, machine learning, model interpretability}
76 | }``
77 |
78 |
79 |
--------------------------------------------------------------------------------
/docs/make.bat:
--------------------------------------------------------------------------------
1 | @ECHO OFF
2 |
3 | pushd %~dp0
4 |
5 | REM Command file for Sphinx documentation
6 |
7 | if "%SPHINXBUILD%" == "" (
8 | set SPHINXBUILD=sphinx-build
9 | )
10 | set SOURCEDIR=.
11 | set BUILDDIR=_build
12 |
13 | if "%1" == "" goto help
14 |
15 | %SPHINXBUILD% >NUL 2>NUL
16 | if errorlevel 9009 (
17 | echo.
18 | echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
19 | echo.installed, then set the SPHINXBUILD environment variable to point
20 | echo.to the full path of the 'sphinx-build' executable. Alternatively you
21 | echo.may add the Sphinx directory to PATH.
22 | echo.
23 | echo.If you don't have Sphinx installed, grab it from
24 | echo.http://sphinx-doc.org/
25 | exit /b 1
26 | )
27 |
28 | %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
29 | goto end
30 |
31 | :help
32 | %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
33 |
34 | :end
35 | popd
36 |
--------------------------------------------------------------------------------
/docs/mneflow_api.rst:
--------------------------------------------------------------------------------
1 | MNEflow API
2 | ===========
3 |
4 | mneflow.utils.MetaData class
5 | ******************************
6 |
7 | .. autoclass:: mneflow.utils.MetaData
8 | :members:
9 | :undoc-members:
10 |
11 |
12 | mneflow.models.BaseModel class
13 | ******************************
14 |
15 | .. autoclass:: mneflow.models.BaseModel
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | mneflow.Dataset
21 | ***************
22 |
23 | .. autoclass:: mneflow.data.Dataset
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
28 | mneflow.utils
29 | *************
30 |
31 | .. automodule:: mneflow.utils
32 | :members:
33 | :undoc-members:
34 | :show-inheritance:
35 | :noindex: mneflow.utils.MetaData
36 |
37 |
38 | mneflow.layers
39 | **************
40 |
41 | .. automodule:: mneflow.layers
42 | :members:
43 | :show-inheritance:
44 |
45 |
46 |
47 |
--------------------------------------------------------------------------------
/docs/models.rst:
--------------------------------------------------------------------------------
1 | Implemented models
2 | ******************
3 |
4 | LFCNN
5 | -----
6 |
7 | .. autoclass:: mneflow.models.LFCNN
8 | :members:
9 | :undoc-members:
10 | :show-inheritance:
11 |
12 | VARCNN
13 | ------
14 |
15 | .. autoclass:: mneflow.models.VARCNN
16 | :members:
17 | :undoc-members:
18 | :show-inheritance:
19 |
20 | EEGNet
21 | ------
22 |
23 | .. autoclass:: mneflow.models.EEGNet
24 | :members:
25 | :undoc-members:
26 | :show-inheritance:
27 |
28 | FBCSP_ShallowNet
29 | ----------------
30 |
31 | .. autoclass:: mneflow.models.FBCSP_ShallowNet
32 | :members:
33 | :undoc-members:
34 | :show-inheritance:
35 |
36 | Deep4
37 | -----
38 |
39 | .. autoclass:: mneflow.models.Deep4
40 | :members:
41 | :undoc-members:
42 | :show-inheritance:
43 |
44 |
--------------------------------------------------------------------------------
/docs/requirements.txt:
--------------------------------------------------------------------------------
1 | tensorflow >= 2.12.0
2 | mne >= 0.19.0
3 | numpydoc>=0.9.1
4 | matplotlib
5 | pip>=8.1.1
6 | nose>=1.1.2
7 | sphinx>=2.0.1
8 | coverage>=3.7.1
9 | sphinx_rtd_theme>=0.1.6
10 |
--------------------------------------------------------------------------------
/examples/.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# Importing data\n"
8 | ]
9 | },
10 | {
11 | "cell_type": "code",
12 | "execution_count": 1,
13 | "metadata": {},
14 | "outputs": [],
15 | "source": [
16 | "import numpy as np\n",
17 | "import os \n",
18 | "import tensorflow as tf\n",
19 | "from time import time\n",
20 | "import mneflow\n"
21 | ]
22 | },
23 | {
24 | "cell_type": "markdown",
25 | "metadata": {},
26 | "source": [
27 | "## 1. from MNE epochs"
28 | ]
29 | },
30 | {
31 | "cell_type": "markdown",
32 | "metadata": {},
33 | "source": [
34 | "#### If you use MNE-python, all you need is to provide your epochs file (or list of epoch files) to mneflow.produce_tfrecords\n"
35 | ]
36 | },
37 | {
38 | "cell_type": "code",
39 | "execution_count": 2,
40 | "metadata": {},
41 | "outputs": [],
42 | "source": [
43 | "#get to epochs using your mne-python pipeline\n",
44 | "import mne\n",
45 | "from mne.datasets import multimodal\n",
46 | "mne.set_log_level(verbose='CRITICAL')\n",
47 | "#print(__doc__)\n",
48 | "\n",
49 | "fname_raw = os.path.join(multimodal.data_path(), 'multimodal_raw.fif')\n",
50 | "raw = mne.io.read_raw_fif(fname_raw)\n",
51 | "\n",
52 | "#event_id = {}\n",
53 | "cond = raw.acqparser.get_condition(raw, None)\n",
54 | "epochs_list = [mne.Epochs(raw, **c) for c in cond]\n",
55 | "\n",
56 | "#here we concatenate epochs because each input file contains just one condition\n",
57 | "#otherwise mneflow.produce_tfrecords can handle a list of epochs objects\n",
58 | "epochs = mne.concatenate_epochs(epochs_list)"
59 | ]
60 | },
61 | {
62 | "cell_type": "code",
63 | "execution_count": 3,
64 | "metadata": {},
65 | "outputs": [],
66 | "source": [
67 | "#Specify import options\n",
68 | "import_opt = dict(savepath='../my_TFRs/', #path to where the TFR files will be saved\n",
69 | " out_name='mne_sample_epochs', #name of TFRecords files\n",
70 | " input_type='epochs', #can also be \"epochs\"\n",
71 | " picks={'meg':'grad'}, #used only if input_type is mne.epochs.Epochs or path to saved '*-epo.fif'\n",
72 | " scale=True, #apply baseline_scaling?\n",
73 | " crop_baseline=True,\n",
74 | " bp_filter = (1.,45.),\n",
75 | " decimate = 2,\n",
76 | " scale_interval=78, #baseline, TODO: define automatically for epochs objects\n",
77 | " savebatch=8, # number of input files per TFRecord file \n",
78 | " save_orig=False, # whether to produce separate TFR-file for inputs in original order\n",
79 | " val_size=0.1)\n",
80 | "\n",
81 | "#whenever you import a dataset a copy of meta is also saved to savepath/meta.pkl so it can be restored at any time\n",
82 | "if os.path.exists(import_opt['savepath']+'meta.pkl'):\n",
83 | " meta = mneflow.load_meta(import_opt['savepath'])\n",
84 | "else:\n",
85 | " meta = mneflow.produce_tfrecords(epochs,**import_opt) "
86 | ]
87 | },
88 | {
89 | "cell_type": "markdown",
90 | "metadata": {},
91 | "source": [
92 | "Alternatively, if your epochs are saved to disk provide a str (or list of str) with path(s) to your -epo.fif files\n",
93 | "\n",
94 | "e.g. this will work\n",
95 | "\n",
96 | "```python\n",
97 | "epochs.save('test_saved_epochs.fif')\n",
98 | "meta = mneflow.produce_tfrecords('test_saved_epochs.fif',**opt)\n",
99 | "```\n",
100 | "\n",
101 | "if the first argument is str this function can also accept *.mat or *.npz format\n",
102 | "\n",
103 | "e.g.\n",
104 | "\n",
105 | "```python\n",
106 | "data_path = '/m/nbe/scratch/braindata/izbrv/detection_data/'\n",
107 | "filenames = [data_path +'sub' + str(i) + '-grad.npz' for i in range(1,4)]\n",
108 | "meta = mneflow.produce_tfrecords(filenames,**opt)\n",
109 | "```\n",
110 | "In this case, specify input_type='array', and also provide array_keys keyword argument\n",
111 | "\n",
112 | "e.g. \n",
113 | "\n",
114 | "```python\n",
115 | "array_keys={'X':'my_data_samples','y':'my_labels'}\n",
116 | "```\n",
117 | "#note that \"picks\" works only for input_type=\"epochs\""
118 | ]
119 | },
120 | {
121 | "cell_type": "markdown",
122 | "metadata": {},
123 | "source": [
124 | "# Choose from already implemented models"
125 | ]
126 | },
127 | {
128 | "cell_type": "code",
129 | "execution_count": 4,
130 | "metadata": {},
131 | "outputs": [
132 | {
133 | "name": "stdout",
134 | "output_type": "stream",
135 | "text": [
136 | "WARNING:tensorflow:From /u/62/zubarei1/unix/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/data/ops/dataset_ops.py:1419: colocate_with (from tensorflow.python.framework.ops) is deprecated and will be removed in a future version.\n",
137 | "Instructions for updating:\n",
138 | "Colocations handled automatically by placer.\n",
139 | "lf-conv _init\n",
140 | "WARNING:tensorflow:From /m/home/home6/62/zubarei1/data/Desktop/projects/papers/DLforMEG/mneflow/mneflow/layers.py:52: calling dropout (from tensorflow.python.ops.nn_ops) with keep_prob is deprecated and will be removed in a future version.\n",
141 | "Instructions for updating:\n",
142 | "Please use `rate` instead of `keep_prob`. Rate should be set to `rate = 1 - keep_prob`.\n",
143 | "dense _init\n",
144 | "Initialization complete!\n"
145 | ]
146 | }
147 | ],
148 | "source": [
149 | "#specify model parameters\n",
150 | "params = dict(l1_lambda=1e-7,\n",
151 | " learn_rate=3e-4,\n",
152 | " dropout = .5,\n",
153 | " patience = 3,# patience for early stopping\n",
154 | " min_delta = 5e-6,\n",
155 | " test_upd_batch = 20,#pseudo-real time test batch size\n",
156 | " n_epochs = 1000, #total training epochs\n",
157 | " eval_step = 50, #evaluate validation loss every 50 epochs\n",
158 | " n_batch = 200,\n",
159 | " #these are specific to LF-CNN]\n",
160 | " n_ls=32, #number of latent factors\n",
161 | " nonlin_in = tf.identity, #input layer activation for var-cnn and lf-cnn\n",
162 | " nonlin_hid = tf.nn.relu, #convolution layer activation for var-cnn and lf-cnn\n",
163 | " nonlin_out = tf.identity, #output layer activation for var-cnn and lf-cnn\n",
164 | " filter_length=32, #convolutional filter length for var-cnn and lf-cnn\n",
165 | " pooling = 6, #convlayer pooling factor for var-cnn and lf-cnn\n",
166 | " stride = 1, #stride parameter for convolution filter\n",
167 | " ) #training batch size) \n",
168 | "\n",
169 | "#specify the path to store the saved model\n",
170 | "model_path = '/m/nbe/scratch/braindata/izbrv/detection_data/tfr/'\n",
171 | "\n",
172 | "model = mneflow.models.LFCNN(meta,params,model_path)"
173 | ]
174 | },
175 | {
176 | "cell_type": "code",
177 | "execution_count": 5,
178 | "metadata": {},
179 | "outputs": [
180 | {
181 | "name": "stdout",
182 | "output_type": "stream",
183 | "text": [
184 | "epoch 0, train_loss 2.21819, train acc 0.152174 val loss 2.49855, val acc 0.117021\n",
185 | "epoch 50, train_loss 0.568426, train acc 0.869565 val loss 1.3934, val acc 0.446809\n",
186 | "epoch 100, train_loss 0.108891, train acc 1 val loss 0.690103, val acc 0.776596\n",
187 | "epoch 150, train_loss 0.0415108, train acc 1 val loss 0.600958, val acc 0.797872\n",
188 | "epoch 200, train_loss 0.0132537, train acc 1 val loss 0.44546, val acc 0.87234\n",
189 | "* Patience count 1\n",
190 | "epoch 300, train_loss 0.0137334, train acc 1 val loss 0.381117, val acc 0.93617\n",
191 | "* Patience count 2\n",
192 | "* Patience count 3\n",
193 | "* Patience count 4\n",
194 | "* Patience count 5\n",
195 | "early stopping...\n",
196 | "WARNING:tensorflow:From /u/62/zubarei1/unix/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/training/saver.py:1266: checkpoint_exists (from tensorflow.python.training.checkpoint_management) is deprecated and will be removed in a future version.\n",
197 | "Instructions for updating:\n",
198 | "Use standard file APIs to check for files with this prefix.\n",
199 | "INFO:tensorflow:Restoring parameters from /m/nbe/scratch/braindata/izbrv/detection_data/tfr/lf-cnn-mne_sample_epochs\n",
200 | "stopped at: epoch 500, val loss 0.381117, val acc 0.93617\n",
201 | "Trained in 137.08s\n"
202 | ]
203 | }
204 | ],
205 | "source": [
206 | "#train the model\n",
207 | "start = time()\n",
208 | "model.train()\n",
209 | "stop = time() - start\n",
210 | "print('Trained in {:.2f}s'.format(stop))"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 6,
216 | "metadata": {},
217 | "outputs": [],
218 | "source": [
219 | "#evaluate performance\n",
220 | "#TODO: add across-subject example\n",
221 | "#test_accs = model.evaluate_performance(meta['orig_paths'], batch_size=120)\n",
222 | "#prt_test_acc, prt_logits = model.evaluate_realtime(meta['orig_paths'], batch_size=120, step_size=params['test_upd_batch'])\n",
223 | "#results = {'val_acc':model.v_acc[0], 'test_init':np.mean(test_accs), 'test_upd':np.mean(prt_test_acc), 'sid':meta['architecture']} # 'train_time':stop,"
224 | ]
225 | },
226 | {
227 | "cell_type": "code",
228 | "execution_count": 7,
229 | "metadata": {},
230 | "outputs": [
231 | {
232 | "data": {
233 | "text/plain": [
234 | ""
235 | ]
236 | },
237 | "metadata": {},
238 | "output_type": "display_data"
239 | }
240 | ],
241 | "source": [
242 | "model.compute_patterns(output='patterns')\n",
243 | "#explore output layer weights\n",
244 | "#TODO: Fix bug related to varying sampling rates and pooling factors\n",
245 | "#f = model.plot_out_weihts()\n",
246 | "\n",
247 | "#explore informative spatial patterns(LF-CNN only)\n",
248 | "#TODO: Fix visualizations\n",
249 | "f = model.plot_patterns(sensor_layout='Vectorview-grad', sorting='best', spectra=True, scale=True)"
250 | ]
251 | },
252 | {
253 | "cell_type": "markdown",
254 | "metadata": {},
255 | "source": [
256 | "# Specify your own neural network"
257 | ]
258 | },
259 | {
260 | "cell_type": "code",
261 | "execution_count": 10,
262 | "metadata": {},
263 | "outputs": [
264 | {
265 | "name": "stdout",
266 | "output_type": "stream",
267 | "text": [
268 | "Initialization complete!\n",
269 | "epoch 0, train_loss 1.81289, train acc 0.630435 val loss 1.73704, val acc 0.691489\n",
270 | "epoch 50, train_loss 1.27426, train acc 1 val loss 1.39943, val acc 0.93617\n",
271 | "epoch 100, train_loss 1.27419, train acc 1 val loss 1.39228, val acc 0.93617\n",
272 | "* Patience count 1\n",
273 | "* Patience count 2\n",
274 | "epoch 250, train_loss 1.27413, train acc 1 val loss 1.38655, val acc 0.93617\n",
275 | "* Patience count 3\n",
276 | "early stopping...\n",
277 | "INFO:tensorflow:Restoring parameters from /m/nbe/scratch/braindata/izbrv/detection_data/tfr/my_own-mne_sample_epochs\n",
278 | "stopped at: epoch 300, val loss 1.38655, val acc 0.93617\n",
279 | "Trained in 113.67s\n"
280 | ]
281 | }
282 | ],
283 | "source": [
284 | "#let's make a simple linear classifier using all channels*timepoints as features with keras\n",
285 | "params = dict(l1_lambda=0,\n",
286 | " learn_rate=3e-4,\n",
287 | " dropout = .5,\n",
288 | " patience = 3,# patience for early stopping\n",
289 | " min_delta = 5e-3, #note the increased convergence threshold1\n",
290 | " test_upd_batch = 20,#pseudo-real time test batch size\n",
291 | " n_epochs = 1000, #total training epochs\n",
292 | " #nonlin_out=tf.identity,\n",
293 | " eval_step = 50, #evaluate validation loss every 50 epochs\n",
294 | " n_batch = 200) #training batch size) \n",
295 | "\n",
296 | "from tensorflow.keras.layers import Dense, Flatten, Activation\n",
297 | "from tensorflow.keras.constraints import max_norm\n",
298 | "\n",
299 | "\n",
300 | "\n",
301 | "class MyNetwork(mneflow.models.Model):\n",
302 | " #all you need to do is to override the computational graph with your own\n",
303 | " def _build_graph(self):\n",
304 | " self.h_params['architecture'] = 'my_own'\n",
305 | " input_main = self.X\n",
306 | " flatten = Flatten()(input_main)\n",
307 | " dense = Dense(self.h_params['n_classes'], kernel_constraint = max_norm(0.5))(flatten)\n",
308 | " y_pred = Activation('softmax')(dense)\n",
309 | " return y_pred\n",
310 | " \n",
311 | "m2 = MyNetwork(meta,params,model_path)\n",
312 | "\n",
313 | "start = time()\n",
314 | "m2.train()\n",
315 | "stop = time() - start\n",
316 | "print('Trained in {:.2f}s'.format(stop))\n",
317 | "\n",
318 | "\n",
319 | "# #evaluate performance\n",
320 | "# test_accs = m2.evaluate_performance(meta['orig_paths'], batch_size=120)"
321 | ]
322 | },
323 | {
324 | "cell_type": "markdown",
325 | "metadata": {},
326 | "source": [
327 | "We observe that our \"custom\" model performed equally well as LF-CNN in terms of accuracy on the validation set. Yet, the loss function estimate on the validation set is much lower for LF-CNN. This result is not very surprising, since LF-CNN has much more constrained solution space optimized for across-subjects decoding."
328 | ]
329 | },
330 | {
331 | "cell_type": "markdown",
332 | "metadata": {},
333 | "source": [
334 | "#TODO: across-subject/leave-one-subject-out example"
335 | ]
336 | }
337 | ],
338 | "metadata": {
339 | "kernelspec": {
340 | "display_name": "Python 3",
341 | "language": "python",
342 | "name": "python3"
343 | },
344 | "language_info": {
345 | "codemirror_mode": {
346 | "name": "ipython",
347 | "version": 3
348 | },
349 | "file_extension": ".py",
350 | "mimetype": "text/x-python",
351 | "name": "python",
352 | "nbconvert_exporter": "python",
353 | "pygments_lexer": "ipython3",
354 | "version": "3.7.3"
355 | }
356 | },
357 | "nbformat": 4,
358 | "nbformat_minor": 2
359 | }
360 |
--------------------------------------------------------------------------------
/examples/continuous_example.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Created on Fri Aug 5 11:32:13 2022
5 |
6 | @author: zubarei1
7 | """
8 | import numpy as np
9 | import tensorflow as tf
10 | import os
11 | os.chdir('C:\\Users\\ipzub\\projs\\mneflow\\')
12 | import mneflow
13 |
14 |
15 |
16 | # Dataset parameters
17 | dpath = 'C:\\data\\bci4_ecog\\'
18 | fname = 'sub1_comp.mat'
19 |
20 | import scipy.io as sio
21 | datafile = sio.loadmat(dpath + fname)
22 | data = datafile['train_data'].T[np.newaxis,...]
23 | print(data.shape)
24 | events = datafile["train_dg"].T[np.newaxis,...]
25 | print(events.shape)
26 | #%% define transform_targets function
27 |
def transform_targets(targets):
    """Reduce the raw target signals to one regression target per trial.

    This function specifies transformations of the input signals
    (e.g. accelerometer/motion capture) that produce the one-dimensional
    target variable used for regression.

    Parameters
    ----------
    targets : np.array
        Measurements used to produce the target variable. For continuous
        inputs, targets should have dimensions (n_trials, t, channels),
        where t is the same number of time samples as in the data X.

    Returns
    -------
    out : np.array
        Transformed target variables with dimensions (n_trials, 1).
    """
    # As an illustration, take the mean of the last 50 samples of the
    # first channel of the motion capture signal for each trial.
    # (The dead `out = []` initialization that was immediately
    # overwritten has been removed.)
    out = np.array([t[-50:, 0].mean(axis=0, keepdims=True) for t in targets])

    print(out.shape)
    # Downstream code expects a 2-d (n_trials, 1) array.
    assert out.ndim == 2
    return out
#%% Write the raw arrays out as TFRecords
# See mneflow.produce_tfrecords / mneflow.MetaData docstrings for the
# meaning of the individual options.
import_opt = dict(fs=1000,                 # sampling frequency, Hz
                  # NOTE(review): dpath already ends with '\\', so this
                  # yields 'C:\\data\\bci4_ecog\\/tfr/' -- works on Windows
                  # but mixes separators.
                  savepath=dpath + '/tfr/',
                  out_name='cont_example',
                  input_type='continuous',
                  overwrite=True,
                  transform_targets=transform_targets,
                  target_type='float',
                  segment=625,             # segment length in time samples
                  array_keys={"X":"train_data", "y":"train_dg"},
                  #augment=True,
                  scale = True,
                  # scale_y = True,
                  aug_stride=125,          # stride for overlapping segments
                  save_as_numpy=True
                  )

meta = mneflow.produce_tfrecords((data, events), **import_opt)

#%% Build and train an LF-CNN regressor
dataset = mneflow.Dataset(meta, train_batch = 100)

lf_params = dict(n_ls=32, #number of latent factors
                 filter_length=32, #convolutional filter length in time samples
                 pooling = 32,#pooling factor
                 nonlin = tf.nn.relu,
                 stride = 32, #stride parameter for pooling layer
                 padding = 'SAME',
                 dropout = 0.5,
                 model_path = import_opt['savepath'],
                 l1_lambda=3e-4,
                 l1_scope=[ 'fc', 'dmx'],
                 l2_scope=[ 'tconv'],
                 l2_lambda=3e-4,
                 pool_type='max') #path for storing the saved model

model = mneflow.models.LFCNN(dataset, lf_params)
# Mean absolute error loss -- this is a regression problem (float targets).
model.build(loss='mae')

model.train(n_epochs=500, eval_step=50, early_stopping=10)
#model.update_log()
#%% Inspect the fit
# NOTE(review): predictions are made on 'train_paths', i.e. the TRAINING
# data -- this scatter plot shows the fit, not generalization performance.
from matplotlib import pyplot as plt
y_true, y_pred = model.predict(meta['train_paths'])
plt.scatter(y_true, y_pred)
--------------------------------------------------------------------------------
/examples/mneflow_example_tf2.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# MNEflow basic example"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "# 1. Importing data\n",
15 | "\n",
16 | "### 1.1.from MNE epochs"
17 | ]
18 | },
19 | {
20 | "cell_type": "markdown",
21 | "metadata": {},
22 | "source": [
23 | "If you use MNE-python, all you need is to provide your epochs file (or list of epoch files) to mneflow.produce_tfrecords\n"
24 | ]
25 | },
26 | {
27 | "cell_type": "code",
28 | "execution_count": 1,
29 | "metadata": {},
30 | "outputs": [
31 | {
32 | "name": "stdout",
33 | "output_type": "stream",
34 | "text": [
35 | "0.5.3\n"
36 | ]
37 | }
38 | ],
39 | "source": [
40 | "import numpy as np\n",
41 | "\n",
42 | "import mne\n",
43 | "mne.set_log_level(verbose='CRITICAL')\n",
44 | "from mne.datasets import multimodal\n",
45 | "\n",
46 | "import os\n",
47 | "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\n",
48 | "\n",
49 | "import tensorflow as tf\n",
50 | "tf.get_logger().setLevel('ERROR')\n",
51 | "tf.autograph.set_verbosity(0)\n",
52 | "\n",
53 | "import mneflow\n",
54 | "print(mneflow.__version__)\n"
55 | ]
56 | },
57 | {
58 | "cell_type": "code",
59 | "execution_count": 2,
60 | "metadata": {},
61 | "outputs": [
62 | {
63 | "name": "stdout",
64 | "output_type": "stream",
65 | "text": [
66 | "\n"
75 | ]
76 | }
77 | ],
78 | "source": [
79 | "fname_raw = os.path.join(multimodal.data_path(), 'multimodal_raw.fif')\n",
80 | "raw = mne.io.read_raw_fif(fname_raw)\n",
81 | "\n",
82 | "cond = raw.acqparser.get_condition(raw, None)\n",
83 | "# get the list of condition names\n",
84 | "condition_names = [k for c in cond for k,v in c['event_id'].items()]\n",
85 | "epochs_list = [mne.Epochs(raw, **c) for c in cond]\n",
86 | "epochs = mne.concatenate_epochs(epochs_list)\n",
87 | "epochs = epochs.pick_types(meg='grad')\n",
88 | "print(epochs)"
89 | ]
90 | },
91 | {
92 | "cell_type": "markdown",
93 | "metadata": {},
94 | "source": [
95 | "### Convert epochs to TFRecord format\n",
96 | "See `mneflow.MetaData` and `mneflow.produce_tfrecords` docstrings for description of parameters."
97 | ]
98 | },
99 | {
100 | "cell_type": "code",
101 | "execution_count": 3,
102 | "metadata": {},
103 | "outputs": [
104 | {
105 | "name": "stdout",
106 | "output_type": "stream",
107 | "text": [
108 | "processing epochs\n",
109 | "Input shapes: X (n, ch, t) : (940, 204, 361) y (n, [signal_channels], y_shape) : (940, 1) \n",
110 | " input_type : trials target_type : int segment_y : False\n",
111 | "Preprocessing:\n",
112 | "Scaling to interval 0.0 - 60.0\n",
113 | "n: 940\n",
114 | "Splitting into: 6 folds x 156\n",
115 | "Preprocessed: (940, 1, 301, 204) (940, 1) folds: 6 x 156\n",
116 | "Preprocessed targets: (940, 1)\n",
117 | "Prepocessed sample shape: (1, 301, 204)\n",
118 | "Target shape actual/metadata: (8,) (8,)\n",
119 | "Saving TFRecord# 0\n",
120 | "Updating: meta.data\n",
121 | "Updating: meta.preprocessing\n"
122 | ]
123 | }
124 | ],
125 | "source": [
126 | "path = 'C:\\\\data\\\\'\n",
127 | "data_id = 'mne_sample_multimodal'\n",
128 | "\n",
129 | "#Specify import options. \n",
130 | "\n",
131 | "import_opt = dict(path=path, #renamed from 'savepath'\n",
132 | " data_id=data_id, #renamed from 'out_name'\n",
133 | " input_type='trials',\n",
134 | " target_type='int',\n",
135 | " n_folds= 5,\n",
136 | " test_set = 'holdout',\n",
137 | " fs=600,\n",
138 | " overwrite=True,\n",
139 | " picks={'meg':'grad'},\n",
140 | " scale=True, # apply baseline_scaling\n",
141 | " crop_baseline=True, # remove baseline interval after scaling\n",
142 | " decimate=None,\n",
143 | " scale_interval=(0, 60), # indices in time axis corresponding to baseline interval\n",
144 | " )\n",
145 | "\n",
146 | "#write TFRecord files and metadata file\n",
147 | "meta = mneflow.produce_tfrecords(epochs, **import_opt)"
148 | ]
149 | },
150 | {
151 | "cell_type": "markdown",
152 | "metadata": {},
153 | "source": [
154 | "## Other import options\n",
155 | "### 1.2 Saved mne.epochs (*-epo.fif) files\n",
156 | "Alternatively, if your epochs are saved to disk provide a str (or list of str) with path(s) to your -epo.fif files\n",
157 | "\n",
158 | "e.g. this will work\n",
159 | "\n",
160 | "```python\n",
161 | "epochs.save('test_saved_epochs.fif')\n",
162 | "meta = mneflow.produce_tfrecords('test_saved_epochs.fif',**opt)\n",
163 | "```\n",
164 | "### 1.3. Arrays in *.mat or *.npz format\n",
165 | "if the first argument is str mneflow.produce_tfrecords can also accept *.mat or *.npz format\n",
166 | "\n",
167 | "e.g.\n",
168 | "\n",
169 | "```python\n",
170 | "data_path = '.../data_path/'\n",
171 | "filenames = [data_path +'sub' + str(i) + '-grad.npz' for i in range(1,4)]\n",
172 | "meta = mneflow.produce_tfrecords(filenames,**opt)\n",
173 | "```\n",
174 | "In this case, specify input_type='array', and also provide the array_keys keyword argument\n",
175 | "\n",
176 | "e.g. \n",
177 | "\n",
178 | "```python\n",
179 | "array_keys={'X':'my_data_samples','y':'my_labels'}\n",
180 | "```\n",
181 | "\n",
182 | "### 1.4. Tuple of (data, labels)\n",
183 | "Finally, if you have a more complex preprocessing pipeline, you can feed your data and labels as a tuple of arrays\n",
184 | "\n",
185 | "```python\n",
186 | "X = epochs.get_data()\n",
187 | "y = epochs.events[:,2]\n",
188 | "meta = mneflow.produce_tfrecords((X,y),**opt)\n",
189 | "```\n"
190 | ]
191 | },
192 | {
193 | "cell_type": "markdown",
194 | "metadata": {},
195 | "source": [
196 | "# 2. Initialize the dataset object using the generated metadata file\n",
197 | "\n",
198 | "The dataset object includes several methods that allow experimenting with the dataset without the need to repeat the preprocessing or overwriting the TFRecord files each time.\n",
199 | "\n",
200 | "For example, you can train the model using any subset of classes, channels, or reduce the sampling rate by decimating across the time domain.\n",
201 | "\n",
202 | "See `mneflow.Dataset` docstring for more details."
203 | ]
204 | },
205 | {
206 | "cell_type": "code",
207 | "execution_count": 4,
208 | "metadata": {},
209 | "outputs": [
210 | {
211 | "name": "stdout",
212 | "output_type": "stream",
213 | "text": [
214 | "Using class_subset with 7 classes:\n",
215 | "Subset ratio 0.88, Multiplier 1.14\n",
216 | "Using class_subset with 7 classes:\n",
217 | "Subset ratio 1.00, Multiplier 1.00\n",
218 | "Updating: meta.data\n"
219 | ]
220 | }
221 | ],
222 | "source": [
223 | "dataset = mneflow.Dataset(meta, train_batch=100, class_subset=[0, 1, 2, 3, 4, 5, 6])"
224 | ]
225 | },
226 | {
227 | "cell_type": "markdown",
228 | "metadata": {},
229 | "source": [
230 | "# 3. Choose from already implemented models\n",
231 | "\n",
232 | "The MNEflow pipeline consists of two major parts:\n",
233 | "1. dataset\n",
234 | "2. computational graph\n",
235 | "\n",
236 | "Each part has its own set of hyper-parameters and methods that can be tuned. See help for mneflow.Dataset\n",
237 | "and mneflow.models.BaseModel for more details.\n",
238 | "In this example we will use the LF-CNN network\n"
239 | ]
240 | },
241 | {
242 | "cell_type": "code",
243 | "execution_count": 5,
244 | "metadata": {},
245 | "outputs": [
246 | {
247 | "name": "stdout",
248 | "output_type": "stream",
249 | "text": [
250 | "Updating: meta.model_specs\n",
251 | "Using class_subset with 7 classes:\n",
252 | "Subset ratio 1.00, Multiplier 1.00\n",
253 | "Using class_subset with 7 classes:\n",
254 | "Subset ratio 1.00, Multiplier 1.00\n",
255 | "Updating: meta.data\n",
256 | "Setting reg for dmx, to l1\n",
257 | "Built: dmx input: (None, 1, 301, 204)\n",
258 | "Setting reg for tconv, to l1\n",
259 | "Built: tconv input: (None, 1, 301, 32)\n",
260 | "Setting reg for fc, to l1\n",
261 | "Built: fc input: (None, 1, 61, 32)\n",
262 | "Input shape: (1, 301, 204)\n",
263 | "y_pred: (None, 7)\n",
264 | "Initialization complete!\n"
265 | ]
266 | }
267 | ],
268 | "source": [
269 | "# specify model parameters\n",
270 | "lfcnn_params = dict(n_latent=32, #number of latent factors\n",
271 | " filter_length=17, #convolutional filter length in time samples\n",
272 | " nonlin = tf.nn.relu,\n",
273 | " padding = 'SAME',\n",
274 | " pooling = 5,#pooling factor\n",
275 | " stride = 5, #stride parameter for pooling layer\n",
276 | " pool_type='max',\n",
277 | " dropout = .5,\n",
278 | " l1_scope = [\"weights\"],\n",
279 | " l1=3e-3)\n",
280 | "\n",
281 | "meta.update(model_specs=lfcnn_params)\n",
282 | "\n",
283 | "\"\"\"Initialize model\"\"\"\n",
284 | "model = mneflow.models.LFCNN(meta)\n",
285 | "model.build()"
286 | ]
287 | },
288 | {
289 | "cell_type": "markdown",
290 | "metadata": {},
291 | "source": [
292 | "## Training and (cross-)validation modes\n",
293 | "\n",
294 | "When training a model it is often useful to keep track of both: cross-validation and test-set performance.\n",
295 | "\n",
296 | "Default training mode is 'single_fold'\n",
297 | "Other training modes include n_fold cross-validation (mode='cv') and leave-one-subject-out cross-validation (mode='loso'). \n",
298 | "\n",
299 | "In 'cv' (cross-validation) mode with n_folds=5 and designated 'holdout' test set, test set performance is evaluated for each training fold and then averaged.\n",
300 | "\n",
301 | "In 'loso' mode each input tfrecord file is treated as a fold. Thus, if the data from each of n subjects is saved in a separate .tfrecord file, on each fold the model will be trained on n - 1 subjects and then tested on the held out subject. In this setting each 'validation' fold comprises combined data from all (n - 1) subjects, but not the held out subject.\n",
302 | "\n",
303 | "See `mneflow.models.BaseModel.train` docstring for more details.\n"
304 | ]
305 | },
306 | {
307 | "cell_type": "code",
308 | "execution_count": 6,
309 | "metadata": {},
310 | "outputs": [
311 | {
312 | "name": "stdout",
313 | "output_type": "stream",
314 | "text": [
315 | "Updating: meta.train_params\n",
316 | "Class weights: None\n",
317 | "Using class_subset with 7 classes:\n",
318 | "Subset ratio 1.00, Multiplier 1.00\n",
319 | "Epoch 1/20\n",
320 | "50/50 - 5s - loss: 2.2868 - cat_ACC: 0.2262 - val_loss: 1.9014 - val_cat_ACC: 0.3853 - 5s/epoch - 92ms/step\n",
321 | "Epoch 2/20\n",
322 | "50/50 - 4s - loss: 1.7601 - cat_ACC: 0.4502 - val_loss: 1.5109 - val_cat_ACC: 0.6606 - 4s/epoch - 78ms/step\n",
323 | "Epoch 3/20\n",
324 | "50/50 - 4s - loss: 1.2576 - cat_ACC: 0.6834 - val_loss: 1.1294 - val_cat_ACC: 0.8165 - 4s/epoch - 78ms/step\n",
325 | "Epoch 4/20\n",
326 | "50/50 - 4s - loss: 0.8858 - cat_ACC: 0.8179 - val_loss: 0.8813 - val_cat_ACC: 0.8532 - 4s/epoch - 79ms/step\n",
327 | "Epoch 5/20\n",
328 | "50/50 - 4s - loss: 0.6648 - cat_ACC: 0.8926 - val_loss: 0.7658 - val_cat_ACC: 0.8807 - 4s/epoch - 78ms/step\n",
329 | "Epoch 6/20\n",
330 | "50/50 - 4s - loss: 0.5360 - cat_ACC: 0.9330 - val_loss: 0.6990 - val_cat_ACC: 0.9083 - 4s/epoch - 81ms/step\n",
331 | "Epoch 7/20\n",
332 | "50/50 - 4s - loss: 0.4521 - cat_ACC: 0.9589 - val_loss: 0.6786 - val_cat_ACC: 0.8899 - 4s/epoch - 75ms/step\n",
333 | "Epoch 8/20\n",
334 | "50/50 - 4s - loss: 0.4000 - cat_ACC: 0.9768 - val_loss: 0.6638 - val_cat_ACC: 0.9083 - 4s/epoch - 75ms/step\n",
335 | "Epoch 9/20\n",
336 | "50/50 - 4s - loss: 0.3728 - cat_ACC: 0.9800 - val_loss: 0.6511 - val_cat_ACC: 0.8991 - 4s/epoch - 75ms/step\n",
337 | "Epoch 10/20\n",
338 | "50/50 - 4s - loss: 0.3557 - cat_ACC: 0.9823 - val_loss: 0.6434 - val_cat_ACC: 0.8991 - 4s/epoch - 79ms/step\n",
339 | "Epoch 11/20\n",
340 | "50/50 - 4s - loss: 0.3334 - cat_ACC: 0.9923 - val_loss: 0.6458 - val_cat_ACC: 0.9083 - 4s/epoch - 74ms/step\n",
341 | "Epoch 12/20\n",
342 | "50/50 - 4s - loss: 0.3257 - cat_ACC: 0.9913 - val_loss: 0.6361 - val_cat_ACC: 0.9083 - 4s/epoch - 79ms/step\n",
343 | "Epoch 13/20\n",
344 | "50/50 - 4s - loss: 0.3145 - cat_ACC: 0.9943 - val_loss: 0.6406 - val_cat_ACC: 0.8991 - 4s/epoch - 79ms/step\n",
345 | "Epoch 14/20\n",
346 | "50/50 - 4s - loss: 0.3036 - cat_ACC: 0.9953 - val_loss: 0.6548 - val_cat_ACC: 0.9083 - 4s/epoch - 80ms/step\n",
347 | "Epoch 15/20\n",
348 | "50/50 - 4s - loss: 0.2978 - cat_ACC: 0.9972 - val_loss: 0.6325 - val_cat_ACC: 0.8991 - 4s/epoch - 80ms/step\n",
349 | "Epoch 16/20\n",
350 | "50/50 - 4s - loss: 0.2951 - cat_ACC: 0.9955 - val_loss: 0.6312 - val_cat_ACC: 0.9174 - 4s/epoch - 79ms/step\n",
351 | "Epoch 17/20\n",
352 | "50/50 - 4s - loss: 0.2866 - cat_ACC: 0.9981 - val_loss: 0.6249 - val_cat_ACC: 0.9083 - 4s/epoch - 79ms/step\n",
353 | "Epoch 18/20\n",
354 | "50/50 - 4s - loss: 0.2833 - cat_ACC: 0.9972 - val_loss: 0.6310 - val_cat_ACC: 0.9083 - 4s/epoch - 78ms/step\n",
355 | "Epoch 19/20\n",
356 | "50/50 - 4s - loss: 0.2779 - cat_ACC: 0.9985 - val_loss: 0.6306 - val_cat_ACC: 0.9083 - 4s/epoch - 80ms/step\n",
357 | "Epoch 20/20\n",
358 | "50/50 - 4s - loss: 0.2724 - cat_ACC: 0.9985 - val_loss: 0.6275 - val_cat_ACC: 0.9083 - 4s/epoch - 79ms/step\n",
359 | "Using class_subset with 7 classes:\n",
360 | "Subset ratio 1.00, Multiplier 1.00\n",
361 | "single_fold with 1 fold(s) completed. \n",
362 | "\n",
363 | " Validation Performance: \n",
364 | " Loss: 0.6249 +/- 0.0000.\n",
365 | " Metric: 0.9083 +/- 0.0000\n",
366 | "\n",
367 | "\n",
368 | " Test Performance: \n",
369 | " Loss: 0.8290 +/- 0.0000.\n",
370 | " Metric: 0.7895 +/- 0.0000\n",
371 | "Saving updated log to: C:\\data\\models\\lfcnn_log.csv\n"
372 | ]
373 | }
374 | ],
375 | "source": [
376 | "# train the model for 20 epochs and stop the training if validation loss does not decrease during 3 consecutive epochs. \n",
377 | "\n",
378 | "model.train(n_epochs=20, eval_step=50, early_stopping=3, mode='single_fold')"
379 | ]
380 | },
381 | {
382 | "cell_type": "code",
383 | "execution_count": 7,
384 | "metadata": {},
385 | "outputs": [
386 | {
387 | "name": "stdout",
388 | "output_type": "stream",
389 | "text": [
390 | "Using class_subset with 7 classes:\n",
391 | "Subset ratio 1.00, Multiplier 1.00\n",
392 | "Test set: Loss = 0.8290 Accuracy = 0.7895\n"
393 | ]
394 | }
395 | ],
396 | "source": [
397 | "test_loss, test_acc = model.evaluate(meta.data['test_paths'])\n",
398 | "print(\"Test set: Loss = {:.4f} Accuracy = {:.4f}\".format(test_loss, test_acc))"
399 | ]
400 | },
401 | {
402 | "cell_type": "markdown",
403 | "metadata": {},
404 | "source": [
405 | "## Saving the trained model and the associated metadata for future use"
406 | ]
407 | },
408 | {
409 | "cell_type": "code",
410 | "execution_count": 8,
411 | "metadata": {},
412 | "outputs": [
413 | {
414 | "name": "stdout",
415 | "output_type": "stream",
416 | "text": [
417 | "Saving MetaData as mne_sample_multimodal_meta.pkl \n",
418 | "\n",
419 | " to C:\\data\\\n",
420 | "Updating: meta.data\n",
421 | "Updating: meta.train_params\n",
422 | "Updating: meta.model_specs\n",
423 | "Updating: meta.patterns\n",
424 | "Saving MetaData as mne_sample_multimodal_meta.pkl \n",
425 | "\n",
426 | " to C:\\data\\\n"
427 | ]
428 | },
429 | {
430 | "name": "stderr",
431 | "output_type": "stream",
432 | "text": [
433 | "WARNING:absl: has the same name 'Dense' as a built-in Keras object. Consider renaming to avoid naming conflicts when loading with `tf.keras.models.load_model`. If renaming is not possible, pass the object in the `custom_objects` parameter of the load function.\n"
434 | ]
435 | }
436 | ],
437 | "source": [
438 | "meta.save()\n",
439 | "model.save()"
440 | ]
441 | }
442 | ],
443 | "metadata": {
444 | "kernelspec": {
445 | "display_name": "Python 3 (ipykernel)",
446 | "language": "python",
447 | "name": "python3"
448 | },
449 | "language_info": {
450 | "codemirror_mode": {
451 | "name": "ipython",
452 | "version": 3
453 | },
454 | "file_extension": ".py",
455 | "mimetype": "text/x-python",
456 | "name": "python",
457 | "nbconvert_exporter": "python",
458 | "pygments_lexer": "ipython3",
459 | "version": "3.8.3"
460 | }
461 | },
462 | "nbformat": 4,
463 | "nbformat_minor": 2
464 | }
465 |
--------------------------------------------------------------------------------
/examples/mneflow_save_restore.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "markdown",
5 | "metadata": {},
6 | "source": [
7 | "# MNEflow basic example"
8 | ]
9 | },
10 | {
11 | "cell_type": "markdown",
12 | "metadata": {},
13 | "source": [
14 | "### This example assumes that basic example notebook was run and processed tfrecords were saved to 'path'."
15 | ]
16 | },
17 | {
18 | "cell_type": "code",
19 | "execution_count": 1,
20 | "metadata": {},
21 | "outputs": [
22 | {
23 | "name": "stdout",
24 | "output_type": "stream",
25 | "text": [
26 | "0.5.4\n"
27 | ]
28 | }
29 | ],
30 | "source": [
31 | "import numpy as np\n",
32 | "import mne\n",
33 | "from mne.datasets import multimodal\n",
34 | "import os\n",
35 | "import tensorflow as tf\n",
36 | "\n",
37 | "tf.get_logger().setLevel('ERROR')\n",
38 | "tf.autograph.set_verbosity(0)\n",
39 | "mne.set_log_level(verbose='CRITICAL')\n",
40 | "os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'\n",
41 | "\n",
42 | "import mneflow\n",
43 | "print(mneflow.__version__)"
44 | ]
45 | },
46 | {
47 | "cell_type": "markdown",
48 | "metadata": {},
49 | "source": [
50 | "### If we've already imported the data, we can reuse the variables 'path' and 'data_id' to restore the mne_sample_dataset. This won't work if the tfrecords from the basic example were not saved."
51 | ]
52 | },
53 | {
54 | "cell_type": "code",
55 | "execution_count": 2,
56 | "metadata": {},
57 | "outputs": [
58 | {
59 | "name": "stdout",
60 | "output_type": "stream",
61 | "text": [
62 | "Metadata file found, restoring\n"
63 | ]
64 | }
65 | ],
66 | "source": [
67 | "import_opt = dict(path='C:\\\\data\\\\',\n",
68 | " data_id='mne_sample_multimodal',\n",
69 | " overwrite=False,\n",
70 | " )\n",
71 | "\n",
72 | "#here we use None instead of the first required argument\n",
73 | "meta = mneflow.produce_tfrecords(None, **import_opt)"
74 | ]
75 | },
76 | {
77 | "cell_type": "markdown",
78 | "metadata": {},
79 | "source": [
80 | "### Alternatively, we can use path and data_id directly"
81 | ]
82 | },
83 | {
84 | "cell_type": "code",
85 | "execution_count": 3,
86 | "metadata": {},
87 | "outputs": [],
88 | "source": [
89 | "path='C:\\\\data\\\\'\n",
90 | "data_id='mne_sample_multimodal'\n",
91 | "meta = mneflow.utils.load_meta(path + data_id)"
92 | ]
93 | },
94 | {
95 | "cell_type": "markdown",
96 | "metadata": {},
97 | "source": [
98 | "### If metadata contains the trained model (i.e. the model was previously trained) the trained model can be restored using:"
99 | ]
100 | },
101 | {
102 | "cell_type": "code",
103 | "execution_count": 4,
104 | "metadata": {},
105 | "outputs": [
106 | {
107 | "name": "stdout",
108 | "output_type": "stream",
109 | "text": [
110 | "Using class_subset with 7 classes:\n",
111 | "Subset ratio 1.00, Multiplier 1.00\n",
112 | "Using class_subset with 7 classes:\n",
113 | "Subset ratio 1.00, Multiplier 1.00\n",
114 | "Updating: meta.data\n",
115 | "Setting reg for dmx, to l1\n",
116 | "Built: dmx input: (None, 1, 301, 204)\n",
117 | "Setting reg for tconv, to l1\n",
118 | "Built: tconv input: (None, 1, 301, 32)\n",
119 | "Setting reg for fc, to l1\n",
120 | "Built: fc input: (None, 1, 61, 32)\n"
121 | ]
122 | }
123 | ],
124 | "source": [
125 | "model = meta.restore_model()"
126 | ]
127 | },
128 | {
129 | "cell_type": "markdown",
130 | "metadata": {},
131 | "source": [
132 | "### Restored models can continue training. "
133 | ]
134 | },
135 | {
136 | "cell_type": "code",
137 | "execution_count": 5,
138 | "metadata": {},
139 | "outputs": [
140 | {
141 | "name": "stdout",
142 | "output_type": "stream",
143 | "text": [
144 | "Updating: meta.train_params\n",
145 | "Class weights: None\n",
146 | "Using class_subset with 7 classes:\n",
147 | "Subset ratio 1.00, Multiplier 1.00\n",
148 | "Epoch 1/10\n",
149 | "8/8 - 2s - loss: 0.2812 - cat_ACC: 0.9974 - val_loss: 0.6246 - val_cat_ACC: 0.8899 - 2s/epoch - 217ms/step\n",
150 | "Epoch 2/10\n",
151 | "8/8 - 1s - loss: 0.2834 - cat_ACC: 0.9973 - val_loss: 0.6518 - val_cat_ACC: 0.8991 - 774ms/epoch - 97ms/step\n",
152 | "Epoch 3/10\n",
153 | "8/8 - 1s - loss: 0.2753 - cat_ACC: 0.9987 - val_loss: 0.6653 - val_cat_ACC: 0.8991 - 974ms/epoch - 122ms/step\n",
154 | "Epoch 4/10\n",
155 | "8/8 - 1s - loss: 0.2752 - cat_ACC: 0.9973 - val_loss: 0.6449 - val_cat_ACC: 0.9083 - 727ms/epoch - 91ms/step\n",
156 | "Using class_subset with 7 classes:\n",
157 | "Subset ratio 1.00, Multiplier 1.00\n",
158 | "single_fold with 1 fold(s) completed. \n",
159 | "\n",
160 | " Validation Performance: \n",
161 | " Loss: 0.6246 +/- 0.0000.\n",
162 | " Metric: 0.8899 +/- 0.0000\n",
163 | "\n",
164 | "\n",
165 | " Test Performance: \n",
166 | " Loss: 0.8452 +/- 0.0000.\n",
167 | " Metric: 0.8120 +/- 0.0000\n",
168 | "Saving updated log to: C:\\data\\models\\lfcnn_log.csv\n"
169 | ]
170 | }
171 | ],
172 | "source": [
173 | "model.train()"
174 | ]
175 | },
176 | {
177 | "cell_type": "code",
178 | "execution_count": 6,
179 | "metadata": {},
180 | "outputs": [
181 | {
182 | "name": "stdout",
183 | "output_type": "stream",
184 | "text": [
185 | "Using class_subset with 7 classes:\n",
186 | "Subset ratio 1.00, Multiplier 1.00\n",
187 | "Test set: Loss = 0.8452 Accuracy = 0.8120\n"
188 | ]
189 | }
190 | ],
191 | "source": [
192 | "test_loss, test_acc = model.evaluate(meta.data['test_paths'])\n",
193 | "print(\"Test set: Loss = {:.4f} Accuracy = {:.4f}\".format(test_loss, test_acc))"
194 | ]
195 | },
196 | {
197 | "cell_type": "markdown",
198 | "metadata": {},
199 | "source": [
200 | "### Or predict new inputs"
201 | ]
202 | },
203 | {
204 | "cell_type": "code",
205 | "execution_count": 24,
206 | "metadata": {},
207 | "outputs": [],
208 | "source": [
209 | "X, y = [row for row in model.dataset.val.take(1)][0]\n",
210 | "y_pred = model.predict_sample(X[0])"
211 | ]
212 | },
213 | {
214 | "cell_type": "code",
215 | "execution_count": 28,
216 | "metadata": {},
217 | "outputs": [
218 | {
219 | "name": "stdout",
220 | "output_type": "stream",
221 | "text": [
222 | "Predicted: 4, Ground truth 4\n"
223 | ]
224 | }
225 | ],
226 | "source": [
227 | "print(\"Predicted: {}, Ground truth {}\".format(y_pred[0], np.argmax(y[0])))"
228 | ]
229 | }
230 | ],
231 | "metadata": {
232 | "kernelspec": {
233 | "display_name": "Python 3 (ipykernel)",
234 | "language": "python",
235 | "name": "python3"
236 | },
237 | "language_info": {
238 | "codemirror_mode": {
239 | "name": "ipython",
240 | "version": 3
241 | },
242 | "file_extension": ".py",
243 | "mimetype": "text/x-python",
244 | "name": "python",
245 | "nbconvert_exporter": "python",
246 | "pygments_lexer": "ipython3",
247 | "version": "3.8.3"
248 | }
249 | },
250 | "nbformat": 4,
251 | "nbformat_minor": 2
252 | }
253 |
--------------------------------------------------------------------------------
/examples/sequence_data_example.ipynb:
--------------------------------------------------------------------------------
1 | {
2 | "cells": [
3 | {
4 | "cell_type": "code",
5 | "execution_count": 1,
6 | "metadata": {},
7 | "outputs": [
8 | {
9 | "ename": "TypeError",
10 | "evalue": "produce_tfrecords() got an unexpected keyword argument 'val_size'",
11 | "output_type": "error",
12 | "traceback": [
13 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
14 | "\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)",
15 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 39\u001b[0m )\n\u001b[1;32m 40\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m---> 41\u001b[0;31m \u001b[0mmeta\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mmneflow\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mproduce_tfrecords\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mdpath\u001b[0m\u001b[0;34m+\u001b[0m\u001b[0mfname\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m**\u001b[0m\u001b[0mimport_opt\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m",
16 | "\u001b[0;31mTypeError\u001b[0m: produce_tfrecords() got an unexpected keyword argument 'val_size'"
17 | ]
18 | }
19 | ],
20 | "source": [
21 | "import warnings\n",
22 | "warnings.filterwarnings(\"ignore\", category=DeprecationWarning)\n",
23 | "warnings.filterwarnings(\"ignore\", category=PendingDeprecationWarning)\n",
24 | "warnings.filterwarnings(\"ignore\", category=FutureWarning)\n",
25 | "warnings.filterwarnings(\"ignore\", category=RuntimeWarning)\n",
26 | "\n",
27 | "import tensorflow as tf\n",
28 | "tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)\n",
29 | "\n",
30 | "import os\n",
31 | "os.chdir('/m/nbe/project/rtmeg/problearn/mneflow/')\n",
32 | "import mneflow\n",
33 | "\n",
34 | "\n",
35 | "\n",
36 | "# Dataset parameters\n",
37 | "dpath = '/m/nbe/work/zubarei1/collabs/bcicomp_ecog/sub1/'\n",
38 | "fname = 'sub1_comp.mat'\n",
39 | "\n",
40 | "import_opt = dict(fs=1000,\n",
41 | " savepath='./tfr/',\n",
42 | " out_name='bcig_ecog1_seq',\n",
43 | " input_type='iid',\n",
44 | " overwrite=True,\n",
45 | " val_size=0.1,\n",
46 | " array_keys={'X': 'train_data', 'y': 'train_dg'},\n",
47 | " picks=None,\n",
48 | " target_picks=None,\n",
49 | " target_type='float',\n",
50 | " segment=500,\n",
51 | " augment=True,\n",
52 | " aug_stride=50,\n",
53 | " transpose=('X', 'y'),\n",
54 | " scale=True,\n",
55 | " decimate=None,\n",
56 | " bp_filter=False,\n",
57 | " transform_targets=True,\n",
58 | " seq_length=10\n",
59 | " )\n",
60 | "\n",
61 | "meta = mneflow.produce_tfrecords([dpath+fname], **import_opt)"
62 | ]
63 | },
64 | {
65 | "cell_type": "code",
66 | "execution_count": 2,
67 | "metadata": {},
68 | "outputs": [
69 | {
70 | "name": "stdout",
71 | "output_type": "stream",
72 | "text": [
73 | "X0: (?, 62, 500)\n",
74 | "de-mix init : OK\n",
75 | "dmx (?, 500, 1, 32)\n",
76 | "conv init\n",
77 | "conv inint:OK shape: (?, 125, 1, 32)\n",
78 | "fc ::: 4000 1\n",
79 | "fc init : OK\n",
80 | "X: (?, 62, 500, 1)\n",
81 | "y_pred: (?, 1)\n",
82 | "(1,)\n",
83 | "L1 penalty applied to weights\n",
84 | "Initialization complete!\n",
85 | "i 0, tr_loss 2.51894, tr_acc -125.963 v_loss 3.31708, v_acc -160.549\n",
86 | "i 50, tr_loss 0.887892, tr_acc -15.3941 v_loss 0.850267, v_acc -40.273\n",
87 | "i 100, tr_loss 0.468449, tr_acc -2.96821 v_loss 0.55453, v_acc -25.8542\n"
88 | ]
89 | },
90 | {
91 | "ename": "KeyboardInterrupt",
92 | "evalue": "",
93 | "output_type": "error",
94 | "traceback": [
95 | "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
96 | "\u001b[0;31mKeyboardInterrupt\u001b[0m Traceback (most recent call last)",
97 | "\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m\u001b[0m\n\u001b[1;32m 23\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 24\u001b[0m results = model.train(early_stopping=3, min_delta=5e-6, n_iter=50000,\n\u001b[0;32m---> 25\u001b[0;31m eval_step=50)\n\u001b[0m",
98 | "\u001b[0;32m/m/nbe/project/rtmeg/problearn/mneflow/mneflow/models.py\u001b[0m in \u001b[0;36mtrain\u001b[0;34m(self, n_iter, eval_step, min_delta, early_stopping)\u001b[0m\n\u001b[1;32m 152\u001b[0m _ = self.sess.run([self.train_step],\n\u001b[1;32m 153\u001b[0m feed_dict={self.handle: self.train_handle,\n\u001b[0;32m--> 154\u001b[0;31m self.rate: self.specs['dropout']})\n\u001b[0m\u001b[1;32m 155\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mi\u001b[0m \u001b[0;34m%\u001b[0m \u001b[0meval_step\u001b[0m \u001b[0;34m==\u001b[0m \u001b[0;36m0\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 156\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mdataset\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mtrain\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mshuffle\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mbuffer_size\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m10000\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
99 | "\u001b[0;32m~/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36mrun\u001b[0;34m(self, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 948\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 949\u001b[0m result = self._run(None, fetches, feed_dict, options_ptr,\n\u001b[0;32m--> 950\u001b[0;31m run_metadata_ptr)\n\u001b[0m\u001b[1;32m 951\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mrun_metadata\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 952\u001b[0m \u001b[0mproto_data\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mtf_session\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mTF_GetBuffer\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mrun_metadata_ptr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
100 | "\u001b[0;32m~/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run\u001b[0;34m(self, handle, fetches, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1171\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mfinal_fetches\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0mfinal_targets\u001b[0m \u001b[0;32mor\u001b[0m \u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m \u001b[0;32mand\u001b[0m \u001b[0mfeed_dict_tensor\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1172\u001b[0m results = self._do_run(handle, final_targets, final_fetches,\n\u001b[0;32m-> 1173\u001b[0;31m feed_dict_tensor, options, run_metadata)\n\u001b[0m\u001b[1;32m 1174\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1175\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
101 | "\u001b[0;32m~/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_run\u001b[0;34m(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)\u001b[0m\n\u001b[1;32m 1348\u001b[0m \u001b[0;32mif\u001b[0m \u001b[0mhandle\u001b[0m \u001b[0;32mis\u001b[0m \u001b[0;32mNone\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1349\u001b[0m return self._do_call(_run_fn, feeds, fetches, targets, options,\n\u001b[0;32m-> 1350\u001b[0;31m run_metadata)\n\u001b[0m\u001b[1;32m 1351\u001b[0m \u001b[0;32melse\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1352\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0m_prun_fn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeeds\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetches\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
102 | "\u001b[0;32m~/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_do_call\u001b[0;34m(self, fn, *args)\u001b[0m\n\u001b[1;32m 1354\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_do_call\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1355\u001b[0m \u001b[0;32mtry\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1356\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mfn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0margs\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 1357\u001b[0m \u001b[0;32mexcept\u001b[0m \u001b[0merrors\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mOpError\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0me\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1358\u001b[0m \u001b[0mmessage\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcompat\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mas_text\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0me\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmessage\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
103 | "\u001b[0;32m~/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_run_fn\u001b[0;34m(feed_dict, fetch_list, target_list, options, run_metadata)\u001b[0m\n\u001b[1;32m 1339\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_extend_graph\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1340\u001b[0m return self._call_tf_sessionrun(\n\u001b[0;32m-> 1341\u001b[0;31m options, feed_dict, fetch_list, target_list, run_metadata)\n\u001b[0m\u001b[1;32m 1342\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1343\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_prun_fn\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
104 | "\u001b[0;32m~/.conda/envs/py3ml/lib/python3.7/site-packages/tensorflow/python/client/session.py\u001b[0m in \u001b[0;36m_call_tf_sessionrun\u001b[0;34m(self, options, feed_dict, fetch_list, target_list, run_metadata)\u001b[0m\n\u001b[1;32m 1427\u001b[0m return tf_session.TF_SessionRun_wrapper(\n\u001b[1;32m 1428\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_session\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moptions\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mtarget_list\u001b[0m\u001b[0;34m,\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m-> 1429\u001b[0;31m run_metadata)\n\u001b[0m\u001b[1;32m 1430\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 1431\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_call_tf_sessionprun\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mhandle\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfeed_dict\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mfetch_list\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
105 | "\u001b[0;31mKeyboardInterrupt\u001b[0m: "
106 | ]
107 | }
108 | ],
109 | "source": [
110 | "# batch the dataset according to that value\n",
111 | "dataset = mneflow.Dataset(meta, train_batch=1000, class_subset=None, pick_channels=None, decim=None)\n",
112 | "\n",
113 | "# training parameters\n",
114 | "optimizer_params = dict(l1_lambda=3e-6,learn_rate=3e-4, task='regression')\n",
115 | "\n",
116 | "optimizer = mneflow.Optimizer(**optimizer_params)\n",
117 | "# model parameters\n",
118 | "graph_specs = dict(n_ls=32, # number of latent factors\n",
119 | " filter_length=16, # convolutional filter length\n",
120 | " pooling=4, # convlayer pooling factor\n",
121 | " stride=4, # stride parameter for pooling layer\n",
122 | " padding='SAME',\n",
123 | " nonlin=tf.nn.relu,\n",
124 | " pool_type='max',\n",
125 | " model_path=meta['savepath'],\n",
126 | " dropout=.25,)\n",
127 | "\n",
128 | "model = mneflow.models.VARCNN(dataset,optimizer,graph_specs)\n",
129 | "#model = mneflow.models.LFCNNR(dataset,optimizer,graph_specs)\n",
130 | "\n",
131 | "model.build()\n",
132 | "\n",
133 | "results = model.train(early_stopping=3, min_delta=5e-6, n_iter=50000,\n",
134 | " eval_step=50)"
135 | ]
136 | },
137 | {
138 | "cell_type": "code",
139 | "execution_count": null,
140 | "metadata": {},
141 | "outputs": [],
142 | "source": []
143 | }
144 | ],
145 | "metadata": {
146 | "kernelspec": {
147 | "display_name": "Python 3",
148 | "language": "python",
149 | "name": "python3"
150 | },
151 | "language_info": {
152 | "codemirror_mode": {
153 | "name": "ipython",
154 | "version": 3
155 | },
156 | "file_extension": ".py",
157 | "mimetype": "text/x-python",
158 | "name": "python",
159 | "nbconvert_exporter": "python",
160 | "pygments_lexer": "ipython3",
161 | "version": "3.7.3"
162 | }
163 | },
164 | "nbformat": 4,
165 | "nbformat_minor": 2
166 | }
167 |
--------------------------------------------------------------------------------
/mneflow/__init__.py:
--------------------------------------------------------------------------------
1 | import mneflow
2 | from . import models
3 | from . import layers
4 | from . import utils
5 | from . import data
6 | from .utils import produce_tfrecords, load_meta
7 | from .data import Dataset
8 |
9 | mneflow.__version__ = '0.5.11dev'
10 |
11 |
--------------------------------------------------------------------------------
/mneflow/data.py:
--------------------------------------------------------------------------------
1 | #!/usr/bin/env python3
2 | # -*- coding: utf-8 -*-
3 | """
4 | Defines mneflow.Dataset object.
5 |
6 | @author: Ivan Zubarev, ivan.zubarev@aalto.fi
7 | """
8 | import tensorflow as tf
9 | #TODO: fix batching/epoching with training
10 | #TODO: dataset size form h_params
11 |
12 | # import tensorflow.compat.v1 as tf
13 | # tf.disable_v2_behavior()
14 | import numpy as np
15 | from mneflow.utils import _onehot
16 |
17 | class Dataset(object):
18 | """TFRecords dataset from TFRecords files using the metadata."""
19 |
20 | def __init__(self, meta, train_batch=50, test_batch=None, split=True,
21 | class_subset=None, pick_channels=None, decim=None,
22 | rebalance_classes=False, **kwargs):
23 |
24 | r"""Initialize tf.data.TFRdatasets.
25 |
26 | Parameters
27 | ----------
28 | meta : MetaData
29 | Instance of MetaData, output of mneflow.utils.produce_tfrecords.
30 | See mneflow.utils.produce_tfrecords and mneflow.MetaData for details.
31 |
32 | train_batch : int, None, optional
33 | Training mini-batch size. Defaults to 50. If None equals to the
34 | whole training set size
35 |
36 | test_batch : int, None, optional
37 | Training mini-batch size. Defaults to None. If None equals to the
38 | whole test/validation set size
39 |
40 | split : bool
41 | Whether to split dataset into training and validation folds based
42 | on h_params['folds']. Defaults to True. Can be False if dataset is
43 | imported for evaluationg performance on the held-out set or
44 | vizualisations
45 |
46 | class_subset : list of int
47 | Pick a susbet of the classes. Example in 5-class clalssification
48 | problem class_subset=[0, 2, 4] will filter the dataset to
49 | discriminate between these classes, without changing the parameters
50 | of the whole dataset (e.g. y_shape=5)
51 |
52 | pick_channels : array of int
53 | Pick a subset of channels
54 |
55 | decim : int
56 | Apply decimation in time. Note this feature does not check for
57 | aliasing effects.
58 |
59 | rebalance_classes : bool
60 | Apply rejection sampling to oversample underrepresented classes.
61 | Defaults to False.
62 |
63 | """
64 | self.h_params = meta.data
65 | if pick_channels or not 'channel_subset' in self.h_params.keys():
66 | self.h_params['channel_subset'] = pick_channels
67 | if class_subset or not 'class_subset' in self.h_params.keys():
68 | self.h_params['class_subset'] = class_subset
69 | if decim or not 'decim' in self.h_params.keys():
70 | self.h_params['decim'] = decim
71 | if train_batch or not 'train_batch' in self.h_params.keys():
72 | self.h_params['train_batch'] = train_batch
73 | if rebalance_classes or not 'rebalance_classes' in self.h_params.keys():
74 | self.h_params['rebalance_classes'] = rebalance_classes
75 |
76 | self.y_shape = self.h_params['y_shape']
77 | self.train, self.val = self._build_dataset(self.h_params['train_paths'],
78 | train_batch=self.h_params['train_batch'],
79 | test_batch=test_batch,
80 | split=True, val_fold_ind=0,
81 | rebalance_classes=self.h_params['rebalance_classes'])
82 | if len(self.h_params['test_paths']) > 0:
83 | self.test = self._build_dataset(self.h_params['test_paths'],
84 | train_batch=self.h_params['train_batch'],
85 | test_batch=test_batch,
86 | split=False,
87 | rebalance_classes=self.h_params['rebalance_classes'])
88 | meta.update(data=self.h_params)
89 |
90 |
91 |
92 | def _build_dataset(self, path, split=True,
93 | train_batch=100, test_batch=None,
94 | repeat=True, val_fold_ind=0, holdout=False,
95 | rebalance_classes=False):
96 |
97 | """Produce a tf.Dataset object and apply preprocessing
98 | functions if specified.
99 |
100 | """
101 | # import and process parent dataset
102 | dataset = tf.data.TFRecordDataset(path)
103 |
104 | dataset = dataset.map(self._parse_function)
105 |
106 | if self.h_params['channel_subset'] is not None:
107 | dataset = dataset.map(self._select_channels)
108 |
109 | if self.h_params['class_subset'] is not None and self.h_params['target_type'] == 'int':
110 | dataset = dataset.filter(self._select_classes)
111 | dataset = dataset.map(self._select_class_subset)
112 |
113 | subset_ratio = np.sum([v for k,v in self.h_params['class_ratio'].items()
114 | if k in self.h_params['class_subset']])
115 | ratio_multiplier = 1./subset_ratio
116 | print("Using class_subset with {} classes:".format(len(self.h_params['class_subset'])))
117 | #print(*[self.h_params['orig_classes'][i] for i in self.h_params['class_subset']])
118 | print("Subset ratio {:.2f}, Multiplier {:.2f}".format(subset_ratio,
119 | ratio_multiplier
120 | ))
121 | cp = {k:v*ratio_multiplier for k,v in self.h_params['class_ratio'].items()
122 | if k in self.h_params['class_subset']}
123 |
124 | self.h_params['class_ratio'] = cp
125 | self.y_shape = (len(self.h_params['class_subset']),)
126 |
127 |
128 | #print("y_shape:", self.h_params['y_shape'])
129 |
130 | if self.h_params['decim'] is not None:
131 | print('decimating')
132 |
133 | self.timepoints = tf.constant(
134 | np.arange(0, self.h_params['n_t'], self.h_params['decim']))
135 |
136 | self.h_params['n_t'] = len(self.timepoints)
137 | dataset = dataset.map(self._decimate)
138 |
139 | #TODO: test set case
140 |
141 | if split:
142 | train_folds = []
143 | val_folds = []
144 | #split into training and validation folds
145 |
146 | for i, fold in enumerate(self.h_params['folds']):
147 | f = fold.copy()
148 | vf = f.pop(val_fold_ind)
149 | val_folds.extend(vf)
150 | train_folds.extend(np.concatenate(f))
151 | #print("datafile: {} iter: {} val: {} train: {}".format(i, val_fold_ind, len(val_folds), len(train_folds)))
152 |
153 |
154 | self.val_fold = np.array(val_folds)
155 | self.train_fold = np.array(train_folds)
156 |
157 | # ovl = 0
158 | # for si in self.train_fold:
159 | # if si in self.val_fold:
160 | # ovl += 1
161 | # print('OVERLAP: ', ovl)
162 | #print(len(np.concatenate(folds)))
163 | #print("Train fold:", self.train_fold, self.train_fold.shape)
164 | #print("val fold:", self.val_fold, self.val_fold.shape)
165 | #self.train_fold = np.concatenate(self.train_fold)
166 |
167 | train_dataset = dataset.filter(self._cv_train_fold_filter)
168 | val_dataset = dataset.filter(self._cv_val_fold_filter)
169 |
170 | if self.h_params['rebalance_classes']:
171 | train_dataset = self._resample(train_dataset)
172 | val_dataset = self._resample(val_dataset)
173 | print("Rebalancing Train and Val")
174 |
175 | #batch
176 | if not test_batch:
177 | test_batch = len(self.val_fold)
178 |
179 | self.validation_steps = max(1, len(self.val_fold)//test_batch)
180 | self.training_steps = max(1, len(self.train_fold)//train_batch)
181 | self.validation_batch = test_batch
182 | self.training_batch = train_batch
183 |
184 | val_dataset = val_dataset.shuffle(5).batch(test_batch).repeat()
185 | val_dataset.batch_size = test_batch
186 | train_dataset = train_dataset.shuffle(5).batch(train_batch).repeat()
187 |
188 | train_dataset = train_dataset.map(self._unpack)
189 | val_dataset = val_dataset.map(self._unpack)
190 |
191 | return train_dataset, val_dataset
192 |
193 | else:
194 | #print(dataset)
195 | #batch
196 | if self.h_params['rebalance_classes']:
197 | dataset = self._resample(dataset)
198 | print("Rebalancing unsplit dataset")
199 | print()
200 | if np.any(['train' in tp for tp in path]):
201 | size = self.h_params['train_size']
202 | else:
203 | size = self.h_params['val_size']
204 | if not test_batch:
205 | test_batch = size
206 | dataset = dataset.shuffle(5).batch(test_batch)
207 | else:
208 | dataset = dataset.shuffle(5).batch(test_batch)#.repeat()
209 |
210 |
211 |
212 | #dataset = dataset.shuffle(5).batch(test_batch)#.repeat()
213 | dataset.batch = test_batch
214 |
215 |
216 | self.test_batch = test_batch
217 | self.test_steps = max(1, size // test_batch)
218 | dataset = dataset.map(self._unpack)
219 | return dataset#, None
220 | #else:
221 | # unsplit datasets are used for visuzalization and evaluation
222 | # if batching is not specified the whole set is used as batch
223 |
224 | # val_size = self.dataset.h_params['val_size']
225 | # self.validation_steps = val_size // val_batch)
226 | # else:
227 | # self.validation_steps = 1
228 |
229 |
230 | # print(dataset)
231 |
232 | # else:
233 | # test_batch = self.h_params['val_size']
234 | # dataset = dataset.shuffle(5).batch(test_batch).repeat()
235 |
236 |
237 |
238 |
239 | def _select_class_subset(self, example_proto):
240 | """Pick classes defined in self.h_params['class_subset'] from y"""
241 | example_proto['y'] = tf.gather(example_proto['y'],
242 | tf.constant(self.h_params['class_subset']),
243 | axis=0)
244 | return example_proto
245 |
246 | def _select_channels(self, example_proto):
247 | """Pick a subset of channels specified by self.channel_subset."""
248 | example_proto['X'] = tf.gather(example_proto['X'],
249 | tf.constant(self.h_params['channel_subset']),
250 | axis=3)
251 | return example_proto
252 |
253 | def _select_times(self, example_proto):
254 | """Pick a subset of channels specified by self.channel_subset."""
255 | example_proto['X'] = tf.gather(example_proto['X'],
256 | tf.constant(self.times),
257 | axis=2)
258 | return example_proto
259 |
260 | def class_weights(self):
261 | """Weights take class proportions into account."""
262 | weights = np.array(
263 | [v for k, v in self.h_params['class_ratio'].items()])
264 | return (1./np.mean(weights))/weights
265 |
266 | def _decimate(self, example_proto):
267 | """Downsample data."""
268 | example_proto['X'] = tf.gather(example_proto['X'],
269 | self.timepoints,
270 | axis=2)
271 | # return example_proto
272 |
273 | # def _get_n_samples(self, path):
274 | # """Count number of samples in TFRecord files specified by path."""
275 | # ns = path
276 | # return ns
277 |
278 | def _parse_function(self, example_proto):
279 | """Restore data shape from serialized records.
280 |
281 | Raises:
282 | -------
283 | ValueError: If the `input_type` does not have the supported
284 | value.
285 | """
286 | keys_to_features = {}
287 |
288 | if self.h_params['input_type'] == 'seq':
289 | y_sh = (self.h_params['n_seq'], *self.h_params['y_shape'])
290 | else:
291 | y_sh = self.h_params['y_shape']
292 |
293 | if self.h_params['input_type'] in ['trials', 'seq', 'continuous', 'fconn']:
294 | x_sh = (self.h_params['n_seq'], self.h_params['n_t'],
295 | self.h_params['n_ch'])
296 | else:
297 | raise ValueError('Invalid input type.')
298 |
299 | keys_to_features['X'] = tf.io.FixedLenFeature(x_sh, tf.float32)
300 | keys_to_features['n'] = tf.io.FixedLenFeature((), tf.int64)
301 |
302 | if self.h_params['target_type'] == 'int':
303 | keys_to_features['y'] = tf.io.FixedLenFeature(y_sh, tf.int64)
304 |
305 | elif self.h_params['target_type'] in ['float', 'signal']:
306 | keys_to_features['y'] = tf.io.FixedLenFeature(y_sh, tf.float32)
307 |
308 | else:
309 | raise ValueError('Invalid target type.')
310 |
311 | parsed_features = tf.io.parse_single_example(example_proto,
312 | keys_to_features)
313 | return parsed_features
314 |
315 | def _select_classes(self, sample):
316 | """Pick a subset of classes specified in self.h_params['class_subset']."""
317 | if self.h_params['class_subset']:
318 | # TODO: fix subsetting
319 | onehot_subset = _onehot(self.h_params['class_subset'],
320 | n_classes=self.h_params['y_shape'][0])
321 | #print(onehot_subset)
322 | subset = tf.constant(onehot_subset, dtype=tf.int64)
323 | out = tf.reduce_any(tf.reduce_all(tf.equal(sample['y'], subset), axis=1))
324 | # if out == False:
325 | # print("X")
326 | # else:
327 | # print("+")
328 |
329 | return out
330 | else:
331 | return tf.constant(True, dtype=tf.bool)
332 |
333 | def _cv_train_fold_filter(self, sample):
334 | """Pick a subset of classes specified in self.h_params['class_subset']."""
335 | if np.any(self.train_fold):
336 | subset = tf.constant(self.train_fold, dtype=tf.int64)
337 | #print(subset)
338 | out = tf.reduce_any(tf.equal(sample['n'], subset))
339 | return out
340 | else:
341 | return tf.constant(True, dtype=tf.bool)
342 |
343 | def _cv_val_fold_filter(self, sample):
344 | """Pick a subset of classes specified in self.h_params['class_subset']."""
345 | if np.any(self.val_fold):
346 | subset = tf.constant(self.val_fold, dtype=tf.int64)
347 | out = tf.reduce_any(tf.equal(sample['n'], subset))
348 | return out
349 | else:
350 | return tf.constant(True, dtype=tf.bool)
351 |
352 | def _unpack(self, sample):
353 | return sample['X'], sample['y']#, sample['n']
354 |
355 |
356 |
357 | def _resample(self, dataset):
358 | #print("Oversampling")
359 |
360 | n_classes = len(self.h_params['class_ratio'].items())
361 | #print(n_classes)
362 | target_dist = 1./n_classes*np.ones(n_classes)
363 | empirical_dist = [v for k, v in self.h_params['class_ratio'].items()]
364 | resample_ds = dataset.rejection_resample(class_func,
365 | target_dist=target_dist,
366 | initial_dist=empirical_dist)
367 | balanced_ds = resample_ds.map(lambda y, xy: xy)
368 | new_dist = {k: target_dist[0]
369 | for k in self.h_params['class_ratio'].keys()}
370 | #print("New class ratio: ", new_dist)
371 | #self.h_params['class_ratio'] = new_dist
372 | return balanced_ds
373 |
374 | def class_func(sample):
375 | return tf.argmax(sample['y'], -1)
376 | # def _onehot(y, n_classes=False):
377 | # if not n_classes:
378 | # """Create one-hot encoded labels."""
379 | # n_classes = len(set(y))
380 | # out = np.zeros((len(y), n_classes))
381 | # for i, ii in enumerate(y):
382 | # out[i][ii] += 1
383 | # return out.astype(int)
--------------------------------------------------------------------------------
/mneflow/layers.py:
--------------------------------------------------------------------------------
1 | # -*- coding: utf-8 -*-
2 | """
3 | Defines mneflow.layers for mneflow.models.
4 |
5 | @author: Ivan Zubarev, ivan.zubarev@aalto.fi
6 | """
7 | #TODO: keras compatible layers
8 | #TODO: pooling layer
9 |
10 | #import functools
11 | import tensorflow as tf
12 |
13 | from tensorflow.keras.initializers import Constant
14 | from tensorflow.keras.activations import relu
15 | from tensorflow.keras import constraints as k_con, regularizers as k_reg, saving
16 | # import tensorflow.compat.v1 as tf
17 | # tf.disable_v2_behavior()
18 | import numpy as np
19 |
20 | bias_const = 0.1
21 | bias_traiable = True
22 |
23 | class BaseLayer(tf.keras.layers.Layer):
24 | def __init__(self, size, nonlin, specs, **args):
25 | super(BaseLayer, self).__init__(**args)
26 | self.size = size
27 | self.nonlin = nonlin
28 | self.specs = specs
29 | # self.specs.setdefault('l1_lambda', 0.)
30 | # self.specs.setdefault('l2_lambda', 0.)
31 | # self.specs.setdefault('l1_scope', [])
32 | # self.specs.setdefault('l2_scope', [])
33 | # self.specs.setdefault('maxnorm_scope', [])
34 |
35 | def _set_regularizer(self):
36 | if self.scope in self.specs['l1_scope'] or 'weights' in self.specs['l1_scope']:
37 | reg = k_reg.l1(self.specs['l1_lambda'])
38 | print('Setting reg for {}, to l1'.format(self.scope))
39 | elif self.scope in self.specs['l2_scope'] or 'weights' in self.specs['l2_scope']:
40 | reg = k_reg.l2(self.specs['l2_lambda'])
41 | print('Setting reg for {}, to l2'.format(self.scope))
42 | else:
43 | reg = None
44 | return reg
45 |
46 | def _set_constraints(self, axis=0):
47 | if self.scope in self.specs['unitnorm_scope']:
48 | constr = k_con.UnitNorm(axis=axis)
49 | print('Setting constraint for {}, to UnitNorm'.format(self.scope))
50 | else:
51 | constr = None
52 | return constr
53 |
54 | @saving.register_keras_serializable(package="mneflow")
55 | class FullyConnected(BaseLayer, tf.keras.layers.Layer):
56 |
57 |
58 | """
59 | Fully-connected layer
60 |
61 | """
62 | def __init__(self, scope='fc', size=None, nonlin=tf.identity, specs={},
63 | **args):
64 | self.scope = scope
65 | super(FullyConnected, self).__init__(size=size, nonlin=nonlin, specs=specs,
66 | **args)
67 | self.constraint = self._set_constraints()
68 | self.reg = self._set_regularizer()
69 |
70 | def get_config(self):
71 | base_config = super(FullyConnected, self).get_config()
72 | config = {'scope': self.scope, 'size': self.size,
73 | 'nonlin': self.nonlin, 'specs': self.specs}
74 |
75 | return {**base_config, **config}
76 |
77 | @classmethod
78 | def from_config(cls, config):
79 | nonlin_config = config.pop("nonlin")
80 | scope = config.pop("scope")
81 | nonlin = saving.deserialize_keras_object(nonlin_config)
82 | return cls(nonlin, **config)
83 |
84 | def build(self, input_shape):
85 | super(FullyConnected, self).build(input_shape)
86 | # print(input_shape)
87 | self.flatsize = np.prod(input_shape[1:])
88 | #print(self.scope, ':::', )
89 |
90 | self.w = self.add_weight(shape=[self.flatsize, self.size],
91 | initializer='he_uniform',
92 | regularizer=self.reg,
93 | constraint=self.constraint,
94 | trainable=True,
95 | name='fc_weights',
96 | dtype=tf.float32)
97 |
98 | self.b = self.add_weight(shape=[self.size],
99 | initializer=Constant(bias_const),
100 | regularizer=None,
101 | trainable=bias_traiable,
102 | name='fc_bias',
103 | dtype=tf.float32)
104 |
105 | print("Built: {} input: {}".format(self.scope, input_shape))
106 |
107 |
108 | def call(self, x, training=None):
109 | """
110 | FullyConnected layer currying, to apply layer to any input tensor `x`
111 | """
112 | while True:
113 | with tf.name_scope(self.scope):
114 | if len(x.shape) > 2: # flatten if input is not 2d array
115 | x = tf.reshape(x, [-1, self.flatsize])
116 | tmp = tf.matmul(x, self.w) + self.b
117 | tmp = self.nonlin(tmp, name='out')
118 | #print(self.scope, ": output :", tmp.shape)
119 | return tmp
120 |
121 | @saving.register_keras_serializable(package="mneflow")
122 | class DeMixing(BaseLayer):
123 | """
124 | Spatial demixing Layer
125 |
126 | """
127 |
128 | def __init__(self, scope="dmx", size=None, nonlin=tf.identity, axis=-1,
129 | specs={}, **args):
130 | self.scope = scope
131 | self.axis = axis
132 | super(DeMixing, self).__init__(size=size, nonlin=nonlin, specs=specs,
133 | **args)
134 |
135 | def get_config(self):
136 | config = super(DeMixing, self).get_config()
137 | config.update({'scope': self.scope, 'size': self.size,
138 | 'nonlin': self.nonlin, 'axis': self.axis,
139 | 'specs':self.specs})
140 | return config
141 |
142 | @classmethod
143 | def from_config(cls, config):
144 | nonlin_config = config.pop("nonlin")
145 | scope = config.pop("scope")
146 | nonlin = saving.deserialize_keras_object(nonlin_config)
147 | return cls(nonlin, **config)
148 |
149 | def build(self, input_shape):
150 |
151 | super(DeMixing, self).build(input_shape)
152 | self.constraint = self._set_constraints(axis=0)
153 | self.reg = self._set_regularizer()
154 |
155 | self.w = self.add_weight(
156 | shape=(input_shape[self.axis], self.size),
157 | initializer='he_uniform',
158 | regularizer=self.reg,
159 | constraint = self.constraint,
160 | trainable=True,
161 | name='dmx_weights',
162 | dtype=tf.float32)
163 |
164 | self.b_in = self.add_weight(shape=([self.size]),
165 | initializer=Constant(bias_const),
166 | regularizer=None,
167 | trainable=bias_traiable,
168 | name='bias',
169 | dtype=tf.float32)
170 | print("Built: {} input: {}".format(self.scope, input_shape))
171 |
172 | #@tf.function
173 | def call(self, x, training=None):
174 | """
175 | """
176 | while True:
177 | with tf.name_scope(self.scope):
178 | try:
179 | demix = tf.tensordot(x, self.w, axes=[[self.axis], [0]],
180 | name='dmx')
181 | demix = self.nonlin(demix + self.b_in)
182 | #print(self.scope, ": output :", demix.shape)
183 | return demix
184 | except(AttributeError):
185 | input_shape = x.shape
186 | self.build(input_shape)
187 | @saving.register_keras_serializable(package="mneflow")
188 | class SquareSymm(BaseLayer):
189 | """
190 | SquaredSymmetric Layer
191 |
192 | """
193 | def __init__(self, scope='ssym', size=None, nonlin=tf.identity, axis=1,
194 | specs={}, **args):
195 | self.scope = scope
196 | self.axis = axis
197 | super(SquareSymm, self).__init__(size=size, nonlin=nonlin, specs=specs,
198 | **args)
199 |
200 | def get_config(self):
201 | config = super(SquareSymm, self).get_config()
202 | config.update({'scope': self.scope, 'size': self.size,
203 | 'nonlin': self.nonlin, 'axis': self.axis,
204 | 'specs':self.specs})
205 | return config
206 |
207 | @classmethod
208 | def from_config(cls, config):
209 | nonlin_config = config.pop("nonlin")
210 | scope = config.pop("scope")
211 | nonlin = saving.deserialize_keras_object(nonlin_config)
212 | return cls(nonlin, **config)
213 |
214 | def build(self, input_shape):
215 |
216 | super(SquareSymm, self).build(input_shape)
217 | self.constraint = self._set_constraints(axis=0)
218 | self.reg = self._set_regularizer()
219 |
220 | self.w = self.add_weight(
221 | shape=(input_shape[self.axis], self.size),
222 | initializer='he_uniform',
223 | regularizer=self.reg,
224 | constraint = self.constraint,
225 | trainable=True,
226 | name='ssym_weights',
227 | dtype=tf.float32)
228 |
229 | self.b_in = self.add_weight(shape=([self.size]),
230 | initializer=Constant(0.1),
231 | regularizer=None,
232 | trainable=True,
233 | name='bias',
234 | dtype=tf.float32)
235 | print("Built: {} input: {}".format(self.scope, input_shape))
236 |
237 | #@tf.function
238 | def call(self, x, training=None):
239 | """
240 | """
241 | while True:
242 | with tf.name_scope(self.scope):
243 | try:
244 | d1 = tf.tensordot(x, self.w, axes=[[1], [0]],
245 | name='smx') #output
246 | d2 = tf.tensordot(d1, self.w, axes=[[1], [0]],
247 | name='smx')
248 |
249 | demix = self.nonlin(d2 + self.b_in)
250 | return demix
251 | except(AttributeError):
252 | input_shape = x.shape
253 | self.build(input_shape)
254 |
255 | @saving.register_keras_serializable(package="mneflow")
256 | class LFTConv(BaseLayer):
257 | """
258 | Stackable temporal convolutional layer, interpreatble (LF)
259 |
260 | """
261 |
262 | def __init__(self, scope='tconv', size=32, nonlin=tf.nn.relu,
263 | filter_length=7, pooling=2, padding='SAME', specs={},
264 | **args):
265 | self.scope = scope
266 | super(LFTConv, self).__init__(size=size, nonlin=nonlin, specs=specs,
267 | **args)
268 | self.size = size
269 | self.filter_length = filter_length
270 | self.padding = padding
271 |
272 | def get_config(self):
273 |
274 | config = super(LFTConv, self).get_config()
275 | config.update({'scope': self.scope,
276 | 'filter_length': self.filter_length,
277 | 'nonlin': self.nonlin, 'padding': self.padding,
278 | 'specs':self.specs})
279 | return config
280 |
281 | @classmethod
282 | def from_config(cls, config):
283 | nonlin_config = config.pop("nonlin")
284 | scope = config.pop("scope")
285 | nonlin = saving.deserialize_keras_object(nonlin_config)
286 | return cls(nonlin, **config)
287 |
288 | def build(self, input_shape):
289 | super(LFTConv, self).build(input_shape)
290 | self.constraint = self._set_constraints(axis=1)
291 | self.reg = self._set_regularizer()
292 | shape = [1, self.filter_length, input_shape[-1], 1]
293 | self.filters = self.add_weight(shape=shape,
294 | initializer='he_uniform',
295 | regularizer=self.reg,
296 | constraint=self.constraint,
297 | trainable=True,
298 | name='tconv_weights',
299 | dtype=tf.float32)
300 |
301 | self.b = self.add_weight(shape=([input_shape[-1]]),
302 | initializer=Constant(bias_const),
303 | regularizer=None,
304 | trainable=bias_traiable,
305 | name='bias',
306 | dtype=tf.float32)
307 | print("Built: {} input: {}".format(self.scope, input_shape))
308 |
309 | #@tf.function
310 | def call(self, x, training=None):
311 | """
312 | """
313 | while True:
314 | with tf.name_scope(self.scope):
315 | try:
316 | conv = tf.nn.depthwise_conv2d(x,
317 | self.filters,
318 | padding=self.padding,
319 | strides=[1, 1, 1, 1],
320 | data_format='NHWC')
321 | conv = self.nonlin(conv + self.b)
322 |
323 | #print(self.scope, ": output :", conv.shape)
324 | return conv
325 | except(AttributeError):
326 | input_shape = x.shape
327 | self.build(input_shape)
328 |
@saving.register_keras_serializable(package="mneflow")
class VARConv(BaseLayer):
    """
    Stackable temporal convolutional layer.

    Applies a full-rank 2d convolution with a ``[1, filter_length]``
    kernel along the time axis, adds a bias, and applies ``nonlin``.
    """

    def __init__(self, scope='tconv', size=32, nonlin=tf.nn.relu,
                 filter_length=7, pooling=2, padding='SAME', specs=None,
                 **args):
        """
        Parameters
        ----------
        scope : str
            Name scope of the layer.
        size : int
            Number of output channels (convolutional filters).
        nonlin : callable
            Pointwise activation applied to conv + bias.
        filter_length : int
            Kernel length along the time axis.
        pooling : int
            Unused here; accepted for signature compatibility
            (pooling is performed by TempPooling).
        padding : str
            'SAME' or 'VALID', forwarded to tf.nn.conv2d.
        specs : dict, optional
            Extra specs forwarded to BaseLayer; defaults to {}.
        """
        # Avoid the shared-mutable-default-argument pitfall.
        specs = {} if specs is None else specs
        self.scope = scope
        super(VARConv, self).__init__(size=size, nonlin=nonlin, specs=specs,
                                      **args)
        self.size = size
        self.nonlin = nonlin
        self.filter_length = filter_length
        self.padding = padding

    def get_config(self):
        """Return the serializable configuration of the layer."""
        config = super(VARConv, self).get_config()
        config.update({'scope': self.scope,
                       'filter_length': self.filter_length,
                       'nonlin': self.nonlin, 'padding': self.padding,
                       'specs': self.specs})
        return config

    @classmethod
    def from_config(cls, config):
        """Recreate the layer from its serialized config."""
        nonlin_config = config.pop("nonlin")
        scope = config.pop("scope")
        nonlin = saving.deserialize_keras_object(nonlin_config)
        # Pass both by keyword: the previous positional call bound the
        # deserialized activation to `scope` and discarded the real scope.
        return cls(scope=scope, nonlin=nonlin, **config)

    def build(self, input_shape):
        """Create kernel and bias weights once the input shape is known."""
        print("input_shape:", input_shape)
        super(VARConv, self).build(input_shape)

        self.constraint = self._set_constraints()
        self.reg = self._set_regularizer()
        # Kernel layout: [height=1, width=filter_length, in_ch, out_ch].
        shape = [1, self.filter_length, input_shape[-1], self.size]
        self.filters = self.add_weight(shape=shape,
                                       initializer='he_uniform',
                                       regularizer=self.reg,
                                       constraint=self.constraint,
                                       trainable=True,
                                       name='tconv_weights',
                                       dtype=tf.float32)

        # Bias must match the conv OUTPUT channels: tf.nn.conv2d yields
        # [..., self.size], so the former shape [input_shape[-1]] failed
        # to broadcast whenever input channels != size.
        # NOTE(review): `bias_const` / `bias_traiable` (sic) are names
        # defined earlier in this module — confirm their values there.
        self.b = self.add_weight(shape=[self.size],
                                 initializer=Constant(bias_const),
                                 regularizer=None,
                                 trainable=bias_traiable,
                                 name='bias',
                                 dtype=tf.float32)
        print("Built: {} input: {}".format(self.scope, input_shape))

    def call(self, x, training=None):
        """Apply convolution, bias, and nonlinearity to ``x`` (NHWC)."""
        # Build lazily via an explicit flag; the previous while/try loop
        # on AttributeError could spin forever if the exception came from
        # build() or nonlin() instead of a missing weight.
        if not self.built:
            self.build(x.shape)
        with tf.name_scope(self.scope):
            conv = tf.nn.conv2d(x, self.filters,
                                padding=self.padding,
                                strides=[1, 1, 1, 1],
                                data_format='NHWC')
            return self.nonlin(conv + self.b)
405 |
@saving.register_keras_serializable(package="mneflow")
class TempPooling(BaseLayer):
    """Temporal pooling layer.

    Pools the input along the time axis (width dimension of NHWC) using
    either max- or average-pooling, with configurable kernel and stride.
    """

    def __init__(self, scope='pool', stride=2, pooling=2, specs={},
                 padding='SAME', pool_type='max', **args):
        """
        Parameters
        ----------
        scope : str
            Base name; the final scope is '<pool_type>_<scope>'.
        stride : int
            Stride along the time axis.
        pooling : int
            Pooling window length along the time axis.
        specs : dict
            Extra specs forwarded to BaseLayer.
        padding : str
            'SAME' or 'VALID'.
        pool_type : str
            'avg' for average pooling, anything else selects max pooling.
        """
        self.scope = '_'.join([pool_type, scope])
        super(TempPooling, self).__init__(size=None, nonlin=None, specs=specs,
                                          **args)
        self.padding = padding
        self.pool_type = pool_type
        # 4d kernel/stride vectors: pool only over the time (width) axis.
        self.strides = [1, 1, stride, 1]
        self.kernel = [1, 1, pooling, 1]

    def get_config(self):
        """Return the serializable configuration of the layer."""
        cfg = super(TempPooling, self).get_config()
        cfg.update({'scope': self.scope,
                    'stride': self.strides[2],
                    'pooling': self.kernel[2], 'padding': self.padding,
                    'specs': self.specs, 'pool_type': self.pool_type})
        return cfg

    def call(self, x):
        """Pool ``x`` (NHWC) along the time axis and return the result."""
        pool_fn = tf.nn.avg_pool2d if self.pool_type == 'avg' \
            else tf.nn.max_pool2d
        return pool_fn(x,
                       ksize=self.kernel,
                       strides=self.strides,
                       padding=self.padding,
                       data_format='NHWC')

    def build(self, input_shape):
        """No weights to create; simply mark the layer as built."""
        super(TempPooling, self).build(input_shape)
        self.built = True
462 |
463 |
@saving.register_keras_serializable(package="mneflow")
class LSTM(tf.keras.layers.LSTM):
    """Thin wrapper over ``tf.keras.layers.LSTM`` with mneflow naming.

    Maps mneflow-style constructor arguments (scope/size/nonlin) onto
    the corresponding Keras arguments (name/units/activation).
    """

    def __init__(self, scope='lstm', size=32, nonlin='tanh', dropout=0.0,
                 recurrent_activation='tanh', recurrent_dropout=0.0,
                 use_bias=True, unit_forget_bias=True,
                 kernel_regularizer=None, bias_regularizer=None,
                 return_sequences=True, stateful=False, unroll=False, **args):
        # NOTE(review): Keras' own default recurrent_activation is
        # 'sigmoid'; 'tanh' is preserved here for backward compatibility —
        # confirm it is intentional.
        super(LSTM, self).__init__(name=scope,
                                   units=size,
                                   activation=nonlin,
                                   dropout=dropout,
                                   recurrent_activation=recurrent_activation,
                                   recurrent_dropout=recurrent_dropout,
                                   use_bias=use_bias,
                                   unit_forget_bias=unit_forget_bias,
                                   kernel_regularizer=kernel_regularizer,
                                   bias_regularizer=bias_regularizer,
                                   return_sequences=return_sequences,
                                   stateful=stateful,
                                   unroll=unroll,
                                   **args)
        self.scope = scope
        self.size = size
        self.nonlin = nonlin
        print(self.scope, 'init : OK')

    def get_config(self):
        """Return the parent config extended with mneflow-style keys."""
        config = super(LSTM, self).get_config()
        config.update({'scope': self.scope, 'size': self.size,
                       'nonlin': self.nonlin})
        return config

    @classmethod
    def from_config(cls, config):
        """Recreate the layer from its serialized config."""
        nonlin_config = config.pop("nonlin")
        scope = config.pop("scope")
        nonlin = saving.deserialize_keras_object(nonlin_config)
        # Drop the parent-config keys that scope/size/nonlin supersede;
        # otherwise __init__ receives e.g. both name=scope and name=...,
        # raising a duplicate-keyword TypeError on deserialization.
        config.pop("name", None)
        config.pop("units", None)
        config.pop("activation", None)
        # Keyword arguments: the previous positional call bound the
        # deserialized activation to `scope` and dropped the real scope.
        return cls(scope=scope, nonlin=nonlin, **config)

    def build(self, input_shape):
        """Defer entirely to the Keras LSTM build."""
        super(LSTM, self).build(input_shape)

    @tf.function
    def call(self, inputs, mask=None, training=None, initial_state=None):
        """Defer entirely to the Keras LSTM forward pass."""
        return super(LSTM, self).call(inputs, mask=mask, training=training,
                                      initial_state=initial_state)
514 |
515 |
516 |
517 |
if __name__ == '__main__':
    # Simple marker printed when the module is executed (or re-run)
    # directly; the module defines layers only and has no CLI behavior.
    print('Reloaded')
520 |
521 |
--------------------------------------------------------------------------------
/py3ml.yml:
--------------------------------------------------------------------------------
1 | name: py3ml
2 | channels:
3 | - https://repo.continuum.io/pkgs/main
4 | - https://repo.anaconda.com/pkgs/main
5 | - defaults
6 | dependencies:
7 | - alabaster=0.7.12=py37_0
8 | - asn1crypto=0.24.0=py37_0
9 | - astroid=2.2.5=py37_0
10 | - attrs=19.1.0=py37_1
11 | - babel=2.6.0=py37_0
12 | - backcall=0.1.0=py37_0
13 | - blas=1.0=mkl
14 | - bleach=3.1.0=py37_0
15 | - ca-certificates=2019.1.23=0
16 | - certifi=2019.3.9=py37_0
17 | - cffi=1.12.3=py37h2e261b9_0
18 | - chardet=3.0.4=py37_1
19 | - cloudpickle=1.0.0=py_0
20 | - cryptography=2.6.1=py37h1ba5d50_0
21 | - dbus=1.13.6=h746ee38_0
22 | - decorator=4.4.0=py37_1
23 | - defusedxml=0.6.0=py_0
24 | - docutils=0.14=py37_0
25 | - entrypoints=0.3=py37_0
26 | - expat=2.2.6=he6710b0_0
27 | - fontconfig=2.13.0=h9420a91_0
28 | - freetype=2.9.1=h8a8886c_1
29 | - glib=2.56.2=hd408876_0
30 | - gst-plugins-base=1.14.0=hbbd80ab_1
31 | - gstreamer=1.14.0=hb453b48_1
32 | - idna=2.8=py37_0
33 | - imagesize=1.1.0=py37_0
34 | - intel-openmp=2019.3=199
35 | - ipykernel=5.1.0=py37h39e3cac_0
36 | - ipython=7.5.0=py37h39e3cac_0
37 | - ipython_genutils=0.2.0=py37_0
38 | - ipywidgets=7.4.2=py37_0
39 | - isort=4.3.19=py37_0
40 | - jedi=0.13.3=py37_0
41 | - jeepney=0.4=py37_0
42 | - jinja2=2.10.1=py37_0
43 | - jsonschema=3.0.1=py37_0
44 | - jupyter=1.0.0=py37_7
45 | - jupyter_client=5.2.4=py37_0
46 | - jupyter_console=6.0.0=py37_0
47 | - jupyter_core=4.4.0=py37_0
48 | - keyring=18.0.0=py37_0
49 | - lazy-object-proxy=1.4.0=py37h7b6447c_0
50 | - libedit=3.1.20181209=hc058e9b_0
51 | - libgcc-ng=8.2.0=hdf63c60_1
52 | - libgfortran-ng=7.3.0=hdf63c60_0
53 | - libpng=1.6.37=hbc83047_0
54 | - libsodium=1.0.16=h1bed415_0
55 | - libstdcxx-ng=8.2.0=hdf63c60_1
56 | - libuuid=1.0.3=h1bed415_2
57 | - libxcb=1.13=h1bed415_1
58 | - libxml2=2.9.9=he19cac6_0
59 | - markupsafe=1.1.1=py37h7b6447c_0
60 | - mccabe=0.6.1=py37_1
61 | - mistune=0.8.4=py37h7b6447c_0
62 | - mkl=2019.3=199
63 | - mkl_fft=1.0.12=py37ha843d7b_0
64 | - mkl_random=1.0.2=py37hd81dba3_0
65 | - nbconvert=5.5.0=py_0
66 | - nbformat=4.4.0=py37_0
67 | - ncurses=6.1=he6710b0_1
68 | - notebook=5.7.8=py37_0
69 | - numpy=1.16.3=py37h7e9f1db_0
70 | - numpy-base=1.16.3=py37hde5b4d6_0
71 | - numpydoc=0.9.1=py_0
72 | - openssl=1.1.1b=h7b6447c_1
73 | - packaging=19.0=py37_0
74 | - pandoc=2.2.3.2=0
75 | - pandocfilters=1.4.2=py37_1
76 | - parso=0.4.0=py_0
77 | - pcre=8.43=he6710b0_0
78 | - pexpect=4.7.0=py37_0
79 | - pickleshare=0.7.5=py37_0
80 | - pip=19.1.1=py37_0
81 | - prometheus_client=0.6.0=py37_0
82 | - prompt_toolkit=2.0.9=py37_0
83 | - psutil=5.6.2=py37h7b6447c_0
84 | - ptyprocess=0.6.0=py37_0
85 | - pycodestyle=2.5.0=py37_0
86 | - pycparser=2.19=py37_0
87 | - pyflakes=2.1.1=py37_0
88 | - pygments=2.4.0=py_0
89 | - pylint=2.3.1=py37_0
90 | - pyopenssl=19.0.0=py37_0
91 | - pyparsing=2.4.0=py_0
92 | - pyqt=5.9.2=py37h05f1152_2
93 | - pyrsistent=0.14.11=py37h7b6447c_0
94 | - pysocks=1.7.0=py37_0
95 | - python=3.7.3=h0371630_0
96 | - python-dateutil=2.8.0=py37_0
97 | - pytz=2019.1=py_0
98 | - pyzmq=18.0.0=py37he6710b0_0
99 | - qt=5.9.7=h5867ecd_1
100 | - qtawesome=0.5.7=py37_1
101 | - qtconsole=4.4.4=py_0
102 | - qtpy=1.7.1=py_0
103 | - readline=7.0=h7b6447c_5
104 | - requests=2.21.0=py37_0
105 | - rope=0.14.0=py_0
106 | - scipy=1.2.1=py37h7c811a0_0
107 | - secretstorage=3.1.1=py37_0
108 | - send2trash=1.5.0=py37_0
109 | - setuptools=41.0.1=py37_0
110 | - sip=4.19.8=py37hf484d3e_0
111 | - six=1.12.0=py37_0
112 | - snowballstemmer=1.2.1=py37_0
113 | - sphinx=2.0.1=py_0
114 | - sphinxcontrib-applehelp=1.0.1=py_0
115 | - sphinxcontrib-devhelp=1.0.1=py_0
116 | - sphinxcontrib-htmlhelp=1.0.2=py_0
117 | - sphinxcontrib-jsmath=1.0.1=py_0
118 | - sphinxcontrib-qthelp=1.0.2=py_0
119 | - sphinxcontrib-serializinghtml=1.1.3=py_0
120 | - spyder=3.3.4=py37_0
121 | - spyder-kernels=0.4.4=py37_0
122 | - sqlite=3.28.0=h7b6447c_0
123 | - terminado=0.8.2=py37_0
124 | - testpath=0.4.2=py37_0
125 | - tk=8.6.8=hbc83047_0
126 | - tornado=6.0.2=py37h7b6447c_0
127 | - traitlets=4.3.2=py37_0
128 | - urllib3=1.24.2=py37_0
129 | - wcwidth=0.1.7=py37_0
130 | - webencodings=0.5.1=py37_1
131 | - wheel=0.33.4=py37_0
132 | - widgetsnbextension=3.4.2=py37_0
133 | - wrapt=1.11.1=py37h7b6447c_0
134 | - wurlitzer=1.0.2=py37_0
135 | - xz=5.2.4=h14c3975_4
136 | - zeromq=4.3.1=he6710b0_3
137 | - zlib=1.2.11=h7b6447c_3
138 | - gmp=6.1.2=h6c8ec71_1
139 | - icu=58.2=h9c2bf20_1
140 | - jpeg=9b=h024ee3a_2
141 | - libffi=3.2.1=hd88cf55_4
142 | prefix: /u/62/zubarei1/unix/.conda/envs/py3ml
143 |
144 |
--------------------------------------------------------------------------------
/requirements.txt:
--------------------------------------------------------------------------------
1 | tensorflow > 2.12.0, <=2.16rc
mne >= 0.24.0, <=1.1.1
3 |
--------------------------------------------------------------------------------
/scripts/basic_example.py:
--------------------------------------------------------------------------------
# -*- coding: utf-8 -*-
"""
Basic mneflow example: load MNE's multimodal dataset, convert epochs to
TFRecords, train an LFCNN3 model, and visualize the learned patterns.

Created on Mon Nov 30 12:46:54 2020

@author: ipzub
"""
import os

# NOTE(review): hard-coded local Windows path — adjust before running
# on another machine.
os.chdir("C:\\Users\\ipzub\\projs\\mneflow")
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'

import tensorflow as tf
#tf.get_logger().setLevel('ERROR')
#tf.autograph.set_verbosity(0)

import numpy as np
import mne
from mne.datasets import multimodal

import mneflow
mne.set_log_level(verbose='CRITICAL')

# Download (if needed) and open the multimodal sample recording.
fname_raw = os.path.join(multimodal.data_path(), 'multimodal_raw.fif')
raw = mne.io.read_raw_fif(fname_raw)

# Build one Epochs object per acquisition condition, then concatenate.
cond = raw.acqparser.get_condition(raw, None)
# get the list of condition names
condition_names = [k for c in cond for k,v in c['event_id'].items()]
epochs_list = [mne.Epochs(raw, **c) for c in cond]
epochs = mne.concatenate_epochs(epochs_list)
epochs = epochs.pick_types(meg='mag')
print(epochs.info)
#%%

#%%
#Specify import options
# NOTE(review): epochs were restricted to magnetometers ('mag') above,
# but `picks` below requests gradiometers — confirm which is intended.
import_opt = dict(savepath='C:\\data\\tfr\\', # path where TFR files will be saved
                  out_name='mne_sample_epochs', # name of TFRecords files
                  fs=600,
                  overwrite=False,
                  input_type='trials',
                  target_type='int',
                  n_folds=5,
                  picks={'meg':'grad'},
                  scale=True, # apply baseline_scaling
                  crop_baseline=True, # remove baseline interval after scaling
                  scale_interval=(0, 40), # indices in time axis corresponding to baseline interval
                  test_set='holdout')


#write TFRecord files and metadata file to disk
#meta = mneflow.produce_tfrecords([epochs], **import_opt)
meta = mneflow.produce_tfrecords([epochs], **import_opt)


dataset = mneflow.Dataset(meta, train_batch=100)
#%%
# Hyperparameters for the LFCNN3 model.
lf_params = dict(n_latent=64, #number of latent factors
                  filter_length=17, #convolutional filter length in time samples
                  nonlin = tf.nn.relu,
                  padding = 'SAME',
                  pooling = 5,#pooling factor
                  stride = 5, #stride parameter for pooling layer
                  pool_type='max',
                  model_path = import_opt['savepath'],
                  dropout = .5,
                  l1_scope = ["weights"],
                  l1=3e-3)

model = mneflow.models.LFCNN3(dataset, lf_params)
model.build()



#train the model for 10 epochs
# NOTE(review): comment says 10 epochs but n_epochs=30 — confirm.
model.train(n_epochs=30, eval_step=100, early_stopping=5)

#%%

model.evaluate(meta['test_paths'])
#%%
model.compute_patterns()

#%%
f1 = model.plot_patterns('Vectorview-grad', sorting='l2', class_names=condition_names)
#%%
f2 = model.plot_spectra(sorting='l2', norm_spectra='welch', class_names=condition_names)
#%%
f3 = model.plot_waveforms(sorting='l2', class_names=condition_names)
--------------------------------------------------------------------------------
/setup.py:
--------------------------------------------------------------------------------
#! /usr/bin/env python
"""Packaging script for mneflow (setuptools)."""
from setuptools import setup
import codecs  # only needed by the commented-out long_description below
import os

if __name__ == '__main__':
    # Remove a stale MANIFEST so setuptools regenerates it.
    if os.path.exists('MANIFEST'):
        os.remove('MANIFEST')

    # NOTE(review): long_description_content_type is set while
    # long_description itself is commented out — confirm whether the
    # intro.rst read should be re-enabled.
    # NOTE(review): version pins here (mne <=1.7, tensorflow <=2.16.1)
    # differ from requirements.txt (mne <=1.1.1, tensorflow <=2.16rc) —
    # confirm which set is authoritative.
    setup(name='mneflow',
          maintainer='Ivan Zubarev',
          maintainer_email='ivan.zubarev@aalto.fi',
          description='Neural networks for MEG and EEG data',
          license='BSD-3',
          url='https://github.com/zubara/mneflow',
          version='0.5.11dev',
          download_url='https://github.com/zubara/mneflow/archive/master.zip',
          #long_description=codecs.open('./docs/intro.rst', encoding='utf8').read(),
          long_description_content_type="text/x-rst",
          classifiers=['Intended Audience :: Science/Research',
                  'Intended Audience :: Developers',
                  'License :: OSI Approved',
                  'Programming Language :: Python',
                  'Topic :: Software Development',
                  'Topic :: Scientific/Engineering',
                  'Operating System :: Microsoft :: Windows',
                  'Operating System :: POSIX',
                  'Operating System :: Unix',
                  'Operating System :: MacOS'],
          platforms='any',
          packages=['mneflow'],
          install_requires=['numpy',
                            'scipy',
                            'mne >= 1.0, <=1.7',
                            'tensorflow >= 2.12.0, <=2.16.1',
                            'matplotlib'])
37 |
--------------------------------------------------------------------------------