├── .classpath
├── .gitignore
├── .project
├── .settings
└── org.eclipse.jdt.core.prefs
├── LICENSE
├── README.md
├── pom.xml
└── src
├── main
├── java
│ └── org
│ │ └── deeplearning4j
│ │ ├── examples
│ │ └── rnn
│ │ │ ├── shakespeare
│ │ │ ├── CharacterIterator.java
│ │ │ └── GravesLSTMCharModellingExample.java
│ │ │ ├── strata
│ │ │ └── physionet
│ │ │ │ ├── PhysioNetLabels.java
│ │ │ │ ├── PhysioNet_ICU_Mortality_Iterator.java
│ │ │ │ ├── PhysioNet_LSTM_Model.java
│ │ │ │ ├── PhysioNet_Model_Evaluation.java
│ │ │ │ ├── PhysioNet_Vectorizer.java
│ │ │ │ ├── output
│ │ │ │ └── single
│ │ │ │ │ ├── PhysioNet_ICU_SingleLabel_Iterator.java
│ │ │ │ │ ├── PhysioNet_LSTM_Model.java
│ │ │ │ │ └── PhysioNet_Vectorizer.java
│ │ │ │ ├── schema
│ │ │ │ ├── PhysioNet_CSVSchema.java
│ │ │ │ ├── TimeseriesDescriptorSchemaColumn.java
│ │ │ │ └── TimeseriesSchemaColumn.java
│ │ │ │ └── utils
│ │ │ │ ├── EvalScoreTracker.java
│ │ │ │ ├── PhysioNetDataUtils.java
│ │ │ │ └── PhysioNetVectorizationDebugTool.java
│ │ │ └── synthetic
│ │ │ ├── ND4JMatrixTool.java
│ │ │ ├── simple1
│ │ │ ├── SyntheticDataIterator.java
│ │ │ └── SyntheticData_LSTM_Model.java
│ │ │ ├── simple2
│ │ │ ├── Simple2Dataset_Iterator.java
│ │ │ └── Synthetic_Simple2_LSTM_Model.java
│ │ │ └── simple3
│ │ │ └── uneven
│ │ │ ├── Simple3_Uneven_Dataset_Iterator.java
│ │ │ └── Simple3_Uneven_LSTM_Model.java
│ │ └── jp
│ │ └── rnn
│ │ ├── general
│ │ └── timeseries
│ │ │ └── LSTM_GenericTimeseriesClassification.java
│ │ └── tweets
│ │ ├── demo
│ │ └── LSTM_TwitterBot.java
│ │ └── test
│ │ └── LSTM_TweetTest.java
└── resources
│ ├── nietzche.txt
│ ├── rnn_sammer.txt
│ └── rnn_txt_input.txt
└── test
├── java
└── org
│ └── deeplearning4j
│ └── jp
│ └── rnn
│ ├── general
│ └── timeseries
│ │ └── Test_LSTM_GenericTimeseriesClassification.java
│ ├── strata
│ └── physionet
│ │ ├── TestPhysioNet_Iterator.java
│ │ ├── TestPhysioNet_Vectorizer.java
│ │ └── utils
│ │ └── TestPhysioNetSubsetExtraction.java
│ └── synthetic
│ ├── TestSyntheticDataIterator.java
│ └── simple3
│ └── uneven
│ └── Test_Simple3_Uneven_Dataset_Iterator.java
└── resources
├── data
├── physionet
│ └── sample
│ │ ├── set-a-labels
│ │ └── Outcomes-a.txt
│ │ └── set-a
│ │ ├── 135458.txt
│ │ ├── 135476.txt
│ │ └── 136434.txt
└── synthetic
│ ├── simple
│ ├── simple_ts_data
│ └── simple_ts_labels.txt
│ ├── simple_2
│ ├── simple_2_data.txt
│ └── simple_2_labels.txt
│ └── simple_3_uneven
│ ├── simple_3_uneven_data.txt
│ └── simple_3_uneven_labels.txt
├── physionet_sample_data.txt
├── physionet_schema.txt
└── physionet_schema_zmzuv_0.txt
/.gitignore:
--------------------------------------------------------------------------------
1 | *.class
2 |
3 | # Mobile Tools for Java (J2ME)
4 | .mtj.tmp/
5 |
6 | # Package Files #
7 | *.jar
8 | *.war
9 | *.ear
10 |
11 | # virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
12 | hs_err_pid*
13 |
14 | target/
15 | .idea/*
16 | *.iml
17 | logs/
18 | *.ser
19 | *.log
20 | *.ipynb_checkpoints
21 | *.pyc
22 | graph-tmp/
23 |
24 | .DS_Store
25 | pom.xml.releaseBackup
26 |
27 | dependency-reduced-pom.xml
28 | *.ser
29 | application.home_IS_UNDEFINED
30 | README.md~
31 | *.png
32 | *.bin
33 | *.releaseBackup
34 | *.png
35 | *.bin
36 | *.out
37 | *~
38 | .pydevproject
39 | release.properties
40 | .idea/
41 | *.iml
42 | *.prefs
43 | *.settings/*
44 | model-saver-*
45 | mnist-pretrain-dbn.bin-*
46 | deeplearning4j-scaleout/deeplearning4j-aws/src/main/java/org/deeplearning4j/aws/ec2/provision/Ec2CommandRunner.java.settings/
47 | *.log
48 | .project
49 | .classpath
50 | metastore_db
51 | *.ipynb*
52 | *.html
53 | *.csv
54 | Word2vec-index/
55 | *.conf
56 | *.json
57 |
58 |
--------------------------------------------------------------------------------
/.project:
--------------------------------------------------------------------------------
1 |
2 |
3 | dl4j-rnn-timeseries-examples
4 | Examples of Recurrent Neural Networks training different data sets. NO_M2ECLIPSE_SUPPORT: Project files created with the maven-eclipse-plugin are not supported in M2Eclipse.
5 |
6 |
7 |
8 | org.eclipse.jdt.core.javabuilder
9 |
10 |
11 |
12 | org.eclipse.jdt.core.javanature
13 |
14 |
--------------------------------------------------------------------------------
/.settings/org.eclipse.jdt.core.prefs:
--------------------------------------------------------------------------------
1 | #Tue Jan 26 13:20:07 EST 2016
2 | org.eclipse.jdt.core.compiler.codegen.targetPlatform=1.7
3 | eclipse.preferences.version=1
4 | org.eclipse.jdt.core.compiler.source=1.7
5 | org.eclipse.jdt.core.compiler.compliance=1.7
6 |
--------------------------------------------------------------------------------
/LICENSE:
--------------------------------------------------------------------------------
1 | Apache License
2 | Version 2.0, January 2004
3 | http://www.apache.org/licenses/
4 |
5 | TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
6 |
7 | 1. Definitions.
8 |
9 | "License" shall mean the terms and conditions for use, reproduction,
10 | and distribution as defined by Sections 1 through 9 of this document.
11 |
12 | "Licensor" shall mean the copyright owner or entity authorized by
13 | the copyright owner that is granting the License.
14 |
15 | "Legal Entity" shall mean the union of the acting entity and all
16 | other entities that control, are controlled by, or are under common
17 | control with that entity. For the purposes of this definition,
18 | "control" means (i) the power, direct or indirect, to cause the
19 | direction or management of such entity, whether by contract or
20 | otherwise, or (ii) ownership of fifty percent (50%) or more of the
21 | outstanding shares, or (iii) beneficial ownership of such entity.
22 |
23 | "You" (or "Your") shall mean an individual or Legal Entity
24 | exercising permissions granted by this License.
25 |
26 | "Source" form shall mean the preferred form for making modifications,
27 | including but not limited to software source code, documentation
28 | source, and configuration files.
29 |
30 | "Object" form shall mean any form resulting from mechanical
31 | transformation or translation of a Source form, including but
32 | not limited to compiled object code, generated documentation,
33 | and conversions to other media types.
34 |
35 | "Work" shall mean the work of authorship, whether in Source or
36 | Object form, made available under the License, as indicated by a
37 | copyright notice that is included in or attached to the work
38 | (an example is provided in the Appendix below).
39 |
40 | "Derivative Works" shall mean any work, whether in Source or Object
41 | form, that is based on (or derived from) the Work and for which the
42 | editorial revisions, annotations, elaborations, or other modifications
43 | represent, as a whole, an original work of authorship. For the purposes
44 | of this License, Derivative Works shall not include works that remain
45 | separable from, or merely link (or bind by name) to the interfaces of,
46 | the Work and Derivative Works thereof.
47 |
48 | "Contribution" shall mean any work of authorship, including
49 | the original version of the Work and any modifications or additions
50 | to that Work or Derivative Works thereof, that is intentionally
51 | submitted to Licensor for inclusion in the Work by the copyright owner
52 | or by an individual or Legal Entity authorized to submit on behalf of
53 | the copyright owner. For the purposes of this definition, "submitted"
54 | means any form of electronic, verbal, or written communication sent
55 | to the Licensor or its representatives, including but not limited to
56 | communication on electronic mailing lists, source code control systems,
57 | and issue tracking systems that are managed by, or on behalf of, the
58 | Licensor for the purpose of discussing and improving the Work, but
59 | excluding communication that is conspicuously marked or otherwise
60 | designated in writing by the copyright owner as "Not a Contribution."
61 |
62 | "Contributor" shall mean Licensor and any individual or Legal Entity
63 | on behalf of whom a Contribution has been received by Licensor and
64 | subsequently incorporated within the Work.
65 |
66 | 2. Grant of Copyright License. Subject to the terms and conditions of
67 | this License, each Contributor hereby grants to You a perpetual,
68 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
69 | copyright license to reproduce, prepare Derivative Works of,
70 | publicly display, publicly perform, sublicense, and distribute the
71 | Work and such Derivative Works in Source or Object form.
72 |
73 | 3. Grant of Patent License. Subject to the terms and conditions of
74 | this License, each Contributor hereby grants to You a perpetual,
75 | worldwide, non-exclusive, no-charge, royalty-free, irrevocable
76 | (except as stated in this section) patent license to make, have made,
77 | use, offer to sell, sell, import, and otherwise transfer the Work,
78 | where such license applies only to those patent claims licensable
79 | by such Contributor that are necessarily infringed by their
80 | Contribution(s) alone or by combination of their Contribution(s)
81 | with the Work to which such Contribution(s) was submitted. If You
82 | institute patent litigation against any entity (including a
83 | cross-claim or counterclaim in a lawsuit) alleging that the Work
84 | or a Contribution incorporated within the Work constitutes direct
85 | or contributory patent infringement, then any patent licenses
86 | granted to You under this License for that Work shall terminate
87 | as of the date such litigation is filed.
88 |
89 | 4. Redistribution. You may reproduce and distribute copies of the
90 | Work or Derivative Works thereof in any medium, with or without
91 | modifications, and in Source or Object form, provided that You
92 | meet the following conditions:
93 |
94 | (a) You must give any other recipients of the Work or
95 | Derivative Works a copy of this License; and
96 |
97 | (b) You must cause any modified files to carry prominent notices
98 | stating that You changed the files; and
99 |
100 | (c) You must retain, in the Source form of any Derivative Works
101 | that You distribute, all copyright, patent, trademark, and
102 | attribution notices from the Source form of the Work,
103 | excluding those notices that do not pertain to any part of
104 | the Derivative Works; and
105 |
106 | (d) If the Work includes a "NOTICE" text file as part of its
107 | distribution, then any Derivative Works that You distribute must
108 | include a readable copy of the attribution notices contained
109 | within such NOTICE file, excluding those notices that do not
110 | pertain to any part of the Derivative Works, in at least one
111 | of the following places: within a NOTICE text file distributed
112 | as part of the Derivative Works; within the Source form or
113 | documentation, if provided along with the Derivative Works; or,
114 | within a display generated by the Derivative Works, if and
115 | wherever such third-party notices normally appear. The contents
116 | of the NOTICE file are for informational purposes only and
117 | do not modify the License. You may add Your own attribution
118 | notices within Derivative Works that You distribute, alongside
119 | or as an addendum to the NOTICE text from the Work, provided
120 | that such additional attribution notices cannot be construed
121 | as modifying the License.
122 |
123 | You may add Your own copyright statement to Your modifications and
124 | may provide additional or different license terms and conditions
125 | for use, reproduction, or distribution of Your modifications, or
126 | for any such Derivative Works as a whole, provided Your use,
127 | reproduction, and distribution of the Work otherwise complies with
128 | the conditions stated in this License.
129 |
130 | 5. Submission of Contributions. Unless You explicitly state otherwise,
131 | any Contribution intentionally submitted for inclusion in the Work
132 | by You to the Licensor shall be under the terms and conditions of
133 | this License, without any additional terms or conditions.
134 | Notwithstanding the above, nothing herein shall supersede or modify
135 | the terms of any separate license agreement you may have executed
136 | with Licensor regarding such Contributions.
137 |
138 | 6. Trademarks. This License does not grant permission to use the trade
139 | names, trademarks, service marks, or product names of the Licensor,
140 | except as required for reasonable and customary use in describing the
141 | origin of the Work and reproducing the content of the NOTICE file.
142 |
143 | 7. Disclaimer of Warranty. Unless required by applicable law or
144 | agreed to in writing, Licensor provides the Work (and each
145 | Contributor provides its Contributions) on an "AS IS" BASIS,
146 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
147 | implied, including, without limitation, any warranties or conditions
148 | of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
149 | PARTICULAR PURPOSE. You are solely responsible for determining the
150 | appropriateness of using or redistributing the Work and assume any
151 | risks associated with Your exercise of permissions under this License.
152 |
153 | 8. Limitation of Liability. In no event and under no legal theory,
154 | whether in tort (including negligence), contract, or otherwise,
155 | unless required by applicable law (such as deliberate and grossly
156 | negligent acts) or agreed to in writing, shall any Contributor be
157 | liable to You for damages, including any direct, indirect, special,
158 | incidental, or consequential damages of any character arising as a
159 | result of this License or out of the use or inability to use the
160 | Work (including but not limited to damages for loss of goodwill,
161 | work stoppage, computer failure or malfunction, or any and all
162 | other commercial damages or losses), even if such Contributor
163 | has been advised of the possibility of such damages.
164 |
165 | 9. Accepting Warranty or Additional Liability. While redistributing
166 | the Work or Derivative Works thereof, You may choose to offer,
167 | and charge a fee for, acceptance of support, warranty, indemnity,
168 | or other liability obligations and/or rights consistent with this
169 | License. However, in accepting such obligations, You may act only
170 | on Your own behalf and on Your sole responsibility, not on behalf
171 | of any other Contributor, and only if You agree to indemnify,
172 | defend, and hold each Contributor harmless for any liability
173 | incurred by, or claims asserted against, such Contributor by reason
174 | of your accepting any such warranty or additional liability.
175 |
176 | END OF TERMS AND CONDITIONS
177 |
178 | APPENDIX: How to apply the Apache License to your work.
179 |
180 | To apply the Apache License to your work, attach the following
181 | boilerplate notice, with the fields enclosed by brackets "{}"
182 | replaced with your own identifying information. (Don't include
183 | the brackets!) The text should be enclosed in the appropriate
184 | comment syntax for the file format. We also recommend that a
185 | file or class name and description of purpose be included on the
186 | same "printed page" as the copyright notice for easier
187 | identification within third-party archives.
188 |
189 | Copyright {yyyy} {name of copyright owner}
190 |
191 | Licensed under the Apache License, Version 2.0 (the "License");
192 | you may not use this file except in compliance with the License.
193 | You may obtain a copy of the License at
194 |
195 | http://www.apache.org/licenses/LICENSE-2.0
196 |
197 | Unless required by applicable law or agreed to in writing, software
198 | distributed under the License is distributed on an "AS IS" BASIS,
199 | WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
200 | See the License for the specific language governing permissions and
201 | limitations under the License.
202 |
--------------------------------------------------------------------------------
/README.md:
--------------------------------------------------------------------------------
1 | # dl4j-rnn-timeseries-examples
2 | An example of DL4J's Recurrent Neural Networks applied to timeseries data
3 |
4 | ## Working with the PhysioNet Example
5 |
6 | * git clone the repo
7 | * update the schema as needed - https://github.com/jpatanooga/dl4j-rnn-timeseries-examples/blob/master/src/test/resources/physionet_schema.txt
8 | * the system will automatically download PhysioNet data when it runs
9 | * the outcomes / labels are already included in the repo
10 | * tune the model by changing hyperparameters and running: https://github.com/jpatanooga/dl4j-rnn-timeseries-examples/blob/master/src/main/java/org/deeplearning4j/examples/rnn/strata/physionet/PhysioNet_LSTM_Model.java
11 |
12 | ## ToDo
13 |
14 | * the current state of the PhysioNet LSTM seems to train (avg loss drops, but hyperparameter tuning appreciated)
15 | * we don't yet have the evaluation code complete as of Sunday night (main to-do as of right now)
16 |
17 | ## PhysioNet Stuff
18 |
19 | * http://physionet.org/challenge/2012/
20 |
21 |
22 |
--------------------------------------------------------------------------------
/pom.xml:
--------------------------------------------------------------------------------
1 |
2 | 4.0.0
3 |
4 | org.deeplearning4j
5 | dl4j-rnn-timeseries-examples
6 | 0.4-rc0-SNAPSHOT
7 |
8 | DeepLearning4j Strata RNN Examples
9 | Examples of Recurrent Neural Networks training different data sets
10 |
11 | 0.4-rc3.8
12 | 0.4-rc3.8
13 | 0.0.0.14
14 | 2.5.1
15 |
16 |
17 |
18 |
19 |
20 |
21 | sonatype-nexus-snapshots
22 | Sonatype Nexus snapshot repository
23 | https://oss.sonatype.org/content/repositories/snapshots
24 |
25 |
26 | nexus-releases
27 | Nexus Release Repository
28 | http://oss.sonatype.org/service/local/staging/deploy/maven2/
29 |
30 |
31 |
32 |
33 |
34 | org.nd4j
35 | nd4j-x86
36 | ${nd4j.version}
37 |
38 |
39 |
40 |
41 |
42 | org.deeplearning4j
43 | deeplearning4j-nlp
44 | ${dl4j.version}
45 |
46 |
47 |
48 | org.deeplearning4j
49 | deeplearning4j-core
50 | ${dl4j.version}
51 |
52 |
53 | org.deeplearning4j
54 | deeplearning4j-ui
55 | ${dl4j.version}
56 |
57 |
58 | com.google.guava
59 | guava
60 | 19.0
61 |
62 |
63 | org.nd4j
64 | nd4j-x86
65 | ${nd4j.version}
66 |
67 |
68 | canova-nd4j-image
69 | org.nd4j
70 | ${canova.version}
71 |
72 |
73 | canova-nd4j-codec
74 | org.nd4j
75 | ${canova.version}
76 |
77 |
78 | com.fasterxml.jackson.dataformat
79 | jackson-dataformat-yaml
80 | ${jackson.version}
81 |
82 |
83 |
84 | org.apache.commons
85 | commons-compress
86 | 1.10
87 |
88 |
89 |
90 |
91 |
92 |
93 |
94 | org.codehaus.mojo
95 | exec-maven-plugin
96 | 1.4.0
97 |
98 |
99 |
100 | exec
101 |
102 |
103 |
104 |
105 | java
106 |
107 |
108 |
109 | org.apache.maven.plugins
110 | maven-shade-plugin
111 | 1.6
112 |
113 | true
114 |
115 |
116 | *:*
117 |
118 | org/datanucleus/**
119 | META-INF/*.SF
120 | META-INF/*.DSA
121 | META-INF/*.RSA
122 |
123 |
124 |
125 |
126 |
127 |
128 | package
129 |
130 | shade
131 |
132 |
133 |
134 |
135 | reference.conf
136 |
137 |
138 |
139 |
140 |
141 |
142 |
143 |
144 |
145 |
146 |
147 | org.apache.maven.plugins
148 | maven-compiler-plugin
149 |
150 | 1.7
151 | 1.7
152 |
153 |
154 |
155 |
156 |
157 |
158 |
159 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/shakespeare/CharacterIterator.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.shakespeare;
2 |
3 | import java.io.File;
4 | import java.io.IOException;
5 | import java.nio.charset.Charset;
6 | import java.nio.file.Files;
7 | import java.util.Arrays;
8 | import java.util.HashMap;
9 | import java.util.LinkedList;
10 | import java.util.List;
11 | import java.util.Map;
12 | import java.util.NoSuchElementException;
13 | import java.util.Random;
14 |
15 | import org.deeplearning4j.datasets.iterator.DataSetIterator;
16 | import org.nd4j.linalg.api.ndarray.INDArray;
17 | import org.nd4j.linalg.dataset.DataSet;
18 | import org.nd4j.linalg.dataset.api.DataSetPreProcessor;
19 | import org.nd4j.linalg.factory.Nd4j;
20 |
21 | /** A very simple DataSetIterator for use in the GravesLSTMCharModellingExample.
22 | * Given a text file and a few options, generate feature vectors and labels for training,
23 | * where we want to predict the next character in the sequence.
24 | * This is done by randomly choosing a position in the text file to start the sequence and
25 | * (optionally) scanning backwards to a new line (to ensure we don't start half way through a word
26 | * for example).
27 | * Feature vectors and labels are both one-hot vectors of same length
28 | * @author Alex Black
29 | */
30 | public class CharacterIterator implements DataSetIterator {
31 | private static final long serialVersionUID = -7287833919126626356L;
32 | private static final int MAX_SCAN_LENGTH = 200;
33 | private char[] validCharacters;
34 | private Map charToIdxMap;
35 | private char[] fileCharacters;
36 | private int exampleLength;
37 | private int miniBatchSize;
38 | private int numExamplesToFetch;
39 | private int examplesSoFar = 0;
40 | private Random rng;
41 | private final int numCharacters;
42 | private final boolean alwaysStartAtNewLine;
43 |
44 | public CharacterIterator(String path, int miniBatchSize, int exampleSize, int numExamplesToFetch ) throws IOException {
45 | this(path,Charset.defaultCharset(),miniBatchSize,exampleSize,numExamplesToFetch,getDefaultCharacterSet(), new Random(),true);
46 | }
47 |
48 | /**
49 | * @param textFilePath Path to text file to use for generating samples
50 | * @param textFileEncoding Encoding of the text file. Can try Charset.defaultCharset()
51 | * @param miniBatchSize Number of examples per mini-batch
52 | * @param exampleLength Number of characters in each input/output vector
53 | * @param numExamplesToFetch Total number of examples to fetch (must be multiple of miniBatchSize). Used in hasNext() etc methods
54 | * @param validCharacters Character array of valid characters. Characters not present in this array will be removed
55 | * @param rng Random number generator, for repeatability if required
56 | * @param alwaysStartAtNewLine if true, scan backwards until we find a new line character (up to MAX_SCAN_LENGTH in case
57 | * of no new line characters, to avoid scanning entire file)
58 | * @throws IOException If text file cannot be loaded
59 | */
60 | public CharacterIterator(String textFilePath, Charset textFileEncoding, int miniBatchSize, int exampleLength,
61 | int numExamplesToFetch, char[] validCharacters, Random rng, boolean alwaysStartAtNewLine ) throws IOException {
62 | if( !new File(textFilePath).exists()) throw new IOException("Could not access file (does not exist): " + textFilePath);
63 | if(numExamplesToFetch % miniBatchSize != 0 ) throw new IllegalArgumentException("numExamplesToFetch must be a multiple of miniBatchSize");
64 | if( miniBatchSize <= 0 ) throw new IllegalArgumentException("Invalid miniBatchSize (must be >0)");
65 | this.validCharacters = validCharacters;
66 | this.exampleLength = exampleLength;
67 | this.miniBatchSize = miniBatchSize;
68 | this.numExamplesToFetch = numExamplesToFetch;
69 | this.rng = rng;
70 | this.alwaysStartAtNewLine = alwaysStartAtNewLine;
71 |
72 | //Store valid characters is a map for later use in vectorization
73 | charToIdxMap = new HashMap<>();
74 | for( int i=0; i lines = Files.readAllLines(new File(textFilePath).toPath(),textFileEncoding);
80 | int maxSize = lines.size(); //add lines.size() to account for newline characters at end of each line
81 | for( String s : lines ) maxSize += s.length();
82 | char[] characters = new char[maxSize];
83 | int currIdx = 0;
84 | for( String s : lines ){
85 | char[] thisLine = s.toCharArray();
86 | for( int i=0; i= fileCharacters.length ) throw new IllegalArgumentException("exampleLength="+exampleLength
99 | +" cannot exceed number of valid characters in file ("+fileCharacters.length+")");
100 |
101 | int nRemoved = maxSize - fileCharacters.length;
102 | System.out.println("Loaded and converted file: " + fileCharacters.length + " valid characters of "
103 | + maxSize + " total characters (" + nRemoved + " removed)");
104 | }
105 |
106 | /** A minimal character set, with a-z, A-Z, 0-9 and common punctuation etc */
107 | public static char[] getMinimalCharacterSet(){
108 | List validChars = new LinkedList<>();
109 | for(char c='a'; c<='z'; c++) validChars.add(c);
110 | for(char c='A'; c<='Z'; c++) validChars.add(c);
111 | for(char c='0'; c<='9'; c++) validChars.add(c);
112 | char[] temp = {'!', '&', '(', ')', '?', '-', '\'', '"', ',', '.', ':', ';', ' ', '\n', '\t'};
113 | for( char c : temp ) validChars.add(c);
114 | char[] out = new char[validChars.size()];
115 | int i=0;
116 | for( Character c : validChars ) out[i++] = c;
117 | return out;
118 | }
119 |
120 | /** As per getMinimalCharacterSet(), but with a few extra characters */
121 | public static char[] getDefaultCharacterSet(){
122 | List validChars = new LinkedList<>();
123 | for(char c : getMinimalCharacterSet() ) validChars.add(c);
124 | char[] additionalChars = {'@', '#', '$', '%', '^', '*', '{', '}', '[', ']', '/', '+', '_',
125 | '\\', '|', '<', '>'};
126 | for( char c : additionalChars ) validChars.add(c);
127 | char[] out = new char[validChars.size()];
128 | int i=0;
129 | for( Character c : validChars ) out[i++] = c;
130 | return out;
131 | }
132 |
133 | public char convertIndexToCharacter( int idx ){
134 | return validCharacters[idx];
135 | }
136 |
137 | public int convertCharacterToIndex( char c ){
138 | return charToIdxMap.get(c);
139 | }
140 |
141 | public char getRandomCharacter(){
142 | return validCharacters[(int) (rng.nextDouble()*validCharacters.length)];
143 | }
144 |
145 | public boolean hasNext() {
146 | return examplesSoFar + miniBatchSize <= numExamplesToFetch;
147 | }
148 |
149 | public DataSet next() {
150 | return next(miniBatchSize);
151 | }
152 |
153 | public DataSet next(int num) {
154 | if( examplesSoFar+num > numExamplesToFetch ) throw new NoSuchElementException();
155 | //Allocate space:
156 | INDArray input = Nd4j.zeros(new int[]{num,numCharacters,exampleLength});
157 | INDArray labels = Nd4j.zeros(new int[]{num,numCharacters,exampleLength});
158 |
159 | int maxStartIdx = fileCharacters.length - exampleLength;
160 |
161 | //Randomly select a subset of the file. No attempt is made to avoid overlapping subsets
162 | // of the file in the same minibatch
163 | for( int i=0; i= 1 && fileCharacters[startIdx-1] != '\n' && scanLength++ < MAX_SCAN_LENGTH ){
169 | startIdx--;
170 | endIdx--;
171 | }
172 | }
173 |
174 | int currCharIdx = charToIdxMap.get(fileCharacters[startIdx]); //Current input
175 | int c=0;
176 | for( int j=startIdx+1; j<=endIdx; j++, c++ ){
177 | int nextCharIdx = charToIdxMap.get(fileCharacters[j]); //Next character to predict
178 | input.putScalar(new int[]{i,currCharIdx,c}, 1.0);
179 | labels.putScalar(new int[]{i,nextCharIdx,c}, 1.0);
180 | currCharIdx = nextCharIdx;
181 | }
182 | }
183 |
184 | examplesSoFar += num;
185 | return new DataSet(input,labels);
186 | }
187 |
188 | public int totalExamples() {
189 | return numExamplesToFetch;
190 | }
191 |
192 | public int inputColumns() {
193 | return numCharacters;
194 | }
195 |
196 | public int totalOutcomes() {
197 | return numCharacters;
198 | }
199 |
200 | public void reset() {
201 | examplesSoFar = 0;
202 | }
203 |
204 | public int batch() {
205 | return miniBatchSize;
206 | }
207 |
208 | public int cursor() {
209 | return examplesSoFar;
210 | }
211 |
212 | public int numExamples() {
213 | return numExamplesToFetch;
214 | }
215 |
216 | public void setPreProcessor(DataSetPreProcessor preProcessor) {
217 | throw new UnsupportedOperationException("Not implemented");
218 | }
219 |
220 | @Override
221 | public List getLabels() {
222 | throw new UnsupportedOperationException("Not implemented");
223 | }
224 |
225 | @Override
226 | public void remove() {
227 | throw new UnsupportedOperationException();
228 | }
229 | }
230 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/shakespeare/GravesLSTMCharModellingExample.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.shakespeare;
2 |
3 |
4 | import java.io.File;
5 | import java.io.IOException;
6 | import java.net.URL;
7 | import java.nio.charset.Charset;
8 | import java.util.Random;
9 |
10 | import org.apache.commons.io.FileUtils;
11 | import org.deeplearning4j.nn.api.Layer;
12 | import org.deeplearning4j.nn.api.OptimizationAlgorithm;
13 | import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
14 | import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
15 | import org.deeplearning4j.nn.conf.Updater;
16 | import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
17 | import org.deeplearning4j.nn.conf.layers.GravesLSTM;
18 | import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
19 | import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
20 | import org.deeplearning4j.nn.weights.WeightInit;
21 | import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
22 | import org.nd4j.linalg.api.ndarray.INDArray;
23 | import org.nd4j.linalg.factory.Nd4j;
24 | import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
25 |
26 | /**GravesLSTM Character modelling example
27 | * @author Alex Black
28 |
29 | Example: Train an LSTM RNN to generate text, one character at a time.
30 | This example is somewhat inspired by Andrej Karpathy's blog post,
31 | "The Unreasonable Effectiveness of Recurrent Neural Networks"
32 | http://karpathy.github.io/2015/05/21/rnn-effectiveness/
33 |
34 | Note that this example has not been well tuned - better performance is likely possible with better hyperparameters
35 |
36 | Some differences between this example and Karpathy's work:
37 | - The LSTM architectures appear to differ somewhat. GravesLSTM has peephole connections that
38 | Karpathy's char-rnn implementation appears to lack. See GravesLSTM javadoc for details.
39 | There are pros and cons to both architectures (addition of peephole connections is a more powerful
40 | model but has more parameters per unit), though they are not radically different in practice.
41 | - Karpathy uses truncated backpropagation through time (BPTT) on full character
42 | sequences, whereas this example uses standard (non-truncated) BPTT on partial/subset sequences.
43 | Truncated BPTT is probably the preferred method of training for this sort of problem, and is configurable
44 | using the .backpropType(BackpropType.TruncatedBPTT).tBPTTForwardLength().tBPTTBackwardLength() options
45 |
46 | This example is set up to train on the Complete Works of William Shakespeare, downloaded
47 | from Project Gutenberg. Training on other text sources should be relatively easy to implement.
48 | */
49 | public class GravesLSTMCharModellingExample {
50 | public static void main( String[] args ) throws Exception {
51 | int lstmLayerSize = 200; //Number of units in each GravesLSTM layer
52 | int miniBatchSize = 32; //Size of mini batch to use when training
53 | int examplesPerEpoch = 50 * miniBatchSize; //i.e., how many examples to learn on between generating samples
54 | int exampleLength = 100; //Length of each training example
55 | int numEpochs = 30; //Total number of training + sample generation epochs
56 | int nSamplesToGenerate = 4; //Number of samples to generate after each training epoch
57 | int nCharactersToSample = 300; //Length of each sample to generate
58 | String generationInitialization = null; //Optional character initialization; a random character is used if null
59 | // Above is Used to 'prime' the LSTM with a character sequence to continue/complete.
60 | // Initialization characters must all be in CharacterIterator.getMinimalCharacterSet() by default
61 | Random rng = new Random(12345);
62 |
63 | //Get a DataSetIterator that handles vectorization of text into something we can use to train
64 | // our GravesLSTM network.
65 | CharacterIterator iter = getShakespeareIterator(miniBatchSize,exampleLength,examplesPerEpoch);
66 | int nOut = iter.totalOutcomes();
67 |
68 | //Set up network configuration:
69 | MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
70 | .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
71 | .learningRate(0.1)
72 | .rmsDecay(0.95)
73 | .seed(12345)
74 | .regularization(true)
75 | .l2(0.001)
76 | .list(3)
77 | .layer(0, new GravesLSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
78 | .updater(Updater.RMSPROP)
79 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
80 | .dist(new UniformDistribution(-0.08, 0.08)).build())
81 | .layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
82 | .updater(Updater.RMSPROP)
83 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
84 | .dist(new UniformDistribution(-0.08, 0.08)).build())
85 | .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation("softmax") //MCXENT + softmax for classification
86 | .updater(Updater.RMSPROP)
87 | .nIn(lstmLayerSize).nOut(nOut).weightInit(WeightInit.DISTRIBUTION)
88 | .dist(new UniformDistribution(-0.08, 0.08)).build())
89 | .pretrain(false).backprop(true)
90 | .build();
91 |
92 | MultiLayerNetwork net = new MultiLayerNetwork(conf);
93 | net.init();
94 | net.setListeners(new ScoreIterationListener(1));
95 |
96 | //Print the number of parameters in the network (and for each layer)
97 | Layer[] layers = net.getLayers();
98 | int totalNumParams = 0;
99 | for( int i=0; i
156 | * Note that the initalization is used for all samples
157 | * @param initialization String, may be null. If null, select a random character as initialization for all samples
158 | * @param charactersToSample Number of characters to sample from network (excluding initialization)
159 | * @param net MultiLayerNetwork with one or more GravesLSTM/RNN layers and a softmax output layer
160 | * @param iter CharacterIterator. Used for going from indexes back to characters
161 | */
162 | private static String[] sampleCharactersFromNetwork( String initialization, MultiLayerNetwork net,
163 | CharacterIterator iter, Random rng, int charactersToSample, int numSamples ){
164 | //Set up initialization. If no initialization: use a random character
165 | if( initialization == null ){
166 | initialization = String.valueOf(iter.getRandomCharacter());
167 | }
168 |
169 | //Create input for initialization
170 | INDArray initializationInput = Nd4j.zeros(numSamples, iter.inputColumns(), initialization.length());
171 | char[] init = initialization.toCharArray();
172 | for( int i=0; i Map< ColName, ColValue >
20 | // Cols per patient: { survival_days, length_stay_days, in_hospital_death }
21 | public Map> physioNetLabels = new LinkedHashMap<>();
22 |
	/**
	 * No-arg constructor: creates an empty label store.
	 * Call load(String) to populate physioNetLabels from an outcomes CSV.
	 */
	public PhysioNetLabels() {



	}
28 |
29 | public void load(String labels_path) {
30 |
31 | this.path = labels_path;
32 |
33 | // read file into hash map
34 |
35 | String csvLine = "";
36 | int labelCount = 0;
37 |
38 |
39 | try (BufferedReader br = new BufferedReader(new FileReader( this.path ) ) ) {
40 |
41 | // bleed off the header line
42 | csvLine = br.readLine();
43 |
44 | //Map timeStepMap = new LinkedHashMap<>();
45 |
46 | while ((csvLine = br.readLine()) != null) {
47 | // process the line.
48 |
49 | // open the file
50 | //String csvLine = value.toString();
51 | String[] columns = csvLine.split( this.columnDelimiter );
52 |
53 | String patientID = columns[ 0 ];
54 | int survival_days = Integer.parseInt( columns[ 4 ] );
55 | int length_of_stay_days = Integer.parseInt( columns[ 3 ] );
56 | int in_hospital_death = Integer.parseInt( columns[ 5 ] );
57 |
58 | // Cols per patient: { survival_days, length_stay_days, in_hospital_death }
59 | Map patientMap = new HashMap<>();
60 | patientMap.put("survival_days", survival_days);
61 | patientMap.put("length_stay_days", length_of_stay_days);
62 | patientMap.put("in_hospital_death", in_hospital_death);
63 |
64 | this.physioNetLabels.put(patientID, patientMap);
65 |
66 | if (this.translateLabelEntry(patientID) == 1) {
67 | this.survivedLabelCount++;
68 | } else {
69 | this.diedLabelCount++;
70 | }
71 |
72 | labelCount++;
73 | }
74 |
75 | } catch (IOException e) {
76 | // TODO Auto-generated catch block
77 | e.printStackTrace();
78 | }
79 |
80 | if (4000 != labelCount) {
81 | System.err.println( "There were not 4000 labels in the file! (" + labelCount + ")" );
82 | }
83 |
84 | System.out.println( "Print Label Stats -------- " );
85 | System.out.println( "Survived: " + this.survivedLabelCount );
86 | System.out.println( "Died: " + this.diedLabelCount + "\n" );
87 |
88 | }
89 |
	/**
	 * Returns the stored outcome columns for a patient
	 * ({ survival_days, length_stay_days, in_hospital_death }),
	 * or null if the patient id has not been loaded.
	 */
	public Map getPatientData( String patientID ) {

		return this.physioNetLabels.get(patientID);

	}
95 |
96 | /**
97 | * Is this is non-survival outcome (0 value) class?
98 | * or is it a survival outcome (1 value) class?
99 | *
100 | Given these definitions and constraints,
101 | Survival > Length of stay ⇒ Survivor
102 | Survival = -1 ⇒ Survivor
103 | 2 ≤ Survival ≤ Length of stay ⇒ In-hospital death
104 | *
105 | * @return
106 | */
107 | public int translateLabelEntry(String patientID) {
108 |
109 | Map patientMap = this.physioNetLabels.get(patientID);
110 |
111 | // Cols per patient: { survival_days, length_stay_days, in_hospital_death }
112 |
113 | if ( patientMap.get("survival_days") > patientMap.get("length_stay_days") ) {
114 |
115 | return 1;
116 |
117 | }
118 |
119 | if ( patientMap.get("survival_days") == -1 ) {
120 |
121 | return 1;
122 |
123 | }
124 |
125 | if ( 2 <= patientMap.get("survival_days") && patientMap.get("survival_days") <= patientMap.get("length_stay_days") ) {
126 |
127 | return 0;
128 |
129 | }
130 |
131 |
132 |
133 | return 0;
134 | }
135 |
136 |
137 | }
138 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/strata/physionet/PhysioNet_ICU_Mortality_Iterator.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.strata.physionet;
2 |
3 | import static org.junit.Assert.assertEquals;
4 |
5 | import java.io.File;
6 | import java.io.IOException;
7 | import java.nio.charset.Charset;
8 | import java.nio.file.Files;
9 | import java.util.Arrays;
10 | import java.util.HashMap;
11 | import java.util.LinkedList;
12 | import java.util.List;
13 | import java.util.Map;
14 | import java.util.NoSuchElementException;
15 | import java.util.Random;
16 |
17 | import org.deeplearning4j.datasets.iterator.DataSetIterator;
18 | import org.nd4j.linalg.dataset.DataSet;
19 | import org.nd4j.linalg.dataset.api.DataSetPreProcessor;
20 |
21 | public class PhysioNet_ICU_Mortality_Iterator implements DataSetIterator {
22 | //private static final long serialVersionUID = -7287833919126626356L;
23 | //private static final int MAX_SCAN_LENGTH = 200;
24 | //private char[] validCharacters;
25 | //private Map charToIdxMap;
26 | // private char[] fileCharacters;
27 | // private int exampleLength;
28 |
29 | private int miniBatchSize;
30 | private int currentFileListIndex = 0;
31 | private int totalExamples = 0;
32 |
33 | //private int numExamplesToFetch;
34 | //private int examplesSoFar = 0;
35 | //private Random rng;
36 | //private final int numCharacters;
37 | //private final boolean alwaysStartAtNewLine;
38 |
39 | String datasetInputPath = "";
40 | String datasetSchemaPath = "";
41 | String datasetLabelsPath = "";
42 | public PhysioNet_Vectorizer vectorizer = null;
43 |
44 | public PhysioNet_ICU_Mortality_Iterator(String dataInputPath, String datasetSchemaPath, String datasetLabels, int miniBatchSize, int totalExamples ) throws IOException {
45 | // this(path,Charset.defaultCharset(),miniBatchSize,exampleSize,numExamplesToFetch,getDefaultCharacterSet(), new Random(),true);
46 | //this.numCharacters = 0; // fix
47 |
48 | this.datasetInputPath = dataInputPath;
49 | this.datasetSchemaPath = datasetSchemaPath;
50 | this.datasetLabelsPath = datasetLabels;
51 |
52 | this.vectorizer = new PhysioNet_Vectorizer(this.datasetInputPath, this.datasetSchemaPath, this.datasetLabelsPath );
53 | this.vectorizer.loadSchema();
54 | this.vectorizer.loadLabels();
55 |
56 | this.vectorizer.setSpecialStatisticsFileList("/tmp/set-a/");
57 | // this.vectorizer.setupBalancedSubset( totalExamples );
58 |
59 | this.vectorizer.collectStatistics();
60 |
61 | this.miniBatchSize = miniBatchSize;
62 | this.totalExamples = totalExamples;
63 |
64 | }
65 |
66 |
67 | public boolean hasNext() {
68 | return currentFileListIndex + miniBatchSize <= this.totalExamples;
69 | }
70 |
71 | public DataSet next() {
72 | return next(miniBatchSize);
73 | }
74 |
75 | /**
76 | * TODO: Cut here ----------------------------------
77 | *
78 | * Dimensions of input
79 | * x: miniBatchSize
80 | * y: every column we want to look at per timestep (basically our traditional vector)
81 | * z: the timestep value
82 | *
83 | */
84 | public DataSet next(int miniBatchSize) {
85 |
86 | /*
87 |
88 | if( examplesSoFar + miniBatchSize > numExamplesToFetch ) throw new NoSuchElementException();
89 | //Allocate space:
90 | INDArray input = null; //Nd4j.zeros(new int[]{num,numCharacters,exampleLength});
91 | INDArray labels = null; //Nd4j.zeros(new int[]{num,numCharacters,exampleLength});
92 |
93 | int maxStartIdx = fileCharacters.length - exampleLength;
94 |
95 | //Randomly select a subset of the file. No attempt is made to avoid overlapping subsets
96 | // of the file in the same minibatch
97 | for( int i=0; i < miniBatchSize; i++ ){
98 | int startIdx = (int) (rng.nextDouble()*maxStartIdx);
99 | int endIdx = startIdx + exampleLength;
100 | int scanLength = 0;
101 |
102 | int currCharIdx = charToIdxMap.get(fileCharacters[startIdx]); //Current input
103 | int c=0;
104 | for( int j=startIdx+1; j<=endIdx; j++, c++ ){
105 | int nextCharIdx = charToIdxMap.get(fileCharacters[j]); //Next character to predict
106 | input.putScalar(new int[]{i,currCharIdx,c}, 1.0);
107 | labels.putScalar(new int[]{i,nextCharIdx,c}, 1.0);
108 | currCharIdx = nextCharIdx;
109 | }
110 | }
111 |
112 | examplesSoFar += miniBatchSize;
113 | return new DataSet(input,labels);
114 | */
115 |
116 | //int miniBatchSize = 50;
117 | int columnCount = 0;
118 |
119 |
120 | columnCount = (this.vectorizer.schema.getTransformedVectorSize() + 1);
121 |
122 |
123 |
124 |
125 |
126 |
127 | // vec.schema.debugPrintDatasetStatistics();
128 | /*
129 | System.out.println( "Max Timesteps: " + this.vectorizer.maxNumberTimeSteps );
130 |
131 | System.out.println( "ND4J Input Size: " );
132 | System.out.println( "Minibatch: " + miniBatchSize );
133 | System.out.println( "Column Count: " + columnCount );
134 | System.out.println( "Timestep Count: " + this.vectorizer.maxNumberTimeSteps );
135 | */
136 |
137 | //int currentOffset = 0;
138 |
139 | // for ( int index = 0; index < this.vectorizer.listOfFilesToVectorize.length; index += miniBatchSize) {
140 |
141 | // System.out.println( "\n\n ------------- Mini-batch offset: " + this.currentFileListIndex + " -----------------\n" );
142 | DataSet d = this.vectorizer.generateNextTimeseriesVectorMiniBatch( miniBatchSize, this.currentFileListIndex, columnCount );
143 | this.currentFileListIndex += miniBatchSize;
144 |
145 | // }
146 |
147 |
148 |
149 | return d;
150 |
151 | }
152 |
153 | public int totalExamples() {
154 | return this.currentFileListIndex;
155 | }
156 |
157 | public int inputColumns() {
158 | return this.vectorizer.schema.getTransformedVectorSize() + 1;
159 | }
160 |
161 | public int totalOutcomes() {
162 | return 2;
163 | }
164 |
165 | public void reset() {
166 | this.currentFileListIndex = 0;
167 | }
168 |
169 | public int batch() {
170 | return miniBatchSize;
171 | }
172 |
173 | public int cursor() {
174 | return this.currentFileListIndex;
175 | }
176 |
177 | public int numExamples() {
178 | return this.totalExamples;
179 | }
180 |
181 | public void setPreProcessor(DataSetPreProcessor preProcessor) {
182 | throw new UnsupportedOperationException("Not implemented");
183 | }
184 |
185 | @Override
186 | public List getLabels() {
187 | throw new UnsupportedOperationException("Not implemented");
188 | }
189 |
190 | @Override
191 | public void remove() {
192 | throw new UnsupportedOperationException();
193 | }
194 | }
195 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/strata/physionet/PhysioNet_Model_Evaluation.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.strata.physionet;
2 |
3 | import java.io.IOException;
4 | import java.text.SimpleDateFormat;
5 | import java.util.Random;
6 |
7 | import org.deeplearning4j.eval.Evaluation;
8 | import org.deeplearning4j.examples.rnn.strata.physionet.utils.PhysioNetDataUtils;
9 | import org.deeplearning4j.nn.api.OptimizationAlgorithm;
10 | import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
11 | import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
12 | import org.deeplearning4j.nn.conf.Updater;
13 | import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
14 | import org.deeplearning4j.nn.conf.layers.GravesLSTM;
15 | import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
16 | import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
17 | import org.deeplearning4j.nn.weights.WeightInit;
18 | import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
19 | import org.nd4j.linalg.api.ndarray.INDArray;
20 | import org.nd4j.linalg.dataset.DataSet;
21 | import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
22 |
23 | public class PhysioNet_Model_Evaluation {
24 |
25 | public static void main( String[] args ) throws Exception {
26 |
27 | evaluateExistingModel();
28 |
29 | }
30 |
31 | public static void evaluateExistingModel() throws IOException {
32 |
33 | String modelPath = "/tmp/rnns/physionet/models/dl4j_model_run_2016-03-20_17_21_08/epoch_9_f1_0.8684/";
34 |
35 | int lstmLayerSize = 300; //Number of units in each GravesLSTM layer
36 | int miniBatchSize = 20; //Size of mini batch to use when training
37 | //int totalExamplesToTrainWith = 1100;
38 |
39 | int trainingExamples = 2800;
40 | int testExamples = 600;
41 | int validateExamples = 600;
42 |
43 | double learningRate = 0.009;
44 |
45 | int numEpochs = 10; //Total number of training + sample generation epochs
46 | Random rng = new Random(12345);
47 | SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd_HH_mm_ss");
48 |
49 | int nOut = 2; //iter.totalOutcomes();
50 |
51 | //PhysioNet_ICU_Mortality_Iterator iter = getPhysioNetIterator( miniBatchSize, totalExamplesToTrainWith );
52 |
53 | PhysioNet_ICU_Mortality_Iterator iter = new PhysioNet_ICU_Mortality_Iterator( "/tmp/set-a-full-splits-1/train/", "src/test/resources/physionet_schema_zmzuv_0.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", miniBatchSize, trainingExamples);
54 |
55 |
56 | PhysioNet_ICU_Mortality_Iterator iter_validate = new PhysioNet_ICU_Mortality_Iterator( "/tmp/set-a-full-splits-1/validate/", "src/test/resources/physionet_schema_zmzuv_0.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", validateExamples, validateExamples);
57 |
58 | // PhysioNet_ICU_Mortality_Iterator test_iter = getPhysioNetIterator( miniBatchSize, 100 );
59 |
60 | //PhysioNet_ICU_Mortality_Iterator test_iter = new PhysioNet_ICU_Mortality_Iterator( "/tmp/set-a-balanced-5/test/", "src/test/resources/physionet_schema.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", miniBatchSize, 20);
61 | PhysioNet_ICU_Mortality_Iterator test_iter = new PhysioNet_ICU_Mortality_Iterator( "/tmp/set-a-full-splits-1/test/", "src/test/resources/physionet_schema_zmzuv_0.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", testExamples, testExamples);
62 |
63 | iter.reset();
64 | test_iter.reset();
65 | iter_validate.reset();
66 |
67 | System.out.println( "We have " + iter.inputColumns() + " input columns." );
68 |
69 | // *****************************
70 | // TODO: Drop:
71 | /*
72 | dropout for rnns is applied on the input activations only, not recurrent activations
73 | as is common in the literature
74 | same as other layers
75 | so .dropout(0.5) with .regularization(true)
76 | */
77 |
78 | //Set up network configuration:
79 | MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
80 | .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
81 | .learningRate( learningRate )
82 | .rmsDecay(0.95)
83 | .seed(12345)
84 | .regularization(true)
85 | .l2(0.001)
86 | //.dropOut(0.5)
87 | .list(3)
88 | .layer(0, new GravesLSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
89 | .updater(Updater.RMSPROP)
90 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
91 | .dist(new UniformDistribution(-0.08, 0.08)).build())
92 | .layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
93 | .updater(Updater.RMSPROP)
94 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
95 | .dist(new UniformDistribution(-0.08, 0.08)).build())
96 | .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation("softmax") //MCXENT + softmax for classification
97 | .updater(Updater.RMSPROP)
98 | .nIn(lstmLayerSize).nOut(nOut).weightInit(WeightInit.DISTRIBUTION)
99 | .dist(new UniformDistribution(-0.08, 0.08)).build())
100 | .pretrain(false).backprop(true)
101 | .build();
102 |
103 |
104 |
105 |
106 |
107 | MultiLayerNetwork net = new MultiLayerNetwork(conf);
108 | net.init();
109 | net.setListeners(new ScoreIterationListener(1));
110 |
111 | // DAVE: UNCOMMENT HERE AND REPLACE DIRS TO RESUME TRAINING...
112 | // System.out.println( "Loading old parameters [test] >> " );
113 | PhysioNetDataUtils.loadDL4JNetworkParameters( net, modelPath );
114 |
115 |
116 |
117 | iter_validate.reset();
118 |
119 | Evaluation evaluation_validate = new Evaluation(2);
120 | while(iter_validate.hasNext()){
121 | DataSet t = iter_validate.next();
122 | INDArray features = t.getFeatureMatrix();
123 | INDArray lables = t.getLabels();
124 | INDArray inMask = t.getFeaturesMaskArray();
125 | INDArray outMask = t.getLabelsMaskArray();
126 | INDArray predicted = net.output(features,false,inMask,outMask);
127 |
128 | evaluation_validate.evalTimeSeries(lables,predicted,outMask);
129 |
130 | }
131 | System.out.println( "\nParameter Load --- Pre Check: Validate Evaluation: ");
132 | System.out.println( evaluation_validate.stats() );
133 |
134 |
135 | Evaluation evaluation_final_test = new Evaluation(2);
136 | while(test_iter.hasNext()){
137 | DataSet t = test_iter.next();
138 | INDArray features = t.getFeatureMatrix();
139 | INDArray lables = t.getLabels();
140 | INDArray inMask = t.getFeaturesMaskArray();
141 | INDArray outMask = t.getLabelsMaskArray();
142 | INDArray predicted = net.output(features,false,inMask,outMask);
143 | /*
144 | System.out.println("predicted: ");
145 | System.out.println( predicted.getRow(0) );
146 | System.out.println("label: ");
147 | System.out.println( lables.getRow(0) );
148 | */
149 |
150 | evaluation_final_test.evalTimeSeries(lables,predicted,outMask);
151 |
152 | }
153 | //test_iter.reset();
154 | System.out.println( "\n\n\nFinal Test Evaluation: ");
155 | System.out.println( evaluation_final_test.stats() );
156 |
157 |
158 | }
159 |
160 | }
161 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/strata/physionet/output/single/PhysioNet_ICU_SingleLabel_Iterator.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.strata.physionet.output.single;
2 |
3 | import java.io.IOException;
4 | import java.util.List;
5 |
6 | import org.deeplearning4j.datasets.iterator.DataSetIterator;
7 | import org.deeplearning4j.examples.rnn.strata.physionet.output.single.PhysioNet_Vectorizer;
8 | import org.nd4j.linalg.dataset.DataSet;
9 | import org.nd4j.linalg.dataset.api.DataSetPreProcessor;
10 |
11 | public class PhysioNet_ICU_SingleLabel_Iterator implements DataSetIterator {
12 | //private static final long serialVersionUID = -7287833919126626356L;
13 | //private static final int MAX_SCAN_LENGTH = 200;
14 | //private char[] validCharacters;
15 | //private Map charToIdxMap;
16 | // private char[] fileCharacters;
17 | // private int exampleLength;
18 |
19 | private int miniBatchSize;
20 | private int currentFileListIndex = 0;
21 | private int totalExamples = 0;
22 |
23 |
24 | String datasetInputPath = "";
25 | String datasetSchemaPath = "";
26 | String datasetLabelsPath = "";
27 | public PhysioNet_Vectorizer vectorizer = null;
28 |
29 | public PhysioNet_ICU_SingleLabel_Iterator(String dataInputPath, String datasetSchemaPath, String datasetLabels, int miniBatchSize, int totalExamples ) throws IOException {
30 |
31 | this.datasetInputPath = dataInputPath;
32 | this.datasetSchemaPath = datasetSchemaPath;
33 | this.datasetLabelsPath = datasetLabels;
34 |
35 | this.vectorizer = new PhysioNet_Vectorizer(this.datasetInputPath, this.datasetSchemaPath, this.datasetLabelsPath );
36 | this.vectorizer.loadSchema();
37 | this.vectorizer.loadLabels();
38 |
39 | this.vectorizer.setSpecialStatisticsFileList("/tmp/set-a/");
40 | // this.vectorizer.setupBalancedSubset( totalExamples );
41 |
42 | this.vectorizer.collectStatistics();
43 |
44 | this.miniBatchSize = miniBatchSize;
45 | this.totalExamples = totalExamples;
46 |
47 | }
48 |
49 |
50 | public boolean hasNext() {
51 | return currentFileListIndex + miniBatchSize <= this.totalExamples;
52 | }
53 |
54 | public DataSet next() {
55 | return next(miniBatchSize);
56 | }
57 |
58 | /**
59 | * TODO: Cut here ----------------------------------
60 | *
61 | * Dimensions of input
62 | * x: miniBatchSize
63 | * y: every column we want to look at per timestep (basically our traditional vector)
64 | * z: the timestep value
65 | *
66 | */
67 | public DataSet next(int miniBatchSize) {
68 |
69 | /*
70 |
71 | if( examplesSoFar + miniBatchSize > numExamplesToFetch ) throw new NoSuchElementException();
72 | //Allocate space:
73 | INDArray input = null; //Nd4j.zeros(new int[]{num,numCharacters,exampleLength});
74 | INDArray labels = null; //Nd4j.zeros(new int[]{num,numCharacters,exampleLength});
75 |
76 | int maxStartIdx = fileCharacters.length - exampleLength;
77 |
78 | //Randomly select a subset of the file. No attempt is made to avoid overlapping subsets
79 | // of the file in the same minibatch
80 | for( int i=0; i < miniBatchSize; i++ ){
81 | int startIdx = (int) (rng.nextDouble()*maxStartIdx);
82 | int endIdx = startIdx + exampleLength;
83 | int scanLength = 0;
84 |
85 | int currCharIdx = charToIdxMap.get(fileCharacters[startIdx]); //Current input
86 | int c=0;
87 | for( int j=startIdx+1; j<=endIdx; j++, c++ ){
88 | int nextCharIdx = charToIdxMap.get(fileCharacters[j]); //Next character to predict
89 | input.putScalar(new int[]{i,currCharIdx,c}, 1.0);
90 | labels.putScalar(new int[]{i,nextCharIdx,c}, 1.0);
91 | currCharIdx = nextCharIdx;
92 | }
93 | }
94 |
95 | examplesSoFar += miniBatchSize;
96 | return new DataSet(input,labels);
97 | */
98 |
99 | //int miniBatchSize = 50;
100 | int columnCount = 0;
101 |
102 |
103 | columnCount = (this.vectorizer.schema.getTransformedVectorSize() + 1);
104 |
105 |
106 |
107 |
108 |
109 |
110 | // vec.schema.debugPrintDatasetStatistics();
111 | /*
112 | System.out.println( "Max Timesteps: " + this.vectorizer.maxNumberTimeSteps );
113 |
114 | System.out.println( "ND4J Input Size: " );
115 | System.out.println( "Minibatch: " + miniBatchSize );
116 | System.out.println( "Column Count: " + columnCount );
117 | System.out.println( "Timestep Count: " + this.vectorizer.maxNumberTimeSteps );
118 | */
119 |
120 | //int currentOffset = 0;
121 |
122 | // for ( int index = 0; index < this.vectorizer.listOfFilesToVectorize.length; index += miniBatchSize) {
123 |
124 | // System.out.println( "\n\n ------------- Mini-batch offset: " + this.currentFileListIndex + " -----------------\n" );
125 | DataSet d = this.vectorizer.generateNextTimeseriesVectorMiniBatch( miniBatchSize, this.currentFileListIndex, columnCount );
126 | this.currentFileListIndex += miniBatchSize;
127 |
128 | // }
129 |
130 |
131 |
132 | return d;
133 |
134 | }
135 |
136 | public int totalExamples() {
137 | return this.currentFileListIndex;
138 | }
139 |
140 | public int inputColumns() {
141 | return this.vectorizer.schema.getTransformedVectorSize() + 1;
142 | }
143 |
144 | public int totalOutcomes() {
145 | return 2;
146 | }
147 |
148 | public void reset() {
149 | this.currentFileListIndex = 0;
150 | }
151 |
152 | public int batch() {
153 | return miniBatchSize;
154 | }
155 |
156 | public int cursor() {
157 | return this.currentFileListIndex;
158 | }
159 |
160 | public int numExamples() {
161 | return this.totalExamples;
162 | }
163 |
164 | public void setPreProcessor(DataSetPreProcessor preProcessor) {
165 | throw new UnsupportedOperationException("Not implemented");
166 | }
167 |
168 | @Override
169 | public List getLabels() {
170 | throw new UnsupportedOperationException("Not implemented");
171 | }
172 |
173 | @Override
174 | public void remove() {
175 | throw new UnsupportedOperationException();
176 | }
177 | }
178 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/strata/physionet/utils/EvalScoreTracker.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.strata.physionet.utils;
2 |
3 | import java.util.ArrayList;
4 | import java.util.List;
5 |
public class EvalScoreTracker {

	// Sliding window of the most recent windowSize epochs and their F1 scores.
	List<Integer> epochs = new ArrayList<>();
	List<Double> f1Scores_train = new ArrayList<>();
	List<Double> f1Scores_validate = new ArrayList<>();
	int windowSize = 5;

	/**
	 * @param windowSize how many of the most recent epochs to retain and print
	 */
	public EvalScoreTracker(int windowSize) {

		this.windowSize = windowSize;

	}

	/**
	 * Records the train/validate F1 scores for an epoch, evicting the oldest
	 * entry from each list once the window is full.
	 *
	 * @param epoch       epoch number the scores belong to
	 * @param f1_train    F1 score on the training split
	 * @param f1_validate F1 score on the validation split
	 */
	public void addF1(int epoch, double f1_train, double f1_validate) {

		if (this.f1Scores_train.size() >= this.windowSize) {
			this.f1Scores_train.remove(0);
		}

		if (this.f1Scores_validate.size() >= this.windowSize) {
			this.f1Scores_validate.remove(0);
		}

		if (this.epochs.size() >= this.windowSize) {
			this.epochs.remove(0);
		}

		// autoboxing replaces the deprecated new Double(...) / new Integer(...) constructors
		this.f1Scores_train.add( f1_train );
		this.f1Scores_validate.add( f1_validate );
		this.epochs.add( epoch );

	}

	/** Prints the retained window of epoch / F1-score entries to stdout. */
	public void printWindow() {

		System.out.println( "> ---------------- ---------------- ----------------" );
		System.out.println( "Last " + this.windowSize + " f1 Scores: " );

		for (int x = 0; x < this.f1Scores_train.size(); x++ ) {

			System.out.println( "\tEpoch: " + this.epochs.get( x ) + ", F1-Train: " + this.f1Scores_train.get(x) + ", F1-Validate: " + this.f1Scores_validate.get(x) );

		}

		System.out.println( "> ---------------- ---------------- ----------------" );

	}

}
57 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/strata/physionet/utils/PhysioNetVectorizationDebugTool.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.strata.physionet.utils;
2 |
3 | import java.io.BufferedWriter;
4 | import java.io.File;
5 | import java.io.FileWriter;
6 | import java.text.SimpleDateFormat;
7 | import java.util.ArrayList;
8 | import java.util.Calendar;
9 | import java.util.List;
10 |
11 | import org.deeplearning4j.examples.rnn.strata.physionet.PhysioNet_Vectorizer;
12 | import org.nd4j.linalg.api.ndarray.INDArray;
13 | import org.nd4j.linalg.factory.Nd4j;
14 |
15 | public class PhysioNetVectorizationDebugTool {
16 |
17 | public static void triagePatientFile( String physioNetBaseDirectory, String physioSchemaFilePath, String physioLabelsFilePath, String patientFileFullPath ) {
18 |
19 | //PhysioNet_Vectorizer vec = new PhysioNet_Vectorizer("/tmp/set-a/", "src/test/resources/physionet_schema.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt" );
20 | PhysioNet_Vectorizer vec = new PhysioNet_Vectorizer( physioNetBaseDirectory, physioSchemaFilePath, physioLabelsFilePath );
21 | vec.loadSchema();
22 | vec.loadLabels();
23 |
24 | vec.collectStatistics();
25 |
26 | String filename = patientFileFullPath; //vec.getFilenameForIndex( 0 );
27 | // if (filename.startsWith("/")) {
28 | // filename = filename.replace("/", "");
29 | //}
30 |
31 | File f = new File(filename);
32 | String strippedFilename = f.getName().replace(".txt", "");
33 |
34 | BufferedWriter writer = null;
35 | try {
36 | //create a temporary file
37 | String timeLog = new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime());
38 | File logFile = new File("/tmp/" + strippedFilename + "_" + timeLog + ".txt");
39 |
40 | // This will output the full path where the file will be written to...
41 | System.out.println(logFile.getCanonicalPath());
42 | writer = new BufferedWriter(new FileWriter(logFile));
43 |
44 |
45 | // log this
46 | vec.schema.debugPrintDatasetStatistics();
47 | vec.schema.logDatasetStatistics(writer);
48 | vec.schema.logColumns(writer);
49 |
50 | // now write out the raw record data
51 |
52 | //System.out.println( "ND4J Input Size: " );
53 | //System.out.println( "Minibatch: 1" );
54 | writer.write( "\n\n" );
55 | writer.write( "Total Column Count: " + (vec.schema.getTransformedVectorSize() + 1) + "\n" );
56 | writer.write( "Total Timestep Count: " + vec.maxNumberTimeSteps + "\n" );
57 | writer.write( "\n\n" );
58 |
59 | vec.logTreeMapData(writer, filename);
60 |
61 | int timesteps = vec.maxNumberTimeSteps;
62 |
63 | INDArray input = Nd4j.zeros(new int[]{ 1, vec.schema.getTransformedVectorSize() + 1, timesteps });
64 | INDArray inputMask = Nd4j.zeros(new int[]{ 1, vec.schema.getTransformedVectorSize() + 1, timesteps });
65 | // 1 == mini-batch size
66 | // 2 == number of classes (0 -> no survive, 1 -> survival)
67 | INDArray labels = Nd4j.zeros(new int[]{ 1, 2 });
68 | INDArray labelsMask = Nd4j.ones(new int[]{ 1, 2 });
69 |
70 | vec.extractFileContentsAndVectorize( filename, 0, vec.schema.getTransformedVectorSize() + 1, timesteps, input, inputMask, labels, labelsMask);
71 |
72 | PhysioNet_Vectorizer.log_debug3D_Nd4J_Input( writer, input, 1, vec.schema.getTransformedVectorSize() + 1, timesteps );
73 |
74 | writer.write( "\n\nDebug Input Mask --------------- \n" );
75 |
76 | PhysioNet_Vectorizer.log_debug3D_Nd4J_Input( writer, inputMask, 1, vec.schema.getTransformedVectorSize() + 1, timesteps );
77 |
78 | //PhysioNet_Vectorizer.log_debug3D_Nd4J_Input( writer, d.getFeaturesMaskArray(), 1, 43, 202 );
79 |
80 | System.out.println( "> [ done ] ");
81 |
82 | } catch (Exception e) {
83 | e.printStackTrace();
84 | } finally {
85 | try {
86 | // Close the writer regardless of what happens...
87 | writer.close();
88 | } catch (Exception e) {
89 | }
90 | }
91 |
92 |
93 |
94 |
95 | }
96 |
	/**
	 * Builds a class-balanced subset of the PhysioNet "set-a" ICU data: loads the
	 * vectorizer schema and outcome labels, collects dataset statistics, and
	 * (currently disabled, see below) randomly samples an equal number of
	 * survived/died patient records into a file list.
	 *
	 * NOTE(review): the whole balancing/sampling stage is commented out, so right
	 * now this method only loads schema/labels and collects statistics; the
	 * totalRecords parameter is unused until that block is re-enabled.
	 *
	 * @param totalRecords desired size of the balanced subset (half per class) —
	 *                     only consumed by the disabled sampling block
	 */
	public static void extractBalancedSubsetOfPhysioNet( int totalRecords ) {
		
		// "src/test/resources/physionet_schema.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt"
		
		// Hard-coded input locations — /tmp/set-a/ is presumably a local dev copy of the
		// PhysioNet set-a files; TODO(review) confirm before reusing this utility elsewhere.
		String physioNetBaseDirectory = "/tmp/set-a/";
		String physioSchemaFilePath = "src/test/resources/physionet_schema.txt";
		String physioLabelsFilePath = "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt";
		
		// Load schema + labels, then scan the dataset to gather statistics.
		PhysioNet_Vectorizer vec = new PhysioNet_Vectorizer( physioNetBaseDirectory, physioSchemaFilePath, physioLabelsFilePath );
		vec.loadSchema();
		vec.loadLabels();
		//vec.setupFileInputList(false, 0);
		vec.collectStatistics();
		// Disabled balanced-sampling stage: draws random patient files, buckets them by
		// outcome (0 = died / negative, else survived / positive) until each bucket holds
		// totalRecords / 2 entries, then interleaves them into a File[] list.
		/*
		// died
		List negativeClassFiles = new ArrayList<>();
		
		// survived
		List positiveClassFiles = new ArrayList<>();
		
		//for (int x = 0; x < 100; x++) {
		while (negativeClassFiles.size() + positiveClassFiles.size() < totalRecords) {
			
			int randomIndex = (int )(Math.random() * 4000);
			
			String filename = vec.getFilenameForIndex(randomIndex);
			
			String[] filenameParts = filename.split(".t");
			String patientID = filenameParts[ 0 ];
			
			//System.out.println( "" + patientID );
			
			if (0 == vec.labels.translateLabelEntry(patientID)) {
				
				// died
				if (negativeClassFiles.size() < totalRecords / 2) {
					negativeClassFiles.add( patientID );
				}
				
			} else {
				
				// survived
				if (positiveClassFiles.size() < totalRecords / 2) {
					positiveClassFiles.add( patientID );
				}
				
			}
			
		} // while
		
		System.out.println( "Classes: " );
		
		System.out.println( "Survived: " + positiveClassFiles.size() );
		System.out.println( "Died: " + negativeClassFiles.size() );
		
		File[] listOfFiles = new File[ totalRecords ];
		for ( int x = 0; x < totalRecords; x++) {
			
			String patientID = "";
			if (positiveClassFiles.size() > negativeClassFiles.size()) {
				
				patientID = positiveClassFiles.remove(0);
				String path = physioNetBaseDirectory + patientID + ".txt";
				listOfFiles[ x ] = new File( path );
				
				System.out.println( "pos: " + path );
				
			} else {
				
				patientID = negativeClassFiles.remove(0);
				String path = physioNetBaseDirectory + patientID + ".txt";
				listOfFiles[ x ] = new File( path );
				
				System.out.println( "neg: " + path );
				
				
			}
			
			
			
		}
		*/
		
		
	}
182 |
183 | }
184 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/synthetic/ND4JMatrixTool.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.synthetic;
2 |
3 | import java.io.BufferedWriter;
4 | import java.io.IOException;
5 |
6 | import org.nd4j.linalg.api.ndarray.INDArray;
7 |
8 | public class ND4JMatrixTool {
9 |
10 | public static void debug3D_Nd4J_Input( INDArray dstInput, int miniBatchCount, int columnCount, int timeStepCount) {
11 |
12 | System.out.println( "Debugging Input of ND4J 3d Matrix -------" );
13 |
14 | for ( int miniBatchIndex = 0; miniBatchIndex < miniBatchCount; miniBatchIndex++) {
15 |
16 | System.out.println( "Mini-Batch Index: " + miniBatchIndex );
17 |
18 | for ( int timeStepIndex = 0; timeStepIndex < timeStepCount; timeStepIndex++) {
19 |
20 | System.out.print( "[timestep: " + timeStepIndex + "] " );
21 |
22 | for ( int columnIndex = 0; columnIndex < columnCount; columnIndex++) {
23 |
24 |
25 |
26 | int[] params = new int[]{ miniBatchIndex, columnIndex, timeStepIndex };
27 |
28 | double v = dstInput.getDouble( params );
29 |
30 | System.out.print( ", " + v );
31 |
32 |
33 | }
34 |
35 | System.out.println("");
36 |
37 | }
38 |
39 |
40 | }
41 |
42 | System.out.println( "Debugging Input of ND4J 3d Matrix -------" );
43 |
44 | }
45 |
46 | public static void debug2D_Nd4J_Input( INDArray dstInput, int miniBatchCount, int timeStepCount) {
47 |
48 | System.out.println( "\nSTART > Debugging Input of ND4J 2d Matrix -------" );
49 |
50 | for ( int miniBatchIndex = 0; miniBatchIndex < miniBatchCount; miniBatchIndex++) {
51 |
52 | System.out.println( "Mini-Batch Index: " + miniBatchIndex );
53 |
54 | for ( int timeStepIndex = 0; timeStepIndex < timeStepCount; timeStepIndex++) {
55 |
56 | System.out.print( "[timestep: " + timeStepIndex + "] " );
57 |
58 |
59 |
60 | int[] params = new int[]{ miniBatchIndex, timeStepIndex };
61 |
62 | double v = dstInput.getDouble( params );
63 |
64 | System.out.print( ", " + v );
65 |
66 |
67 | System.out.println("");
68 |
69 | }
70 |
71 |
72 | }
73 |
74 | System.out.println( "END > Debugging Input of ND4J 2d Matrix -------" );
75 |
76 | }
77 |
78 |
79 | public static void log_debug3D_Nd4J_Input( BufferedWriter writer, INDArray dstInput, int miniBatchCount, int columnCount, int timeStepCount) throws IOException {
80 |
81 | writer.write( "Debugging Input of ND4J 3d Matrix -------\n" );
82 |
83 | for ( int miniBatchIndex = 0; miniBatchIndex < miniBatchCount; miniBatchIndex++) {
84 |
85 | writer.write( "Mini-Batch Index: " + miniBatchIndex + "\n" );
86 |
87 | for ( int timeStepIndex = 0; timeStepIndex < timeStepCount; timeStepIndex++) {
88 |
89 | writer.write( "[timestep: " + timeStepIndex + "] " );
90 |
91 | for ( int columnIndex = 0; columnIndex < columnCount; columnIndex++) {
92 |
93 |
94 |
95 | int[] params = new int[]{ miniBatchIndex, columnIndex, timeStepIndex };
96 |
97 | double v = dstInput.getDouble( params );
98 |
99 | writer.write( ", " + v );
100 |
101 |
102 | }
103 |
104 | writer.write("\n");
105 |
106 | }
107 |
108 |
109 | }
110 |
111 | writer.write( "END Debugging Input of ND4J 3d Matrix -------\n" );
112 |
113 | }
114 |
115 | }
116 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/synthetic/simple1/SyntheticDataIterator.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.synthetic.simple1;
2 |
3 | import java.io.BufferedReader;
4 | import java.io.FileReader;
5 | import java.io.IOException;
6 | import java.util.ArrayList;
7 | import java.util.HashMap;
8 | import java.util.List;
9 | import java.util.Map;
10 |
11 | import org.deeplearning4j.datasets.iterator.DataSetIterator;
12 | import org.deeplearning4j.examples.rnn.strata.physionet.PhysioNet_Vectorizer;
13 | import org.nd4j.linalg.api.ndarray.INDArray;
14 | import org.nd4j.linalg.dataset.DataSet;
15 | import org.nd4j.linalg.dataset.api.DataSetPreProcessor;
16 | import org.nd4j.linalg.factory.Nd4j;
17 |
18 | public class SyntheticDataIterator implements DataSetIterator {
19 |
20 | private int miniBatchSize;
21 | private int currentFileListIndex = 0;
22 | private int totalExamples = 0;
23 |
24 | //private int numExamplesToFetch;
25 | //private int examplesSoFar = 0;
26 | //private Random rng;
27 | //private final int numCharacters;
28 | //private final boolean alwaysStartAtNewLine;
29 |
30 | //String path = "";
31 |
32 | String datasetInputPath = "";
33 | //String datasetSchemaPath = "";
34 | String datasetLabelsPath = "";
35 | //public PhysioNet_Vectorizer vectorizer = null;
36 | String columnDelimiter = ",";
37 |
38 | public SyntheticDataIterator(String dataInputPath, String datasetLabels, int miniBatchSize, int totalExamples ) throws IOException {
39 | // this(path,Charset.defaultCharset(),miniBatchSize,exampleSize,numExamplesToFetch,getDefaultCharacterSet(), new Random(),true);
40 | //this.numCharacters = 0; // fix
41 |
42 | this.datasetInputPath = dataInputPath;
43 | //this.datasetSchemaPath = datasetSchemaPath;
44 | this.datasetLabelsPath = datasetLabels;
45 |
46 | this.miniBatchSize = miniBatchSize;
47 | this.totalExamples = totalExamples;
48 |
49 | }
50 |
51 |
52 | public boolean hasNext() {
53 | return currentFileListIndex + miniBatchSize <= this.totalExamples;
54 | }
55 |
56 | public DataSet next() {
57 | return next(miniBatchSize);
58 | }
59 |
60 |
61 | public List loadDataPoints(String path) {
62 |
63 | this.datasetInputPath = path;
64 |
65 | // read file into hash map
66 |
67 | String csvLine = "";
68 | int labelCount = 0;
69 | List lines = new ArrayList<>();
70 |
71 |
72 | try (BufferedReader br = new BufferedReader(new FileReader( this.datasetInputPath ) ) ) {
73 |
74 | // bleed off the header line
75 | //csvLine = br.readLine();
76 |
77 | //Map timeStepMap = new LinkedHashMap<>();
78 |
79 | while ((csvLine = br.readLine()) != null) {
80 |
81 | lines.add( csvLine );
82 |
83 | }
84 |
85 | } catch (IOException e) {
86 | // TODO Auto-generated catch block
87 | e.printStackTrace();
88 | }
89 |
90 |
91 | return lines;
92 |
93 | }
94 |
95 | /**
96 | * TODO: Cut here ----------------------------------
97 | *
98 | * Dimensions of input
99 | * x: miniBatchSize
100 | * y: every column we want to look at per timestep (basically our traditional vector)
101 | * z: the timestep value
102 | *
103 | */
104 | public DataSet next(int miniBatchSize) {
105 |
106 | int columnCount = 1;
107 | int timestepCount = 4;
108 |
109 | INDArray input = Nd4j.zeros(new int[]{ miniBatchSize, columnCount, timestepCount } );
110 | // input mask should be 2d (no column count)
111 | // we only care about minibatch and timestep --- for a given timestep, we are either using ALL colums... or we are not
112 | INDArray inputMask = Nd4j.ones( new int[]{ miniBatchSize, timestepCount } );
113 |
114 | // have to make labels 3d, but we pad/mask everything but first timestep
115 | INDArray labels = Nd4j.zeros(new int[]{ miniBatchSize, 2, timestepCount } );
116 | // mask / pad everything in labels up to the LAST timestep? and put the real labels there
117 | INDArray labelsMask = Nd4j.zeros(new int[]{ miniBatchSize, timestepCount } ); // labels are always used
118 |
119 | //String[] columns = csvLine.split( this.columnDelimiter );
120 | List records = this.loadDataPoints( this.datasetInputPath );
121 |
122 | System.out.println( "records loaded: " + records.size() );
123 |
124 | for ( int x = 0; x < records.size(); x++ ) {
125 |
126 | String[] timesteps = records.get( x ).split( this.columnDelimiter );
127 |
128 | for ( int step = 0; step < timesteps.length; step++ ) {
129 |
130 | input.putScalar(new int[]{ x, 0, step }, Double.parseDouble( timesteps[ step ] ) );
131 |
132 | }
133 |
134 | // set the label label
135 | int classIndex = Integer.parseInt( timesteps[ 3 ] );
136 |
137 | labels.putScalar(new int[]{ x, classIndex, 3 }, 1);
138 |
139 | // set the label mask
140 |
141 | labelsMask.putScalar(new int[]{ x, 3 }, 1);
142 |
143 | }
144 |
145 | // DataSet d = this.vectorizer.generateNextTimeseriesVectorMiniBatch( miniBatchSize, this.currentFileListIndex, columnCount );
146 | this.currentFileListIndex += miniBatchSize;
147 |
148 | //ND4JMatrixTool.debug3D_Nd4J_Input(input, miniBatchSize, columnCount, timestepCount);
149 | //ND4JMatrixTool.debug2D_Nd4J_Input( inputMask, miniBatchSize, timestepCount);
150 |
151 | // ND4JMatrixTool.debug3D_Nd4J_Input(labels, miniBatchSize, 2, timestepCount);
152 | // ND4JMatrixTool.debug2D_Nd4J_Input( labelsMask, miniBatchSize, timestepCount);
153 |
154 | //return d;
155 | return new DataSet( input, labels, inputMask, labelsMask );
156 |
157 | }
158 |
159 | public int totalExamples() {
160 | return this.currentFileListIndex;
161 | }
162 |
163 | public int inputColumns() {
164 | return 1;
165 | }
166 |
167 | public int totalOutcomes() {
168 | return 2;
169 | }
170 |
171 | public void reset() {
172 | this.currentFileListIndex = 0;
173 | }
174 |
175 | public int batch() {
176 | return miniBatchSize;
177 | }
178 |
179 | public int cursor() {
180 | return this.currentFileListIndex;
181 | }
182 |
183 | public int numExamples() {
184 | return this.totalExamples;
185 | }
186 |
187 | public void setPreProcessor(DataSetPreProcessor preProcessor) {
188 | throw new UnsupportedOperationException("Not implemented");
189 | }
190 |
191 | @Override
192 | public List getLabels() {
193 | throw new UnsupportedOperationException("Not implemented");
194 | }
195 |
196 | @Override
197 | public void remove() {
198 | throw new UnsupportedOperationException();
199 | }
200 | }
201 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/synthetic/simple1/SyntheticData_LSTM_Model.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.synthetic.simple1;
2 |
3 | import java.util.Random;
4 |
5 | import org.deeplearning4j.eval.Evaluation;
6 | import org.deeplearning4j.examples.rnn.strata.physionet.PhysioNet_ICU_Mortality_Iterator;
7 | import org.deeplearning4j.nn.api.Layer;
8 | import org.deeplearning4j.nn.api.OptimizationAlgorithm;
9 | import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
10 | import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
11 | import org.deeplearning4j.nn.conf.Updater;
12 | import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
13 | import org.deeplearning4j.nn.conf.layers.GravesLSTM;
14 | import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
15 | import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
16 | import org.deeplearning4j.nn.weights.WeightInit;
17 | import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
18 | import org.nd4j.linalg.api.ndarray.INDArray;
19 | import org.nd4j.linalg.dataset.DataSet;
20 | import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
21 |
22 | public class SyntheticData_LSTM_Model {
23 |
24 |
25 | public static void main( String[] args ) throws Exception {
26 |
27 | trainExample();
28 |
29 | }
30 |
31 | public static void trainExample() throws Exception {
32 |
33 | int lstmLayerSize = 200; //Number of units in each GravesLSTM layer
34 | int miniBatchSize = 20; //Size of mini batch to use when training
35 | int totalExamplesToTrainWith = 40;
36 | //int examplesPerEpoch = 50 * miniBatchSize; //i.e., how many examples to learn on between generating samples
37 | //int exampleLength = 100; //Length of each training example
38 | int numEpochs = 20; //Total number of training + sample generation epochs
39 | //int nSamplesToGenerate = 4; //Number of samples to generate after each training epoch
40 | //int nCharactersToSample = 300; //Length of each sample to generate
41 | //String generationInitialization = null; //Optional character initialization; a random character is used if null
42 | // Above is Used to 'prime' the LSTM with a character sequence to continue/complete.
43 | // Initialization characters must all be in CharacterIterator.getMinimalCharacterSet() by default
44 | Random rng = new Random(12345);
45 |
46 | //Get a DataSetIterator that handles vectorization of text into something we can use to train
47 | // our GravesLSTM network.
48 | //CharacterIterator iter = getShakespeareIterator(miniBatchSize,exampleLength,examplesPerEpoch);
49 | int nOut = 2; //iter.totalOutcomes();
50 |
51 | //SyntheticDataIterator iter = getPhysioNetIterator( miniBatchSize, totalExamplesToTrainWith );
52 | SyntheticDataIterator iter = new SyntheticDataIterator("src/test/resources/data/synthetic/simple/simple_ts_data", "", 40, 40);
53 |
54 | //SyntheticDataIterator test_iter = getPhysioNetIterator( miniBatchSize, 1000 );
55 | SyntheticDataIterator test_iter = new SyntheticDataIterator("src/test/resources/data/synthetic/simple/simple_ts_data", "", 40, 40);
56 |
57 | iter.reset();
58 |
59 | System.out.println( "We have " + iter.inputColumns() + " input columns." );
60 |
61 | //Set up network configuration:
62 | MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
63 | .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
64 | .learningRate(0.01)
65 | .rmsDecay(0.95)
66 | .seed(12345)
67 | .regularization(true)
68 | .l2(0.001)
69 | .list(3)
70 | .layer(0, new GravesLSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
71 | .updater(Updater.RMSPROP)
72 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
73 | .dist(new UniformDistribution(-0.08, 0.08)).build())
74 | .layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
75 | .updater(Updater.RMSPROP)
76 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
77 | .dist(new UniformDistribution(-0.08, 0.08)).build())
78 | .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation("softmax") //MCXENT + softmax for classification
79 | .updater(Updater.RMSPROP)
80 | .nIn(lstmLayerSize).nOut(nOut).weightInit(WeightInit.DISTRIBUTION)
81 | .dist(new UniformDistribution(-0.08, 0.08)).build())
82 | .pretrain(false).backprop(true)
83 | .build();
84 |
85 | MultiLayerNetwork net = new MultiLayerNetwork(conf);
86 | net.init();
87 | net.setListeners(new ScoreIterationListener(1));
88 |
89 | //Print the number of parameters in the network (and for each layer)
90 | Layer[] layers = net.getLayers();
91 | int totalNumParams = 0;
92 | for( int i=0; i loadDataPoints(String path) {
53 |
54 | this.datasetInputPath = path;
55 |
56 | // read file into hash map
57 |
58 | String csvLine = "";
59 | int labelCount = 0;
60 | List lines = new ArrayList<>();
61 |
62 |
63 | try (BufferedReader br = new BufferedReader(new FileReader( this.datasetInputPath ) ) ) {
64 |
65 | // bleed off the header line
66 | //csvLine = br.readLine();
67 |
68 | //Map timeStepMap = new LinkedHashMap<>();
69 |
70 | while ((csvLine = br.readLine()) != null) {
71 |
72 | lines.add( csvLine );
73 |
74 | }
75 |
76 | } catch (IOException e) {
77 | // TODO Auto-generated catch block
78 | e.printStackTrace();
79 | }
80 |
81 |
82 | return lines;
83 |
84 | }
85 |
86 | /**
87 | * TODO: Cut here ----------------------------------
88 | *
89 | * Dimensions of input
90 | * x: miniBatchSize
91 | * y: every column we want to look at per timestep (basically our traditional vector)
92 | * z: the timestep value
93 | *
94 | */
95 | public DataSet next(int miniBatchSize) {
96 |
97 | int columnCount = 1;
98 | int timestepCount = 4;
99 |
100 | INDArray input = Nd4j.zeros(new int[]{ miniBatchSize, columnCount, timestepCount } );
101 | // input mask should be 2d (no column count)
102 | // we only care about minibatch and timestep --- for a given timestep, we are either using ALL colums... or we are not
103 | INDArray inputMask = Nd4j.ones( new int[]{ miniBatchSize, timestepCount } );
104 |
105 | // have to make labels 3d, but we pad/mask everything but first timestep
106 | INDArray labels = Nd4j.zeros(new int[]{ miniBatchSize, 2, timestepCount } );
107 | // mask / pad everything in labels up to the LAST timestep? and put the real labels there
108 | INDArray labelsMask = Nd4j.zeros(new int[]{ miniBatchSize, timestepCount } ); // labels are always used
109 |
110 | //String[] columns = csvLine.split( this.columnDelimiter );
111 | List recordLines = this.loadDataPoints( this.datasetInputPath );
112 | List labelLines = this.loadDataPoints( this.datasetLabelsPath );
113 |
114 | // System.out.println( "records loaded: " + recordLines.size() );
115 |
116 | if (recordLines.size() != labelLines.size()) {
117 | System.err.println( "record count and label count do not match!" );
118 | return null;//throw new Exception( "record count and label count do not match!" );
119 | }
120 |
121 | // System.out.println( "Current: " + this.currentRecordIndex );
122 |
123 | int targetRecordIndex = this.currentRecordIndex + this.miniBatchSize;
124 |
125 | // System.out.println( "Target: " + targetRecordIndex );
126 |
127 | for ( int miniBatchIndex = 0; miniBatchIndex < this.miniBatchSize; miniBatchIndex++ ) {
128 | //for ( int x = 0; x < recordLines.size(); x++ ) {
129 |
130 | //System.out.println( x );
131 |
132 | int globalRecordIndex = this.currentRecordIndex + miniBatchIndex;
133 |
134 | String[] timesteps = recordLines.get( globalRecordIndex ).split( this.columnDelimiter );
135 | String labelString = labelLines.get( globalRecordIndex );
136 |
137 | for ( int step = 0; step < timesteps.length; step++ ) {
138 |
139 | input.putScalar(new int[]{ miniBatchIndex, 0, step }, Double.parseDouble( timesteps[ step ] ) );
140 |
141 | }
142 |
143 | // set the label label
144 | int classIndex = Integer.parseInt( labelString );
145 |
146 | labels.putScalar(new int[]{ miniBatchIndex, classIndex, 3 }, 1);
147 |
148 | // set the label mask
149 |
150 | labelsMask.putScalar(new int[]{ miniBatchIndex, 3 }, 1);
151 |
152 | }
153 |
154 | // DataSet d = this.vectorizer.generateNextTimeseriesVectorMiniBatch( miniBatchSize, this.currentFileListIndex, columnCount );
155 | this.currentRecordIndex = targetRecordIndex;
156 |
157 | // System.out.println( "New Current: " + this.currentRecordIndex );
158 | /*
159 | System.out.println("\n\nDebug Input");
160 | ND4JMatrixTool.debug3D_Nd4J_Input(input, miniBatchSize, columnCount, timestepCount);
161 | ND4JMatrixTool.debug2D_Nd4J_Input( inputMask, miniBatchSize, timestepCount);
162 | */
163 |
164 | // System.out.println("\n\nDebug Labels");
165 | // ND4JMatrixTool.debug3D_Nd4J_Input(labels, miniBatchSize, 2, timestepCount);
166 | // ND4JMatrixTool.debug2D_Nd4J_Input( labelsMask, miniBatchSize, timestepCount);
167 |
168 | //return d;
169 | return new DataSet( input, labels, inputMask, labelsMask );
170 |
171 | }
172 |
	// Configured total number of records (not the number consumed so far).
	public int totalExamples() {
		return this.totalExamples;
	}
176 |
	// One input feature per timestep (univariate series).
	public int inputColumns() {
		return 1;
	}
180 |
	// Binary classification: two output classes.
	public int totalOutcomes() {
		return 2;
	}
184 |
	// Rewind the record cursor so iteration restarts from the first record.
	public void reset() {
		this.currentRecordIndex = 0;
	}
188 |
	// Mini-batch size this iterator was configured with.
	public int batch() {
		return miniBatchSize;
	}
192 |
	// Index of the next record to be served.
	public int cursor() {
		return this.currentRecordIndex;
	}
196 |
	// Same value as totalExamples().
	public int numExamples() {
		return this.totalExamples;
	}
200 |
	// Pre-processing is not supported by this iterator.
	public void setPreProcessor(DataSetPreProcessor preProcessor) {
		throw new UnsupportedOperationException("Not implemented");
	}
204 |
	// Label-name lookup is not supported by this iterator.
	@Override
	public List getLabels() {
		throw new UnsupportedOperationException("Not implemented");
	}
209 |
	// Iterator.remove() is not supported.
	@Override
	public void remove() {
		throw new UnsupportedOperationException();
	}
214 | }
215 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/synthetic/simple2/Synthetic_Simple2_LSTM_Model.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.synthetic.simple2;
2 |
3 | import java.util.Random;
4 |
5 | import org.deeplearning4j.eval.Evaluation;
6 | import org.deeplearning4j.examples.rnn.synthetic.simple1.SyntheticDataIterator;
7 | import org.deeplearning4j.nn.api.Layer;
8 | import org.deeplearning4j.nn.api.OptimizationAlgorithm;
9 | import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
10 | import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
11 | import org.deeplearning4j.nn.conf.Updater;
12 | import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
13 | import org.deeplearning4j.nn.conf.layers.GravesLSTM;
14 | import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
15 | import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
16 | import org.deeplearning4j.nn.weights.WeightInit;
17 | import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
18 | import org.nd4j.linalg.api.ndarray.INDArray;
19 | import org.nd4j.linalg.dataset.DataSet;
20 | import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
21 |
22 | public class Synthetic_Simple2_LSTM_Model {
23 |
24 |
25 | public static void main( String[] args ) throws Exception {
26 |
27 | trainExample();
28 |
29 | }
30 |
31 | public static void trainExample() throws Exception {
32 |
33 | int lstmLayerSize = 200; //Number of units in each GravesLSTM layer
34 | int miniBatchSize = 10; //Size of mini batch to use when training
35 | int totalExamplesToTrainWith = 40;
36 | //int examplesPerEpoch = 50 * miniBatchSize; //i.e., how many examples to learn on between generating samples
37 | //int exampleLength = 100; //Length of each training example
38 | int numEpochs = 30; //Total number of training + sample generation epochs
39 | //int nSamplesToGenerate = 4; //Number of samples to generate after each training epoch
40 | //int nCharactersToSample = 300; //Length of each sample to generate
41 | //String generationInitialization = null; //Optional character initialization; a random character is used if null
42 | // Above is Used to 'prime' the LSTM with a character sequence to continue/complete.
43 | // Initialization characters must all be in CharacterIterator.getMinimalCharacterSet() by default
44 | Random rng = new Random(12345);
45 |
46 | //Get a DataSetIterator that handles vectorization of text into something we can use to train
47 | // our GravesLSTM network.
48 | //CharacterIterator iter = getShakespeareIterator(miniBatchSize,exampleLength,examplesPerEpoch);
49 | int nOut = 2; //iter.totalOutcomes();
50 |
51 | //SyntheticDataIterator iter = getPhysioNetIterator( miniBatchSize, totalExamplesToTrainWith );
52 | Simple2Dataset_Iterator iter = new Simple2Dataset_Iterator("src/test/resources/data/synthetic/simple_2/simple_2_data.txt", "src/test/resources/data/synthetic/simple_2/simple_2_labels.txt", miniBatchSize, totalExamplesToTrainWith);
53 |
54 | //SyntheticDataIterator test_iter = getPhysioNetIterator( miniBatchSize, 1000 );
55 | Simple2Dataset_Iterator test_iter = new Simple2Dataset_Iterator("src/test/resources/data/synthetic/simple_2/simple_2_data.txt", "src/test/resources/data/synthetic/simple_2/simple_2_labels.txt", miniBatchSize, totalExamplesToTrainWith);
56 |
57 | iter.reset();
58 |
59 | System.out.println( "We have " + iter.inputColumns() + " input columns." );
60 |
61 | //Set up network configuration:
62 | MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
63 | .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
64 | .learningRate(0.01)
65 | .rmsDecay(0.95)
66 | .seed(12345)
67 | .regularization(true)
68 | .l2(0.001)
69 | .list(3)
70 | .layer(0, new GravesLSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
71 | .updater(Updater.RMSPROP)
72 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
73 | .dist(new UniformDistribution(-0.08, 0.08)).build())
74 | .layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
75 | .updater(Updater.RMSPROP)
76 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
77 | .dist(new UniformDistribution(-0.08, 0.08)).build())
78 | .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation("softmax") //MCXENT + softmax for classification
79 | .updater(Updater.RMSPROP)
80 | .nIn(lstmLayerSize).nOut(nOut).weightInit(WeightInit.DISTRIBUTION)
81 | .dist(new UniformDistribution(-0.08, 0.08)).build())
82 | .pretrain(false).backprop(true)
83 | .build();
84 |
85 | MultiLayerNetwork net = new MultiLayerNetwork(conf);
86 | net.init();
87 | net.setListeners(new ScoreIterationListener(1));
88 |
89 | //Print the number of parameters in the network (and for each layer)
90 | Layer[] layers = net.getLayers();
91 | int totalNumParams = 0;
92 | for( int i=0; i loadDataPoints(String path) {
54 |
55 | this.datasetInputPath = path;
56 |
57 | // read file into hash map
58 |
59 | String csvLine = "";
60 | int labelCount = 0;
61 | List lines = new ArrayList<>();
62 |
63 |
64 | try (BufferedReader br = new BufferedReader(new FileReader( this.datasetInputPath ) ) ) {
65 |
66 | // bleed off the header line
67 | //csvLine = br.readLine();
68 |
69 | //Map timeStepMap = new LinkedHashMap<>();
70 |
71 | while ((csvLine = br.readLine()) != null) {
72 |
73 | lines.add( csvLine );
74 |
75 | }
76 |
77 | } catch (IOException e) {
78 | // TODO Auto-generated catch block
79 | e.printStackTrace();
80 | }
81 |
82 |
83 | return lines;
84 |
85 | }
86 |
87 | /**
88 | * TODO: Cut here ----------------------------------
89 | *
90 | * Dimensions of input
91 | * x: miniBatchSize
92 | * y: every column we want to look at per timestep (basically our traditional vector)
93 | * z: the timestep value
94 | *
95 | */
96 | public DataSet next(int miniBatchSize) {
97 |
98 | int columnCount = 1;
99 | int timestepCount = 4;
100 |
101 | INDArray input = Nd4j.zeros(new int[]{ miniBatchSize, columnCount, timestepCount } );
102 | // input mask should be 2d (no column count)
103 | // we only care about minibatch and timestep --- for a given timestep, we are either using ALL colums... or we are not
104 | INDArray inputMask = Nd4j.ones( new int[]{ miniBatchSize, timestepCount } );
105 |
106 | // have to make labels 3d, but we pad/mask everything but first timestep
107 | INDArray labels = Nd4j.zeros(new int[]{ miniBatchSize, 2, timestepCount } );
108 | // mask / pad everything in labels up to the LAST timestep? and put the real labels there
109 | INDArray labelsMask = Nd4j.zeros(new int[]{ miniBatchSize, timestepCount } ); // labels are always used
110 |
111 | //String[] columns = csvLine.split( this.columnDelimiter );
112 | List recordLines = this.loadDataPoints( this.datasetInputPath );
113 | List labelLines = this.loadDataPoints( this.datasetLabelsPath );
114 |
115 | // System.out.println( "records loaded: " + recordLines.size() );
116 |
117 | if (recordLines.size() != labelLines.size()) {
118 | System.err.println( "record count and label count do not match!" );
119 | return null;//throw new Exception( "record count and label count do not match!" );
120 | }
121 |
122 | // System.out.println( "Current: " + this.currentRecordIndex );
123 |
124 | int targetRecordIndex = this.currentRecordIndex + this.miniBatchSize;
125 |
126 | // System.out.println( "Target: " + targetRecordIndex );
127 |
128 | for ( int miniBatchIndex = 0; miniBatchIndex < this.miniBatchSize; miniBatchIndex++ ) {
129 | //for ( int x = 0; x < recordLines.size(); x++ ) {
130 |
131 | //System.out.println( x );
132 |
133 | int globalRecordIndex = this.currentRecordIndex + miniBatchIndex;
134 |
135 | String[] timesteps = recordLines.get( globalRecordIndex ).split( this.columnDelimiter );
136 | String labelString = labelLines.get( globalRecordIndex );
137 |
138 | //for ( int step = 0; step < timesteps.length; step++ ) {
139 | for ( int step = 0; step < this.maxTimestepLength; step++ ) {
140 |
141 | if (step >= timesteps.length) {
142 |
143 | // mask the unused timesteps
144 | inputMask.putScalar(new int[]{ miniBatchIndex, step }, 0.0 );
145 |
146 | } else {
147 |
148 | input.putScalar(new int[]{ miniBatchIndex, 0, step }, Double.parseDouble( timesteps[ step ] ) );
149 |
150 | }
151 |
152 | }
153 |
154 | // set the label label
155 | int classIndex = Integer.parseInt( labelString );
156 |
157 | int labelStepIndex = this.maxTimestepLength - 1;
158 | if ( timesteps.length < this.maxTimestepLength) {
159 | labelStepIndex = timesteps.length - 1;
160 | }
161 |
162 |
163 | labels.putScalar(new int[]{ miniBatchIndex, classIndex, labelStepIndex }, 1);
164 |
165 | // set the label mask
166 |
167 | labelsMask.putScalar(new int[]{ miniBatchIndex, labelStepIndex }, 1);
168 |
169 | }
170 |
171 | // DataSet d = this.vectorizer.generateNextTimeseriesVectorMiniBatch( miniBatchSize, this.currentFileListIndex, columnCount );
172 | this.currentRecordIndex = targetRecordIndex;
173 |
174 | // System.out.println( "New Current: " + this.currentRecordIndex );
175 | /*
176 | System.out.println("\n\nDebug Input");
177 | ND4JMatrixTool.debug3D_Nd4J_Input(input, miniBatchSize, columnCount, timestepCount);
178 | ND4JMatrixTool.debug2D_Nd4J_Input( inputMask, miniBatchSize, timestepCount);
179 | */
180 |
181 | // System.out.println("\n\nDebug Labels");
182 | // ND4JMatrixTool.debug3D_Nd4J_Input(labels, miniBatchSize, 2, timestepCount);
183 | // ND4JMatrixTool.debug2D_Nd4J_Input( labelsMask, miniBatchSize, timestepCount);
184 |
185 | //return d;
186 | return new DataSet( input, labels, inputMask, labelsMask );
187 |
188 | }
189 |
	// Configured total number of records (not the number consumed so far).
	public int totalExamples() {
		return this.totalExamples;
	}
193 |
	// One input feature per timestep (univariate series).
	public int inputColumns() {
		return 1;
	}
197 |
	// Binary classification: two output classes.
	public int totalOutcomes() {
		return 2;
	}
201 |
	// Rewind the record cursor so iteration restarts from the first record.
	public void reset() {
		this.currentRecordIndex = 0;
	}
205 |
	// Mini-batch size this iterator was configured with.
	public int batch() {
		return miniBatchSize;
	}
209 |
	// Index of the next record to be served.
	public int cursor() {
		return this.currentRecordIndex;
	}
213 |
214 | public int numExamples() {
215 | return this.totalExamples;
216 | }
217 |
	/**
	 * Pre-processing is not supported by this iterator.
	 *
	 * @throws UnsupportedOperationException always
	 */
	public void setPreProcessor(DataSetPreProcessor preProcessor) {
		throw new UnsupportedOperationException("Not implemented");
	}
221 |
	/**
	 * Label-name lookup is not supported by this iterator.
	 *
	 * @throws UnsupportedOperationException always
	 */
	@Override
	public List getLabels() {
		throw new UnsupportedOperationException("Not implemented");
	}
226 |
	/**
	 * Removal is not supported by this iterator.
	 *
	 * @throws UnsupportedOperationException always
	 */
	@Override
	public void remove() {
		throw new UnsupportedOperationException();
	}
231 | }
232 |
--------------------------------------------------------------------------------
/src/main/java/org/deeplearning4j/examples/rnn/synthetic/simple3/uneven/Simple3_Uneven_LSTM_Model.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.examples.rnn.synthetic.simple3.uneven;
2 |
3 | import java.util.Random;
4 |
5 | import org.deeplearning4j.eval.Evaluation;
6 | import org.deeplearning4j.examples.rnn.synthetic.simple2.Simple2Dataset_Iterator;
7 | import org.deeplearning4j.nn.api.Layer;
8 | import org.deeplearning4j.nn.api.OptimizationAlgorithm;
9 | import org.deeplearning4j.nn.conf.MultiLayerConfiguration;
10 | import org.deeplearning4j.nn.conf.NeuralNetConfiguration;
11 | import org.deeplearning4j.nn.conf.Updater;
12 | import org.deeplearning4j.nn.conf.distribution.UniformDistribution;
13 | import org.deeplearning4j.nn.conf.layers.GravesLSTM;
14 | import org.deeplearning4j.nn.conf.layers.RnnOutputLayer;
15 | import org.deeplearning4j.nn.multilayer.MultiLayerNetwork;
16 | import org.deeplearning4j.nn.weights.WeightInit;
17 | import org.deeplearning4j.optimize.listeners.ScoreIterationListener;
18 | import org.nd4j.linalg.api.ndarray.INDArray;
19 | import org.nd4j.linalg.dataset.DataSet;
20 | import org.nd4j.linalg.lossfunctions.LossFunctions.LossFunction;
21 |
22 | public class Simple3_Uneven_LSTM_Model {
23 |
24 |
25 |
26 | public static void main( String[] args ) throws Exception {
27 |
28 | trainExample();
29 |
30 | }
31 |
32 | public static void trainExample() throws Exception {
33 |
34 | int lstmLayerSize = 200; //Number of units in each GravesLSTM layer
35 | int miniBatchSize = 10; //Size of mini batch to use when training
36 | int totalExamplesToTrainWith = 40;
37 | //int examplesPerEpoch = 50 * miniBatchSize; //i.e., how many examples to learn on between generating samples
38 | //int exampleLength = 100; //Length of each training example
39 | int numEpochs = 50; //Total number of training + sample generation epochs
40 | //int nSamplesToGenerate = 4; //Number of samples to generate after each training epoch
41 | //int nCharactersToSample = 300; //Length of each sample to generate
42 | //String generationInitialization = null; //Optional character initialization; a random character is used if null
43 | // Above is Used to 'prime' the LSTM with a character sequence to continue/complete.
44 | // Initialization characters must all be in CharacterIterator.getMinimalCharacterSet() by default
45 | Random rng = new Random(12345);
46 |
47 | //Get a DataSetIterator that handles vectorization of text into something we can use to train
48 | // our GravesLSTM network.
49 | //CharacterIterator iter = getShakespeareIterator(miniBatchSize,exampleLength,examplesPerEpoch);
50 | int nOut = 2; //iter.totalOutcomes();
51 |
52 | //SyntheticDataIterator iter = getPhysioNetIterator( miniBatchSize, totalExamplesToTrainWith );
53 | //Simple3_Uneven_Dataset_Iterator iter = new Simple3_Uneven_Dataset_Iterator("src/test/resources/data/synthetic/simple_2/simple_2_data.txt", "src/test/resources/data/synthetic/simple_2/simple_2_labels.txt", miniBatchSize, totalExamplesToTrainWith);
54 | Simple3_Uneven_Dataset_Iterator iter = new Simple3_Uneven_Dataset_Iterator("src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_data.txt", "src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_labels.txt", miniBatchSize, totalExamplesToTrainWith, 4 );
55 |
56 | //SyntheticDataIterator test_iter = getPhysioNetIterator( miniBatchSize, 1000 );
57 | //Simple3_Uneven_Dataset_Iterator test_iter = new Simple3_Uneven_Dataset_Iterator("src/test/resources/data/synthetic/simple_2/simple_2_data.txt", "src/test/resources/data/synthetic/simple_2/simple_2_labels.txt", miniBatchSize, totalExamplesToTrainWith);
58 | Simple3_Uneven_Dataset_Iterator test_iter = new Simple3_Uneven_Dataset_Iterator("src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_data.txt", "src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_labels.txt", totalExamplesToTrainWith, totalExamplesToTrainWith, 4 );
59 |
60 | iter.reset();
61 |
62 | System.out.println( "We have " + iter.inputColumns() + " input columns." );
63 |
64 | //Set up network configuration:
65 | MultiLayerConfiguration conf = new NeuralNetConfiguration.Builder()
66 | .optimizationAlgo(OptimizationAlgorithm.STOCHASTIC_GRADIENT_DESCENT).iterations(1)
67 | .learningRate(0.005)
68 | .rmsDecay(0.95)
69 | .seed(12345)
70 | .regularization(true)
71 | .l2(0.001)
72 | .list(3)
73 | .layer(0, new GravesLSTM.Builder().nIn(iter.inputColumns()).nOut(lstmLayerSize)
74 | .updater(Updater.RMSPROP)
75 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
76 | .dist(new UniformDistribution(-0.08, 0.08)).build())
77 | .layer(1, new GravesLSTM.Builder().nIn(lstmLayerSize).nOut(lstmLayerSize)
78 | .updater(Updater.RMSPROP)
79 | .activation("tanh").weightInit(WeightInit.DISTRIBUTION)
80 | .dist(new UniformDistribution(-0.08, 0.08)).build())
81 | .layer(2, new RnnOutputLayer.Builder(LossFunction.MCXENT).activation("softmax") //MCXENT + softmax for classification
82 | .updater(Updater.RMSPROP)
83 | .nIn(lstmLayerSize).nOut(nOut).weightInit(WeightInit.DISTRIBUTION)
84 | .dist(new UniformDistribution(-0.08, 0.08)).build())
85 | .pretrain(false).backprop(true)
86 | .build();
87 |
88 | MultiLayerNetwork net = new MultiLayerNetwork(conf);
89 | net.init();
90 | net.setListeners(new ScoreIterationListener(1));
91 |
92 | //Print the number of parameters in the network (and for each layer)
93 | Layer[] layers = net.getLayers();
94 | int totalNumParams = 0;
95 | for( int i=0; i
149 | * Note that the initalization is used for all samples
150 | * @param initialization String, may be null. If null, select a random character as initialization for all samples
151 | * @param charactersToSample Number of characters to sample from network (excluding initialization)
152 | * @param net MultiLayerNetwork with one or more GravesLSTM/RNN layers and a softmax output layer
153 | * @param iter CharacterIterator. Used for going from indexes back to characters
154 | */
155 | private static String[] sampleCharactersFromNetwork( String initialization, MultiLayerNetwork net,
156 | CharacterIterator iter, Random rng, int charactersToSample, int numSamples ){
157 | //Set up initialization. If no initialization: use a random character
158 | if( initialization == null ){
159 | initialization = String.valueOf(iter.getRandomCharacter());
160 | }
161 |
162 | //Create input for initialization
163 | INDArray initializationInput = Nd4j.zeros(numSamples, iter.inputColumns(), initialization.length());
164 | char[] init = initialization.toCharArray();
165 | for( int i=0; i
144 | * Note that the initalization is used for all samples
145 | * @param initialization String, may be null. If null, select a random character as initialization for all samples
146 | * @param charactersToSample Number of characters to sample from network (excluding initialization)
147 | * @param net MultiLayerNetwork with one or more GravesLSTM/RNN layers and a softmax output layer
148 | * @param iter CharacterIterator. Used for going from indexes back to characters
149 | */
150 | private static String[] sampleCharactersFromNetwork( String initialization, MultiLayerNetwork net,
151 | CharacterIterator iter, Random rng, int charactersToSample, int numSamples ){
152 | //Set up initialization. If no initialization: use a random character
153 | if( initialization == null ){
154 | initialization = String.valueOf(iter.getRandomCharacter());
155 | }
156 |
157 | //Create input for initialization
158 | INDArray initializationInput = Nd4j.zeros(numSamples, iter.inputColumns(), initialization.length());
159 | char[] init = initialization.toCharArray();
160 | for( int i=0; i
134 | * Note that the initalization is used for all samples
135 | * @param initialization String, may be null. If null, select a random character as initialization for all samples
136 | * @param charactersToSample Number of characters to sample from network (excluding initialization)
137 | * @param net MultiLayerNetwork with one or more GravesLSTM/RNN layers and a softmax output layer
138 | * @param iter CharacterIterator. Used for going from indexes back to characters
139 | */
140 | private static String[] sampleCharactersFromNetwork( String initialization, MultiLayerNetwork net,
141 | CharacterIterator iter, Random rng, int charactersToSample, int numSamples ){
142 | //Set up initialization. If no initialization: use a random character
143 | if( initialization == null ){
144 | initialization = String.valueOf(iter.getRandomCharacter());
145 | }
146 |
147 | //Create input for initialization
148 | INDArray initializationInput = Nd4j.zeros(numSamples, iter.inputColumns(), initialization.length());
149 | char[] init = initialization.toCharArray();
150 | for( int i=0; i no survive, 1 -> survival)
66 | // INDArray labels = Nd4j.zeros(new int[]{ miniBatchSize, 2 });
67 |
68 | // vec.extractFileContentsAndVectorize( "src/test/resources/physionet_sample_data.txt", 0, columnCount, vec.maxNumberTimeSteps, input, labels);
69 |
70 | // PhysioNet_Vectorizer.debug3D_Nd4J_Input( input, miniBatchSize, columnCount, vec.maxNumberTimeSteps );
71 |
72 | int currentOffset = 0;
73 |
74 |
75 | //for ( int index = 0; index < 200; index += miniBatchSize) {
76 | for ( int index = 0; index < vec.listOfFilesToVectorize.length; index += miniBatchSize) {
77 | //vec.listOfFilesToVectorize.length; index += miniBatchSize) {
78 |
79 | System.out.println( "\n\n ------------- Mini-batch test: " + index + " -----------------\n" );
80 | DataSet d = vec.generateNextTimeseriesVectorMiniBatch(miniBatchSize, index, columnCount);
81 |
82 | }
83 |
84 | /*
85 | double labelNegativeLabelValue = labels.getDouble(0, 0);
86 | double labelPositiveLabelValue = labels.getDouble(0, 1);
87 | assertEquals( 0.0, labelNegativeLabelValue, 0.0 );
88 | assertEquals( 1.0, labelPositiveLabelValue, 0.0 );
89 | */
90 |
91 |
92 | }
93 |
94 | @Test
95 | public void testIterator() throws IOException {
96 |
97 |
98 | // "/tmp/set-a/", "src/test/resources/physionet_schema.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt"
99 | PhysioNet_ICU_Mortality_Iterator iterator = new PhysioNet_ICU_Mortality_Iterator( "/tmp/set-a/", "src/test/resources/physionet_schema.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", 50, 200);
100 |
101 | while (iterator.hasNext()) {
102 |
103 | DataSet d = iterator.next();
104 | System.out.println( "> Pulled Dataset ... ");
105 |
106 | }
107 |
108 | System.out.println( "> [ done ] ");
109 |
110 |
111 | }
112 |
113 | @Test
114 | public void testIteratorCheckSmallMiniBatch() throws IOException {
115 |
116 |
117 |
118 | BufferedWriter writer = null;
119 | try {
120 | //create a temporary file
121 | String timeLog = new SimpleDateFormat("yyyyMMdd_HHmmss").format(Calendar.getInstance().getTime());
122 | File logFile = new File("/tmp/rnn_physionet_" + timeLog + ".txt");
123 |
124 | // This will output the full path where the file will be written to...
125 | System.out.println(logFile.getCanonicalPath());
126 | writer = new BufferedWriter(new FileWriter(logFile));
127 | //writer.write("Hello world!");
128 |
129 |
130 |
131 |
132 |
133 | PhysioNet_ICU_Mortality_Iterator iterator = new PhysioNet_ICU_Mortality_Iterator( "/tmp/set-a/", "src/test/resources/physionet_schema.txt", "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt", 1, 1);
134 |
135 | iterator.vectorizer.schema.logColumns(writer);
136 |
137 | //while (iterator.hasNext()) {
138 |
139 | DataSet d = iterator.next();
140 | System.out.println( "> Pulled Dataset ... ");
141 |
142 | //}
143 |
144 | PhysioNet_Vectorizer.log_debug3D_Nd4J_Input( writer, d.getFeatures(), 1, 43, 202 );
145 |
146 | writer.write( "\n\n> Mask Array: \n\n");
147 |
148 | PhysioNet_Vectorizer.log_debug3D_Nd4J_Input( writer, d.getFeaturesMaskArray(), 1, 43, 202 );
149 |
150 | System.out.println( "> [ done ] ");
151 |
152 | } catch (Exception e) {
153 | e.printStackTrace();
154 | } finally {
155 | try {
156 | // Close the writer regardless of what happens...
157 | writer.close();
158 | } catch (Exception e) {
159 | }
160 | }
161 |
162 |
163 | }
164 |
165 |
166 |
167 | }
168 |
--------------------------------------------------------------------------------
/src/test/java/org/deeplearning4j/jp/rnn/strata/physionet/utils/TestPhysioNetSubsetExtraction.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.jp.rnn.strata.physionet.utils;
2 |
3 | import static org.junit.Assert.*;
4 |
5 | import java.io.File;
6 | import java.io.IOException;
7 |
8 | import org.deeplearning4j.examples.rnn.strata.physionet.PhysioNet_Vectorizer;
9 | import org.deeplearning4j.examples.rnn.strata.physionet.utils.PhysioNetDataUtils;
10 | import org.deeplearning4j.examples.rnn.strata.physionet.utils.PhysioNetVectorizationDebugTool;
11 | import org.junit.Test;
12 |
13 | public class TestPhysioNetSubsetExtraction {
14 |
15 | @Test
16 | public void testDir() {
17 |
18 | String fileNamePath = "/tmp/rnns/physionet/models/";
19 |
20 |
21 | }
22 |
23 | @Test
24 | public void test() {
25 | //fail("Not yet implemented");
26 |
27 | //PhysioNetVectorizationDebugTool.extractBalancedSubsetOfPhysioNet( 40 );
28 |
29 | String physioNetBaseDirectory = "/tmp/set-a/";
30 | String physioSchemaFilePath = "src/test/resources/physionet_schema.txt";
31 | String physioLabelsFilePath = "src/test/resources/data/physionet/sample/set-a-labels/Outcomes-a.txt";
32 |
33 |
34 | PhysioNet_Vectorizer vec = new PhysioNet_Vectorizer( physioNetBaseDirectory, physioSchemaFilePath, physioLabelsFilePath );
35 | vec.loadSchema();
36 | vec.loadLabels();
37 | //vec.setupFileInputList(false, 0);
38 | vec.setupBalancedSubset( 40 );
39 | vec.collectStatistics();
40 | vec.schema.debugPrintDatasetStatistics();
41 |
42 |
43 | }
44 |
45 | @Test
46 | public void testExtractFolds() throws IOException {
47 |
48 |
49 | PhysioNetDataUtils.extractNFoldSubsetBalanced(true, "srcDirectory", "schemaPath", "labels_file_path", "/tmp/set-a-balanced-validate-6/");
50 |
51 | }
52 |
53 |
54 | @Test
55 | public void testExtract5Splits() throws IOException {
56 |
57 |
58 | PhysioNetDataUtils.extractEvenSplitsFromFullPhysioNet( "/tmp/set-a/", "src/test/resources/physionet_schema_zmzuv_0.txt", "/tmp/set-a-5-splits-validate-1/");
59 |
60 | }
61 |
62 |
63 | @Test
64 | public void testExtractFoldsFillPhysioNet() throws IOException {
65 |
66 |
67 | PhysioNetDataUtils.extractNFoldFromFullPhysioNet("/tmp/set-a/", "src/test/resources/physionet_schema_zmzuv_0.txt", "/tmp/set-a-full-splits-1/");
68 |
69 | }
70 |
71 |
72 | }
73 |
--------------------------------------------------------------------------------
/src/test/java/org/deeplearning4j/jp/rnn/synthetic/TestSyntheticDataIterator.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.jp.rnn.synthetic;
2 |
3 | import static org.junit.Assert.*;
4 |
5 | import java.io.IOException;
6 |
7 | import org.deeplearning4j.examples.rnn.synthetic.simple1.SyntheticDataIterator;
8 | import org.junit.Test;
9 |
10 | public class TestSyntheticDataIterator {
11 |
12 | @Test
13 | public void test() throws IOException {
14 |
15 | SyntheticDataIterator iter = new SyntheticDataIterator("src/test/resources/data/synthetic/simple/simple_ts_data", "", 40, 40);
16 |
17 | iter.next();
18 |
19 |
20 | }
21 |
22 | }
23 |
--------------------------------------------------------------------------------
/src/test/java/org/deeplearning4j/jp/rnn/synthetic/simple3/uneven/Test_Simple3_Uneven_Dataset_Iterator.java:
--------------------------------------------------------------------------------
1 | package org.deeplearning4j.jp.rnn.synthetic.simple3.uneven;
2 |
3 | import static org.junit.Assert.*;
4 |
5 | import java.io.IOException;
6 |
7 | import org.deeplearning4j.examples.rnn.synthetic.ND4JMatrixTool;
8 | import org.deeplearning4j.examples.rnn.synthetic.simple1.SyntheticDataIterator;
9 | import org.deeplearning4j.examples.rnn.synthetic.simple3.uneven.Simple3_Uneven_Dataset_Iterator;
10 | import org.junit.Test;
11 | import org.nd4j.linalg.api.ndarray.INDArray;
12 | import org.nd4j.linalg.dataset.DataSet;
13 |
14 | public class Test_Simple3_Uneven_Dataset_Iterator {
15 |
16 | @Test
17 | public void test() throws IOException {
18 |
19 | Simple3_Uneven_Dataset_Iterator iter = new Simple3_Uneven_Dataset_Iterator("src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_data.txt", "src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_labels.txt", 40, 40, 4 );
20 |
21 | //iter.next();
22 |
23 | DataSet t = iter.next();
24 | INDArray input = t.getFeatureMatrix();
25 | INDArray labels = t.getLabels();
26 | INDArray inputMask = t.getFeaturesMaskArray();
27 | INDArray labelsMask = t.getLabelsMaskArray();
28 |
29 | int miniBatchSize = 40;
30 | int columnCount = 1;
31 | int timestepCount = 4;
32 |
33 | System.out.println("\n\nDebug Input");
34 | ND4JMatrixTool.debug3D_Nd4J_Input(input, miniBatchSize, columnCount, timestepCount);
35 | ND4JMatrixTool.debug2D_Nd4J_Input( inputMask, miniBatchSize, timestepCount);
36 |
37 |
38 | System.out.println("\n\nDebug Labels");
39 | ND4JMatrixTool.debug3D_Nd4J_Input(labels, miniBatchSize, 2, timestepCount);
40 | ND4JMatrixTool.debug2D_Nd4J_Input( labelsMask, miniBatchSize, timestepCount);
41 |
42 |
43 |
44 | }
45 |
46 | }
47 |
--------------------------------------------------------------------------------
/src/test/resources/data/physionet/sample/set-a/135458.txt:
--------------------------------------------------------------------------------
1 | Time,Parameter,Value
2 | 00:00,RecordID,135458
3 | 00:00,Age,42
4 | 00:00,Gender,1
5 | 00:00,Height,177.8
6 | 00:00,ICUType,3
7 | 00:00,Weight,94.8
8 | 00:38,Lactate,1.1
9 | 01:42,FiO2,0.6
10 | 01:42,GCS,8
11 | 01:42,HR,76
12 | 01:42,MechVent,1
13 | 01:42,NIDiasABP,96
14 | 01:42,NIMAP,113.7
15 | 01:42,NISysABP,149
16 | 01:42,Temp,35.9
17 | 01:42,Urine,480
18 | 01:42,Weight,94.8
19 | 01:57,HR,73
20 | 01:57,NIDiasABP,82
21 | 01:57,NIMAP,100.3
22 | 01:57,NISysABP,137
23 | 01:57,Weight,94.8
24 | 02:12,HR,76
25 | 02:12,NIDiasABP,75
26 | 02:12,NIMAP,91.33
27 | 02:12,NISysABP,124
28 | 02:12,Weight,94.8
29 | 02:27,HR,78
30 | 02:27,NIDiasABP,74
31 | 02:27,NIMAP,90
32 | 02:27,NISysABP,122
33 | 02:27,Weight,94.8
34 | 02:42,HR,74
35 | 02:42,NIDiasABP,72
36 | 02:42,NIMAP,89
37 | 02:42,NISysABP,123
38 | 02:42,Urine,10
39 | 02:42,Weight,94.8
40 | 03:12,HR,78
41 | 03:12,NIDiasABP,87
42 | 03:12,NIMAP,102.3
43 | 03:12,NISysABP,133
44 | 03:12,Weight,94.8
45 | 03:42,HR,77
46 | 03:42,NIDiasABP,76
47 | 03:42,NIMAP,93.33
48 | 03:42,NISysABP,128
49 | 03:42,Urine,30
50 | 03:42,Weight,94.8
51 | 04:42,HR,75
52 | 04:42,NIDiasABP,66
53 | 04:42,NIMAP,85.33
54 | 04:42,NISysABP,124
55 | 04:42,Urine,10
56 | 04:42,Weight,94.8
57 | 05:42,HR,71
58 | 05:42,NIDiasABP,66
59 | 05:42,NIMAP,84.33
60 | 05:42,NISysABP,121
61 | 05:42,Urine,25
62 | 05:42,Weight,94.8
63 | 06:12,FiO2,0.6
64 | 06:12,MechVent,1
65 | 06:42,GCS,8
66 | 06:42,HR,76
67 | 06:42,NIDiasABP,58
68 | 06:42,NIMAP,79.67
69 | 06:42,NISysABP,123
70 | 06:42,Temp,35.6
71 | 06:42,Urine,30
72 | 06:42,Weight,94.8
73 | 07:37,BUN,11
74 | 07:37,Creatinine,0.8
75 | 07:37,Glucose,201
76 | 07:37,HCO3,24
77 | 07:37,HCT,41.2
78 | 07:37,Mg,2
79 | 07:37,Platelets,246
80 | 07:37,K,4.3
81 | 07:37,Na,142
82 | 07:37,WBC,7.4
83 | 07:42,HR,72
84 | 07:42,NIDiasABP,66
85 | 07:42,NIMAP,83.33
86 | 07:42,NISysABP,118
87 | 07:42,Urine,50
88 | 07:42,Weight,94.8
89 | 08:42,HR,72
90 | 08:42,NIDiasABP,72
91 | 08:42,NIMAP,90.67
92 | 08:42,NISysABP,128
93 | 08:42,Urine,330
94 | 08:42,Weight,94.8
95 | 09:42,HR,80
96 | 09:42,NIDiasABP,77
97 | 09:42,NIMAP,93.67
98 | 09:42,NISysABP,127
99 | 09:42,Weight,94.8
100 | 10:07,FiO2,0.6
101 | 10:07,MechVent,1
102 | 10:17,FiO2,0.5
103 | 10:42,GCS,7
104 | 10:42,HR,78
105 | 10:42,NIDiasABP,78
106 | 10:42,NIMAP,96.33
107 | 10:42,NISysABP,133
108 | 10:42,Urine,200
109 | 10:42,Weight,94.8
110 | 11:12,HR,75
111 | 11:12,NIDiasABP,73
112 | 11:12,NIMAP,92
113 | 11:12,NISysABP,130
114 | 11:12,Temp,36.2
115 | 11:12,Weight,94.8
116 | 11:42,HR,76
117 | 11:42,NIDiasABP,73
118 | 11:42,NIMAP,91.67
119 | 11:42,NISysABP,129
120 | 11:42,Urine,240
121 | 11:42,Weight,94.8
122 | 12:22,FiO2,0.5
123 | 12:22,MechVent,1
124 | 12:42,HR,78
125 | 12:42,NIDiasABP,75
126 | 12:42,NIMAP,91.67
127 | 12:42,NISysABP,125
128 | 12:42,Urine,320
129 | 12:42,Weight,94.8
130 | 13:12,HR,99
131 | 13:12,NIDiasABP,75
132 | 13:12,NIMAP,94
133 | 13:12,NISysABP,132
134 | 13:12,Weight,94.8
135 | 13:42,HR,87
136 | 13:42,NIDiasABP,76
137 | 13:42,NIMAP,96
138 | 13:42,NISysABP,136
139 | 13:42,Weight,94.8
140 | 13:52,FiO2,0.5
141 | 13:52,MechVent,1
142 | 14:42,GCS,8
143 | 14:42,HR,85
144 | 14:42,NIDiasABP,64
145 | 14:42,NIMAP,85
146 | 14:42,NISysABP,127
147 | 14:42,Temp,36.8
148 | 14:42,Urine,320
149 | 14:42,Weight,94.8
150 | 15:42,HR,73
151 | 15:42,NIDiasABP,71
152 | 15:42,NIMAP,89.33
153 | 15:42,NISysABP,126
154 | 15:42,Weight,94.8
155 | 16:42,HR,95
156 | 16:42,NIDiasABP,73
157 | 16:42,NIMAP,92
158 | 16:42,NISysABP,130
159 | 16:42,Urine,450
160 | 16:42,Weight,94.8
161 | 16:57,HR,82
162 | 16:57,NIDiasABP,65
163 | 16:57,NIMAP,86.67
164 | 16:57,NISysABP,130
165 | 16:57,Weight,94.8
166 | 17:42,HR,80
167 | 17:42,NIDiasABP,54
168 | 17:42,NIMAP,77.33
169 | 17:42,NISysABP,124
170 | 17:42,Urine,180
171 | 17:42,Weight,94.8
172 | 17:52,FiO2,0.5
173 | 17:52,MechVent,1
174 | 19:12,GCS,8
175 | 19:12,HR,86
176 | 19:12,NIDiasABP,78
177 | 19:12,NIMAP,96.67
178 | 19:12,NISysABP,134
179 | 19:12,Temp,36.7
180 | 19:12,Urine,120
181 | 19:12,Weight,94.8
182 | 19:42,HR,77
183 | 19:42,NIDiasABP,74
184 | 19:42,NIMAP,93.67
185 | 19:42,NISysABP,133
186 | 19:42,Urine,100
187 | 19:42,Weight,94.8
188 | 20:42,HR,104
189 | 20:42,MechVent,1
190 | 20:42,NIDiasABP,48
191 | 20:42,NIMAP,63.67
192 | 20:42,NISysABP,95
193 | 20:42,Urine,280
194 | 20:42,Weight,94.8
195 | 21:12,HR,94
196 | 21:12,NIDiasABP,74
197 | 21:12,NIMAP,97.33
198 | 21:12,NISysABP,144
199 | 21:12,Weight,94.8
200 | 21:27,HR,88
201 | 21:27,NIDiasABP,72
202 | 21:27,NIMAP,92.33
203 | 21:27,NISysABP,133
204 | 21:27,Weight,94.8
205 | 21:42,HR,91
206 | 21:42,Weight,94.8
207 | 22:42,FiO2,0.5
208 | 22:42,GCS,8
209 | 22:42,HR,98
210 | 22:42,MechVent,1
211 | 22:42,NIDiasABP,50
212 | 22:42,NIMAP,69.67
213 | 22:42,NISysABP,109
214 | 22:42,Urine,50
215 | 22:42,Weight,94.8
216 | 23:42,HR,88
217 | 23:42,MechVent,1
218 | 23:42,NIDiasABP,57
219 | 23:42,NIMAP,74.33
220 | 23:42,NISysABP,109
221 | 23:42,Temp,36.8
222 | 23:42,Urine,16
223 | 23:42,Weight,94.8
224 | 24:42,HR,80
225 | 24:42,NIDiasABP,77
226 | 24:42,NIMAP,97.33
227 | 24:42,NISysABP,138
228 | 24:42,Urine,60
229 | 24:42,Weight,94.8
230 | 25:42,HR,79
231 | 25:42,NIDiasABP,64
232 | 25:42,NIMAP,83.33
233 | 25:42,NISysABP,122
234 | 25:42,Urine,60
235 | 25:42,Weight,94.8
236 | 26:42,FiO2,0.5
237 | 26:42,GCS,8
238 | 26:42,HR,78
239 | 26:42,MechVent,1
240 | 26:42,NIDiasABP,59
241 | 26:42,NIMAP,79.33
242 | 26:42,NISysABP,120
243 | 26:42,Temp,35.9
244 | 26:42,Urine,60
245 | 26:42,Weight,94.8
246 | 27:42,HR,79
247 | 27:42,NIDiasABP,65
248 | 27:42,NIMAP,81.33
249 | 27:42,NISysABP,114
250 | 27:42,Urine,120
251 | 27:42,Weight,94.8
252 | 28:42,HR,67
253 | 28:42,NIDiasABP,84
254 | 28:42,NIMAP,100.7
255 | 28:42,NISysABP,134
256 | 28:42,Urine,160
257 | 28:42,Weight,94.8
258 | 29:42,HR,72
259 | 29:42,NIDiasABP,68
260 | 29:42,NIMAP,87.33
261 | 29:42,NISysABP,126
262 | 29:42,Urine,70
263 | 29:42,Weight,94.8
264 | 30:42,FiO2,0.5
265 | 30:42,GCS,8
266 | 30:42,HR,69
267 | 30:42,MechVent,1
268 | 30:42,NIDiasABP,69
269 | 30:42,NIMAP,89.33
270 | 30:42,NISysABP,130
271 | 30:42,Temp,36.3
272 | 30:42,Urine,80
273 | 30:42,Weight,94.8
274 | 31:42,HR,74
275 | 31:42,NIDiasABP,79
276 | 31:42,NIMAP,104.3
277 | 31:42,NISysABP,155
278 | 31:42,Weight,94.8
279 | 32:22,BUN,8
280 | 32:22,Creatinine,0.8
281 | 32:22,Glucose,90
282 | 32:22,HCO3,28
283 | 32:22,HCT,41
284 | 32:22,Mg,1.9
285 | 32:22,Platelets,235
286 | 32:22,K,4
287 | 32:22,Na,137
288 | 32:22,WBC,9.7
289 | 32:42,HR,73
290 | 32:42,NIDiasABP,65
291 | 32:42,NIMAP,83
292 | 32:42,NISysABP,119
293 | 32:42,Urine,240
294 | 32:42,Weight,94.8
295 | 33:42,HR,73
296 | 33:42,NIDiasABP,92
297 | 33:42,NIMAP,106.7
298 | 33:42,NISysABP,136
299 | 33:42,Urine,100
300 | 33:42,Weight,94.8
301 | 34:27,FiO2,0.5
302 | 34:27,MechVent,1
303 | 34:42,GCS,9
304 | 34:42,HR,89
305 | 34:42,NIDiasABP,59
306 | 34:42,NIMAP,84.67
307 | 34:42,NISysABP,136
308 | 34:42,Temp,37.3
309 | 34:42,Urine,360
310 | 34:42,Weight,94.8
311 | 35:42,HR,90
312 | 35:42,NIDiasABP,64
313 | 35:42,NIMAP,83
314 | 35:42,NISysABP,121
315 | 35:42,Weight,94.8
316 | 36:42,HR,86
317 | 36:42,NIDiasABP,71
318 | 36:42,NIMAP,87
319 | 36:42,NISysABP,119
320 | 36:42,Urine,280
321 | 36:42,Weight,94.8
322 | 37:22,FiO2,0.5
323 | 37:22,MechVent,1
324 | 37:42,HR,82
325 | 37:42,NIDiasABP,73
326 | 37:42,NIMAP,89.33
327 | 37:42,NISysABP,122
328 | 37:42,Temp,37.7
329 | 37:42,Urine,160
330 | 37:42,Weight,94.8
331 | 38:42,GCS,9
332 | 38:42,HR,89
333 | 38:42,NIDiasABP,62
334 | 38:42,NIMAP,82
335 | 38:42,NISysABP,122
336 | 38:42,Urine,180
337 | 38:42,Weight,94.8
338 | 38:57,FiO2,0.5
339 | 38:57,MechVent,1
340 | 39:42,HR,110
341 | 39:42,NIDiasABP,68
342 | 39:42,NIMAP,86.33
343 | 39:42,NISysABP,123
344 | 39:42,Weight,94.8
345 | 39:50,HCT,40
346 | 40:42,HR,88
347 | 40:42,NIDiasABP,55
348 | 40:42,NIMAP,74.33
349 | 40:42,NISysABP,113
350 | 40:42,Urine,380
351 | 40:42,Weight,94.8
352 | 41:22,MechVent,1
353 | 41:25,MechVent,1
354 | 41:42,HR,92
355 | 41:42,NIDiasABP,64
356 | 41:42,NIMAP,79
357 | 41:42,NISysABP,109
358 | 41:42,Urine,80
359 | 41:42,Weight,94.8
360 | 42:42,GCS,14
361 | 42:42,HR,92
362 | 42:42,NIDiasABP,70
363 | 42:42,NIMAP,88.33
364 | 42:42,NISysABP,125
365 | 42:42,Temp,37.2
366 | 42:42,Temp,0
367 | 42:42,Urine,100
368 | 42:42,Weight,94.8
369 | 42:53,FiO2,0.4
370 | 43:12,FiO2,0.7
371 | 43:12,HR,108
372 | 43:12,NIDiasABP,55
373 | 43:12,NIMAP,78.67
374 | 43:12,NISysABP,126
375 | 43:12,Weight,94.8
376 | 43:42,HR,95
377 | 43:42,NIDiasABP,73
378 | 43:42,NIMAP,93.67
379 | 43:42,NISysABP,135
380 | 43:42,Weight,94.8
381 | 44:42,HR,101
382 | 44:42,NIDiasABP,54
383 | 44:42,NIMAP,80.67
384 | 44:42,NISysABP,134
385 | 44:42,Urine,180
386 | 44:42,Weight,94.8
387 | 45:42,HR,99
388 | 45:42,NIDiasABP,58
389 | 45:42,NIMAP,81.33
390 | 45:42,NISysABP,128
391 | 45:42,Weight,94.8
392 | 46:42,GCS,14
393 | 46:42,HR,88
394 | 46:42,MechVent,1
395 | 46:42,NIDiasABP,61
396 | 46:42,NIMAP,82.67
397 | 46:42,NISysABP,126
398 | 46:42,Temp,37.2
399 | 46:42,Urine,80
400 | 46:42,Weight,94.8
401 | 47:42,HR,87
402 | 47:42,NIDiasABP,59
403 | 47:42,NIMAP,83
404 | 47:42,NISysABP,131
405 | 47:42,Weight,94.8
--------------------------------------------------------------------------------
/src/test/resources/data/physionet/sample/set-a/135476.txt:
--------------------------------------------------------------------------------
1 | Time,Parameter,Value
2 | 00:00,RecordID,135476
3 | 00:00,Age,54
4 | 00:00,Gender,1
5 | 00:00,Height,175.3
6 | 00:00,ICUType,2
7 | 00:00,Weight,100
8 | 00:16,pH,7.43
9 | 00:16,PaCO2,43
10 | 00:16,PaO2,294
11 | 01:39,pH,7.45
12 | 01:39,PaCO2,36
13 | 01:39,PaO2,325
14 | 02:21,pH,7.36
15 | 02:21,PaCO2,46
16 | 02:21,PaO2,211
17 | 02:40,FiO2,1
18 | 02:40,MechVent,1
19 | 03:10,DiasABP,56
20 | 03:10,GCS,3
21 | 03:10,HR,76
22 | 03:10,MAP,69
23 | 03:10,SysABP,104
24 | 03:10,Urine,550
25 | 03:10,Urine,100
26 | 03:15,DiasABP,54
27 | 03:15,HR,78
28 | 03:15,MAP,66
29 | 03:15,SysABP,100
30 | 03:15,Temp,36.3
31 | 03:20,BUN,16
32 | 03:20,Creatinine,0.9
33 | 03:20,HCT,28.4
34 | 03:20,Mg,1.4
35 | 03:20,Platelets,155
36 | 03:20,WBC,14.2
37 | 03:25,DiasABP,68
38 | 03:25,HR,87
39 | 03:25,MAP,82
40 | 03:25,SysABP,116
41 | 03:25,Temp,36.3
42 | 03:33,pH,7.43
43 | 03:33,PaCO2,38
44 | 03:33,PaO2,334
45 | 03:40,DiasABP,64
46 | 03:40,HR,85
47 | 03:40,MAP,75
48 | 03:40,SysABP,108
49 | 03:40,Temp,36.5
50 | 03:40,Urine,0
51 | 03:40,Urine,300
52 | 03:55,DiasABP,71
53 | 03:55,HR,90
54 | 03:55,MAP,87
55 | 03:55,SysABP,120
56 | 03:55,Temp,36.5
57 | 04:08,FiO2,0.5
58 | 04:10,DiasABP,79
59 | 04:10,HR,97
60 | 04:10,MAP,93
61 | 04:10,SysABP,124
62 | 04:10,Temp,36.7
63 | 04:10,Urine,400
64 | 04:25,DiasABP,79
65 | 04:25,HR,95
66 | 04:25,MAP,95
67 | 04:25,SysABP,129
68 | 04:25,Temp,36.7
69 | 04:40,DiasABP,84
70 | 04:40,HR,93
71 | 04:40,MAP,102
72 | 04:40,SysABP,133
73 | 04:40,Temp,36.7
74 | 04:40,Urine,400
75 | 04:55,DiasABP,86
76 | 04:55,HR,97
77 | 04:55,MAP,103
78 | 04:55,SysABP,132
79 | 04:55,Temp,37
80 | 05:10,DiasABP,76
81 | 05:10,HR,103
82 | 05:10,MAP,88
83 | 05:10,SysABP,120
84 | 05:10,Temp,37
85 | 05:25,DiasABP,73
86 | 05:25,HR,100
87 | 05:25,MAP,89
88 | 05:25,SysABP,131
89 | 05:25,Temp,37
90 | 05:40,DiasABP,65
91 | 05:40,GCS,7
92 | 05:40,HR,106
93 | 05:40,MAP,77
94 | 05:40,SysABP,105
95 | 05:40,Temp,37.2
96 | 05:40,Urine,800
97 | 05:52,FiO2,0.5
98 | 05:52,MechVent,1
99 | 05:55,DiasABP,68
100 | 05:55,HR,114
101 | 05:55,MAP,82
102 | 05:55,SysABP,115
103 | 05:55,Temp,37.3
104 | 06:10,DiasABP,83
105 | 06:10,HR,122
106 | 06:10,MAP,97
107 | 06:10,SysABP,132
108 | 06:10,Temp,37.7
109 | 06:25,DiasABP,82
110 | 06:25,HR,119
111 | 06:25,MAP,94
112 | 06:25,SysABP,131
113 | 06:25,Temp,38.2
114 | 06:40,DiasABP,80
115 | 06:40,HR,114
116 | 06:40,MAP,90
117 | 06:40,SysABP,123
118 | 06:40,Temp,38.6
119 | 06:40,Urine,120
120 | 06:55,DiasABP,68
121 | 06:55,HR,111
122 | 06:55,MAP,78
123 | 06:55,SysABP,104
124 | 06:55,Temp,38.8
125 | 07:10,DiasABP,68
126 | 07:10,HR,111
127 | 07:10,MAP,76
128 | 07:10,SysABP,102
129 | 07:10,Temp,38.9
130 | 07:11,pH,7.33
131 | 07:11,Glucose,139
132 | 07:11,HCT,38.8
133 | 07:11,Platelets,187
134 | 07:11,K,4.8
135 | 07:11,PaCO2,49
136 | 07:11,PaO2,148
137 | 07:11,WBC,20.3
138 | 07:25,DiasABP,74
139 | 07:25,FiO2,0.5
140 | 07:25,HR,109
141 | 07:25,MAP,85
142 | 07:25,MechVent,1
143 | 07:25,SysABP,117
144 | 07:25,Temp,38.9
145 | 07:40,DiasABP,74
146 | 07:40,GCS,11
147 | 07:40,HR,106
148 | 07:40,MAP,84
149 | 07:40,SysABP,113
150 | 07:40,Temp,38.9
151 | 07:40,Urine,120
152 | 07:55,DiasABP,68
153 | 07:55,HR,109
154 | 07:55,MAP,84
155 | 07:55,SysABP,121
156 | 07:55,Temp,38.9
157 | 08:10,DiasABP,66
158 | 08:10,HR,106
159 | 08:10,MAP,77
160 | 08:10,SysABP,109
161 | 08:10,Temp,39
162 | 08:25,DiasABP,66
163 | 08:25,HR,107
164 | 08:25,MAP,77
165 | 08:25,SysABP,109
166 | 08:25,Temp,39
167 | 08:40,DiasABP,63
168 | 08:40,HR,104
169 | 08:40,MAP,74
170 | 08:40,SysABP,104
171 | 08:40,Temp,39
172 | 08:40,Urine,60
173 | 09:40,DiasABP,61
174 | 09:40,FiO2,0.5
175 | 09:40,GCS,10
176 | 09:40,HR,106
177 | 09:40,MAP,71
178 | 09:40,MechVent,1
179 | 09:40,SysABP,101
180 | 09:40,Temp,39
181 | 09:40,Urine,45
182 | 10:40,DiasABP,55
183 | 10:40,HR,107
184 | 10:40,MAP,65
185 | 10:40,SysABP,89
186 | 10:40,Temp,39
187 | 10:40,Urine,45
188 | 11:40,DiasABP,56
189 | 11:40,FiO2,0.5
190 | 11:40,GCS,10
191 | 11:40,HR,107
192 | 11:40,MAP,65
193 | 11:40,MechVent,1
194 | 11:40,SysABP,86
195 | 11:40,Temp,38.9
196 | 11:40,Urine,45
197 | 12:10,DiasABP,59
198 | 12:10,HR,107
199 | 12:10,MAP,67
200 | 12:10,SysABP,82
201 | 12:10,Temp,39
202 | 12:40,DiasABP,64
203 | 12:40,HR,103
204 | 12:40,MAP,73
205 | 12:40,SysABP,93
206 | 12:40,Temp,38.9
207 | 12:40,Urine,25
208 | 13:10,DiasABP,63
209 | 13:10,HR,104
210 | 13:10,MAP,70
211 | 13:10,SysABP,83
212 | 13:10,Temp,38.7
213 | 13:19,HCT,34.8
214 | 13:29,pH,7.4
215 | 13:29,PaCO2,48
216 | 13:29,PaO2,125
217 | 13:40,DiasABP,56
218 | 13:40,FiO2,0.5
219 | 13:40,GCS,15
220 | 13:40,HR,107
221 | 13:40,MAP,67
222 | 13:40,MechVent,1
223 | 13:40,SysABP,93
224 | 13:40,Temp,38.6
225 | 13:40,Urine,32
226 | 14:40,DiasABP,52
227 | 14:40,HR,100
228 | 14:40,MAP,63
229 | 14:40,SysABP,90
230 | 14:40,Temp,38.3
231 | 14:40,Urine,25
232 | 15:40,DiasABP,50
233 | 15:40,HR,96
234 | 15:40,MAP,61
235 | 15:40,SysABP,86
236 | 15:40,Temp,37.9
237 | 15:40,Urine,27
238 | 16:40,DiasABP,48
239 | 16:40,FiO2,0.4
240 | 16:40,HR,97
241 | 16:40,MAP,60
242 | 16:40,MechVent,1
243 | 16:40,SysABP,85
244 | 16:40,Temp,38
245 | 16:40,Urine,25
246 | 17:11,BUN,21
247 | 17:11,Creatinine,1.4
248 | 17:11,Glucose,109
249 | 17:11,HCO3,27
250 | 17:11,HCT,34.4
251 | 17:11,Mg,1.4
252 | 17:11,Platelets,161
253 | 17:11,K,4.5
254 | 17:11,Na,139
255 | 17:11,WBC,15.8
256 | 17:40,DiasABP,50
257 | 17:40,GCS,4
258 | 17:40,HR,94
259 | 17:40,MAP,61
260 | 17:40,SysABP,86
261 | 17:40,Temp,37.9
262 | 17:40,Urine,15
263 | 17:54,pH,7.36
264 | 17:54,PaCO2,50
265 | 17:54,PaO2,97
266 | 18:40,DiasABP,53
267 | 18:40,HR,98
268 | 18:40,MAP,65
269 | 18:40,MechVent,1
270 | 18:40,SysABP,93
271 | 18:40,Temp,38.1
272 | 18:40,Urine,30
273 | 18:40,Weight,105.5
274 | 19:40,DiasABP,60
275 | 19:40,FiO2,0.4
276 | 19:40,HR,87
277 | 19:40,MAP,76
278 | 19:40,MechVent,1
279 | 19:40,SysABP,105
280 | 19:40,Temp,37.8
281 | 19:40,Urine,25
282 | 19:40,Weight,105.5
283 | 20:40,DiasABP,66
284 | 20:40,HR,87
285 | 20:40,MAP,84
286 | 20:40,SysABP,117
287 | 20:40,Temp,37.8
288 | 20:40,Urine,25
289 | 20:40,Weight,105.5
290 | 21:40,DiasABP,59
291 | 21:40,FiO2,0.4
292 | 21:40,GCS,9
293 | 21:40,HR,105
294 | 21:40,MAP,73
295 | 21:40,MechVent,1
296 | 21:40,SysABP,104
297 | 21:40,Temp,38.3
298 | 21:40,Urine,40
299 | 21:40,Weight,105.5
300 | 21:55,DiasABP,52
301 | 21:55,HR,112
302 | 21:55,MAP,67
303 | 21:55,SysABP,98
304 | 21:55,Temp,38.3
305 | 21:55,Weight,105.5
306 | 22:10,DiasABP,57
307 | 22:10,HR,111
308 | 22:10,MAP,71
309 | 22:10,SysABP,97
310 | 22:10,Temp,38.2
311 | 22:10,Weight,105.5
312 | 22:25,DiasABP,62
313 | 22:25,HR,108
314 | 22:25,MAP,79
315 | 22:25,SysABP,109
316 | 22:25,Temp,38.2
317 | 22:25,Weight,105.5
318 | 22:40,DiasABP,56
319 | 22:40,HR,105
320 | 22:40,MAP,68
321 | 22:40,SysABP,92
322 | 22:40,Temp,38
323 | 22:40,Urine,40
324 | 22:40,Weight,105.5
325 | 22:55,DiasABP,55
326 | 22:55,HR,106
327 | 22:55,MAP,67
328 | 22:55,SysABP,93
329 | 22:55,Temp,38
330 | 22:55,Weight,105.5
331 | 23:10,DiasABP,58
332 | 23:10,HR,109
333 | 23:10,MAP,73
334 | 23:10,SysABP,104
335 | 23:10,Temp,38
336 | 23:10,Weight,105.5
337 | 23:30,pH,7.39
338 | 23:30,PaCO2,38
339 | 23:30,PaO2,79
340 | 23:30,SaO2,95
341 | 23:40,DiasABP,61
342 | 23:40,HR,105
343 | 23:40,MAP,76
344 | 23:40,SysABP,110
345 | 23:40,Temp,37.8
346 | 23:40,Urine,35
347 | 23:40,Weight,105.5
348 | 23:51,FiO2,0.6
349 | 24:10,DiasABP,66
350 | 24:10,HR,106
351 | 24:10,MAP,81
352 | 24:10,SysABP,115
353 | 24:10,Temp,37.8
354 | 24:10,Weight,105.5
355 | 24:40,DiasABP,66
356 | 24:40,HR,108
357 | 24:40,MAP,82
358 | 24:40,SysABP,115
359 | 24:40,Temp,37.8
360 | 24:40,Urine,45
361 | 24:40,Weight,105.5
362 | 25:40,DiasABP,63
363 | 25:40,GCS,15
364 | 25:40,HR,112
365 | 25:40,MAP,78
366 | 25:40,SysABP,106
367 | 25:40,Temp,37.7
368 | 25:40,Urine,50
369 | 25:40,Weight,105.5
370 | 26:35,pH,7.37
371 | 26:35,PaCO2,45
372 | 26:35,PaO2,82
373 | 26:40,DiasABP,59
374 | 26:40,HR,113
375 | 26:40,MAP,73
376 | 26:40,SysABP,103
377 | 26:40,Temp,37.8
378 | 26:40,Urine,40
379 | 26:40,Weight,105.5
380 | 27:40,DiasABP,65
381 | 27:40,FiO2,0.5
382 | 27:40,HR,109
383 | 27:40,MAP,77
384 | 27:40,MechVent,1
385 | 27:40,SysABP,104
386 | 27:40,Urine,70
387 | 27:40,Weight,105.5
388 | 28:40,DiasABP,67
389 | 28:40,HR,104
390 | 28:40,MAP,81
391 | 28:40,SysABP,113
392 | 28:40,Urine,50
393 | 28:40,Weight,105.5
394 | 29:40,DiasABP,63
395 | 29:40,GCS,15
396 | 29:40,HR,101
397 | 29:40,MAP,77
398 | 29:40,SysABP,114
399 | 29:40,Urine,60
400 | 29:40,Weight,105.5
401 | 30:40,DiasABP,67
402 | 30:40,HR,107
403 | 30:40,MAP,80
404 | 30:40,SysABP,110
405 | 30:40,Weight,105.5
406 | 31:40,DiasABP,62
407 | 31:40,FiO2,0.4
408 | 31:40,HR,102
409 | 31:40,MAP,74
410 | 31:40,MechVent,1
411 | 31:40,SysABP,108
412 | 31:40,Urine,120
413 | 31:40,Weight,105.5
414 | 32:40,DiasABP,61
415 | 32:40,HR,102
416 | 32:40,MAP,74
417 | 32:40,SysABP,108
418 | 32:40,Weight,105.5
419 | 33:40,DiasABP,61
420 | 33:40,FiO2,0.4
421 | 33:40,GCS,15
422 | 33:40,HR,99
423 | 33:40,MAP,74
424 | 33:40,MechVent,1
425 | 33:40,SysABP,104
426 | 33:40,Urine,110
427 | 33:40,Weight,105.5
428 | 34:40,DiasABP,59
429 | 34:40,FiO2,0.5
430 | 34:40,HR,97
431 | 34:40,MAP,70
432 | 34:40,MechVent,1
433 | 34:40,SysABP,93
434 | 34:40,Temp,37.6
435 | 34:40,Urine,50
436 | 34:40,Weight,105.5
437 | 35:40,DiasABP,59
438 | 35:40,HR,94
439 | 35:40,MAP,70
440 | 35:40,SysABP,95
441 | 35:40,Urine,25
442 | 35:40,Weight,105.5
443 | 36:40,DiasABP,54
444 | 36:40,HR,92
445 | 36:40,MAP,66
446 | 36:40,SysABP,92
447 | 36:40,Urine,45
448 | 36:40,Weight,105.5
449 | 37:40,DiasABP,58
450 | 37:40,FiO2,0.7
451 | 37:40,HR,93
452 | 37:40,MAP,74
453 | 37:40,MechVent,1
454 | 37:40,SysABP,117
455 | 37:40,Weight,105.5
456 | 38:40,DiasABP,57
457 | 38:40,HR,100
458 | 38:40,MAP,74
459 | 38:40,SysABP,118
460 | 38:40,Urine,80
461 | 38:40,Weight,105.5
462 | 39:40,DiasABP,56
463 | 39:40,HR,100
464 | 39:40,MAP,70
465 | 39:40,SysABP,104
466 | 39:40,Urine,70
467 | 39:40,Weight,105.5
468 | 40:40,DiasABP,59
469 | 40:40,HR,100
470 | 40:40,MAP,71
471 | 40:40,SysABP,97
472 | 40:40,Urine,35
473 | 40:40,Weight,105.5
474 | 41:12,pH,7.4
475 | 41:12,PaCO2,49
476 | 41:12,PaO2,92
477 | 41:20,BUN,25
478 | 41:20,Creatinine,1.2
479 | 41:20,Glucose,122
480 | 41:20,HCO3,27
481 | 41:20,HCT,26.6
482 | 41:20,Mg,1.5
483 | 41:20,Platelets,108
484 | 41:20,K,4.1
485 | 41:20,Na,138
486 | 41:20,WBC,11.1
487 | 41:40,DiasABP,55
488 | 41:40,HR,99
489 | 41:40,MAP,68
490 | 41:40,SysABP,97
491 | 41:40,Weight,105.5
492 | 42:40,DiasABP,58
493 | 42:40,HR,102
494 | 42:40,MAP,74
495 | 42:40,SysABP,112
496 | 42:40,Weight,104.5
497 | 43:40,DiasABP,58
498 | 43:40,FiO2,0.7
499 | 43:40,GCS,15
500 | 43:40,HR,104
501 | 43:40,MAP,94
502 | 43:40,MechVent,1
503 | 43:40,SysABP,115
504 | 43:40,Temp,38.1
505 | 43:40,Urine,140
506 | 43:40,Weight,104.5
507 | 44:23,pH,7.37
508 | 44:23,PaCO2,43
509 | 44:23,PaO2,154
510 | 44:40,DiasABP,52
511 | 44:40,HR,106
512 | 44:40,MAP,65
513 | 44:40,NIDiasABP,48
514 | 44:40,NIMAP,70
515 | 44:40,NISysABP,114
516 | 44:40,SysABP,102
517 | 44:40,Urine,50
518 | 44:40,Weight,104.5
519 | 45:40,DiasABP,51
520 | 45:40,FiO2,0.7
521 | 45:40,GCS,15
522 | 45:40,HR,101
523 | 45:40,MAP,68
524 | 45:40,MechVent,1
525 | 45:40,NIDiasABP,46
526 | 45:40,NIMAP,68
527 | 45:40,NISysABP,112
528 | 45:40,SysABP,105
529 | 45:40,Temp,37.9
530 | 45:40,Urine,80
531 | 45:40,Weight,104.5
532 | 46:40,DiasABP,53
533 | 46:40,HR,102
534 | 46:40,MAP,70
535 | 46:40,NIDiasABP,53
536 | 46:40,NIMAP,74.67
537 | 46:40,NISysABP,118
538 | 46:40,SysABP,107
539 | 46:40,Urine,35
540 | 46:40,Weight,104.5
541 | 47:19,pH,7.4
542 | 47:19,PaCO2,42
543 | 47:19,PaO2,69
544 | 47:19,SaO2,95
545 | 47:40,DiasABP,58
546 | 47:40,FiO2,0.7
547 | 47:40,HR,101
548 | 47:40,MAP,74
549 | 47:40,SysABP,116
550 | 47:40,Urine,370
551 | 47:40,Weight,104.5
552 |
--------------------------------------------------------------------------------
/src/test/resources/data/physionet/sample/set-a/136434.txt:
--------------------------------------------------------------------------------
1 | Time,Parameter,Value
2 | 00:00,RecordID,136434
3 | 00:00,Age,60
4 | 00:00,Gender,0
5 | 00:00,Height,162.6
6 | 00:00,ICUType,3
7 | 00:00,Weight,90.6
8 | 00:07,DiasABP,0
9 | 00:07,HR,70
10 | 00:07,MAP,265
11 | 00:07,NIDiasABP,40
12 | 00:07,NIMAP,53.33
13 | 00:07,NISysABP,80
14 | 00:07,SysABP,0
15 | 00:07,Temp,36.4
16 | 00:17,DiasABP,0
17 | 00:17,HR,71
18 | 00:17,NIDiasABP,26
19 | 00:17,NIMAP,43.33
20 | 00:17,NISysABP,78
21 | 00:17,SysABP,0
22 | 00:22,DiasABP,0
23 | 00:22,HR,67
24 | 00:22,NIDiasABP,32
25 | 00:22,NIMAP,46.67
26 | 00:22,NISysABP,76
27 | 00:22,SysABP,0
28 | 00:22,Weight,90.6
29 | 00:27,DiasABP,0
30 | 00:27,HR,68
31 | 00:27,NIDiasABP,43
32 | 00:27,NIMAP,57
33 | 00:27,NISysABP,85
34 | 00:27,SysABP,0
35 | 00:27,Weight,90.6
36 | 00:32,DiasABP,0
37 | 00:32,HR,70
38 | 00:32,MAP,0
39 | 00:32,NIDiasABP,29
40 | 00:32,NIMAP,48.33
41 | 00:32,NISysABP,87
42 | 00:32,SysABP,0
43 | 00:32,Weight,90.6
44 | 01:07,DiasABP,61
45 | 01:07,HR,78
46 | 01:07,MAP,85
47 | 01:07,NIDiasABP,39
48 | 01:07,NIMAP,54
49 | 01:07,NISysABP,84
50 | 01:07,SysABP,119
51 | 01:07,Weight,90.6
52 | 01:37,DiasABP,61
53 | 01:37,HR,83
54 | 01:37,MAP,82
55 | 01:37,SysABP,114
56 | 01:37,Temp,36
57 | 01:37,Weight,90.6
58 | 02:07,pH,7.46
59 | 02:07,DiasABP,58
60 | 02:07,FiO2,1
61 | 02:07,GCS,15
62 | 02:07,HR,77
63 | 02:07,MAP,77
64 | 02:07,PaCO2,48
65 | 02:07,PaO2,70
66 | 02:07,SysABP,111
67 | 02:07,SaO2,98
68 | 02:07,Temp,36
69 | 02:07,Urine,10
70 | 02:07,Weight,90.6
71 | 02:37,DiasABP,55
72 | 02:37,HR,66
73 | 02:37,MAP,72
74 | 02:37,SysABP,102
75 | 02:37,Temp,36.1
76 | 02:37,Weight,90.6
77 | 02:52,BUN,84
78 | 02:52,Creatinine,3.4
79 | 02:52,Glucose,210
80 | 02:52,HCO3,31
81 | 02:52,HCT,28.6
82 | 02:52,Mg,3.1
83 | 02:52,Platelets,42
84 | 02:52,K,3.8
85 | 02:52,Na,143
86 | 02:52,WBC,11.4
87 | 03:07,DiasABP,55
88 | 03:07,HR,69
89 | 03:07,Lactate,3.3
90 | 03:07,MAP,71
91 | 03:07,SysABP,103
92 | 03:07,Temp,36.1
93 | 03:07,Weight,90.6
94 | 04:07,DiasABP,54
95 | 04:07,FiO2,1
96 | 04:07,GCS,14
97 | 04:07,HR,67
98 | 04:07,MAP,69
99 | 04:07,SysABP,96
100 | 04:07,Temp,36.1
101 | 04:07,Weight,90.6
102 | 04:17,DiasABP,55
103 | 04:17,HR,68
104 | 04:17,MAP,71
105 | 04:17,SysABP,96
106 | 04:17,Temp,36.1
107 | 04:17,Weight,90.6
108 | 04:37,DiasABP,53
109 | 04:37,HR,68
110 | 04:37,MAP,67
111 | 04:37,SysABP,93
112 | 04:37,Temp,36.1
113 | 04:37,Weight,90.6
114 | 04:47,DiasABP,49
115 | 04:47,HR,67
116 | 04:47,MAP,60
117 | 04:47,SysABP,83
118 | 04:47,Temp,36.1
119 | 04:47,Weight,90.6
120 | 04:52,DiasABP,43
121 | 04:52,HR,67
122 | 04:52,SysABP,76
123 | 04:52,Temp,36.1
124 | 04:52,Weight,90.6
125 | 04:55,pH,7.4
126 | 04:55,PaCO2,57
127 | 04:55,PaO2,100
128 | 05:07,DiasABP,49
129 | 05:07,HR,68
130 | 05:07,MAP,62
131 | 05:07,SysABP,89
132 | 05:07,Temp,36.1
133 | 05:07,Weight,90.6
134 | 05:12,DiasABP,53
135 | 05:12,HR,68
136 | 05:12,MAP,67
137 | 05:12,NIDiasABP,37
138 | 05:12,NIMAP,54
139 | 05:12,NISysABP,88
140 | 05:12,SysABP,96
141 | 05:12,Temp,36.1
142 | 05:12,Weight,90.6
143 | 05:37,DiasABP,45
144 | 05:37,HR,68
145 | 05:37,MAP,56
146 | 05:37,SysABP,80
147 | 05:37,Temp,36
148 | 05:37,Weight,90.6
149 | 05:55,Lactate,2.9
150 | 06:07,DiasABP,47
151 | 06:07,HR,68
152 | 06:07,MAP,58
153 | 06:07,SysABP,84
154 | 06:07,Temp,36
155 | 06:07,Weight,90.6
156 | 06:37,DiasABP,52
157 | 06:37,HR,67
158 | 06:37,MAP,65
159 | 06:37,SysABP,96
160 | 06:37,Temp,36
161 | 06:37,Weight,90.6
162 | 07:07,DiasABP,51
163 | 07:07,HR,68
164 | 07:07,MAP,64
165 | 07:07,SysABP,93
166 | 07:07,Temp,36
167 | 07:07,Urine,5
168 | 07:07,Weight,90.6
169 | 08:07,DiasABP,51
170 | 08:07,FiO2,1
171 | 08:07,GCS,14
172 | 08:07,HR,65
173 | 08:07,MAP,68
174 | 08:07,SysABP,101
175 | 08:07,Temp,36
176 | 08:07,Weight,90.6
177 | 08:11,pH,7.41
178 | 08:11,PaCO2,54
179 | 08:11,PaO2,87
180 | 08:11,SaO2,98
181 | 08:12,DiasABP,51
182 | 08:12,HR,65
183 | 08:12,MAP,66
184 | 08:12,SysABP,99
185 | 08:12,Temp,36
186 | 08:12,Weight,90.6
187 | 09:07,ALP,81
188 | 09:07,ALT,39
189 | 09:07,AST,79
190 | 09:07,Bilirubin,33.5
191 | 09:07,BUN,89
192 | 09:07,Creatinine,3.6
193 | 09:07,DiasABP,50
194 | 09:07,Glucose,167
195 | 09:07,HCO3,32
196 | 09:07,HCT,29.5
197 | 09:07,HR,63
198 | 09:07,Mg,3
199 | 09:07,MAP,64
200 | 09:07,Platelets,46
201 | 09:07,K,3.9
202 | 09:07,Na,142
203 | 09:07,SysABP,96
204 | 09:07,Temp,35.9
205 | 09:07,WBC,10.1
206 | 09:07,Weight,90.6
207 | 09:11,Lactate,2.9
208 | 10:07,DiasABP,55
209 | 10:07,FiO2,0.8
210 | 10:07,HR,64
211 | 10:07,MAP,71
212 | 10:07,SysABP,107
213 | 10:07,Temp,35.9
214 | 10:07,Urine,0
215 | 10:07,Weight,90.6
216 | 11:04,pH,7.42
217 | 11:04,PaCO2,53
218 | 11:04,PaO2,74
219 | 11:04,SaO2,96
220 | 11:07,DiasABP,49
221 | 11:07,FiO2,0.8
222 | 11:07,HR,62
223 | 11:07,MAP,63
224 | 11:07,SysABP,97
225 | 11:07,Temp,35.8
226 | 11:07,Weight,90.6
227 | 12:04,Lactate,2.5
228 | 12:07,DiasABP,49
229 | 12:07,FiO2,0.8
230 | 12:07,GCS,14
231 | 12:07,HR,61
232 | 12:07,MAP,64
233 | 12:07,SysABP,96
234 | 12:07,Temp,35.7
235 | 12:07,Weight,90.6
236 | 13:07,DiasABP,56
237 | 13:07,FiO2,0.8
238 | 13:07,HR,66
239 | 13:07,MAP,75
240 | 13:07,SysABP,109
241 | 13:07,Temp,35.6
242 | 13:07,Urine,10
243 | 13:07,Weight,90.6
244 | 14:07,DiasABP,57
245 | 14:07,FiO2,0.8
246 | 14:07,HR,58
247 | 14:07,MAP,77
248 | 14:07,SysABP,112
249 | 14:07,Temp,35.6
250 | 14:07,Weight,90.6
251 | 15:07,DiasABP,51
252 | 15:07,FiO2,0.7
253 | 15:07,HR,56
254 | 15:07,MAP,66
255 | 15:07,SysABP,96
256 | 15:07,Temp,35.6
257 | 15:07,Urine,8
258 | 15:07,Weight,90.6
259 | 15:37,pH,7.43
260 | 15:37,PaCO2,51
261 | 15:37,PaO2,68
262 | 15:37,SaO2,96
263 | 16:07,DiasABP,54
264 | 16:07,FiO2,0.7
265 | 16:07,GCS,14
266 | 16:07,HR,55
267 | 16:07,MAP,72
268 | 16:07,SysABP,102
269 | 16:07,Temp,35.6
270 | 16:07,Urine,5
271 | 16:07,Weight,90.6
272 | 16:27,HCT,35
273 | 17:07,DiasABP,57
274 | 17:07,FiO2,0.7
275 | 17:07,HR,56
276 | 17:07,MAP,75
277 | 17:07,SysABP,109
278 | 17:07,Temp,35.5
279 | 17:07,Urine,0
280 | 17:07,Weight,90.6
281 | 18:07,DiasABP,53
282 | 18:07,FiO2,0.7
283 | 18:07,HR,57
284 | 18:07,MAP,69
285 | 18:07,SysABP,100
286 | 18:07,Temp,35.6
287 | 18:07,Urine,0
288 | 18:07,Weight,90.6
289 | 19:07,DiasABP,54
290 | 19:07,FiO2,0.7
291 | 19:07,HR,59
292 | 19:07,MAP,71
293 | 19:07,SysABP,106
294 | 19:07,Temp,35.9
295 | 19:07,Urine,0
296 | 19:07,Weight,90.6
297 | 20:07,DiasABP,49
298 | 20:07,FiO2,0.7
299 | 20:07,GCS,14
300 | 20:07,HR,57
301 | 20:07,MAP,64
302 | 20:07,SysABP,95
303 | 20:07,Temp,36
304 | 20:07,Urine,0
305 | 20:07,Weight,90.6
306 | 20:37,DiasABP,51
307 | 20:37,HR,61
308 | 20:37,MAP,67
309 | 20:37,SysABP,100
310 | 20:37,Temp,35.9
311 | 20:37,Weight,90.6
312 | 20:48,pH,7.39
313 | 20:48,PaCO2,56
314 | 20:48,PaO2,68
315 | 20:48,SaO2,94
316 | 21:07,DiasABP,58
317 | 21:07,HR,67
318 | 21:07,MAP,77
319 | 21:07,SysABP,112
320 | 21:07,Temp,35.9
321 | 21:07,Urine,0
322 | 21:07,Weight,90.6
323 | 21:27,HCT,33.5
324 | 22:07,DiasABP,52
325 | 22:07,HR,60
326 | 22:07,MAP,68
327 | 22:07,SysABP,97
328 | 22:07,Temp,36
329 | 22:07,Urine,0
330 | 22:07,Weight,90.6
331 | 23:07,DiasABP,53
332 | 23:07,HR,63
333 | 23:07,MAP,70
334 | 23:07,SysABP,101
335 | 23:07,Temp,36.2
336 | 23:07,Urine,0
337 | 23:07,Weight,90.6
338 | 24:07,DiasABP,51
339 | 24:07,GCS,14
340 | 24:07,HR,64
341 | 24:07,MAP,67
342 | 24:07,SysABP,97
343 | 24:07,Temp,36.4
344 | 24:07,Urine,0
345 | 24:07,Weight,90.6
346 | 25:07,DiasABP,50
347 | 25:07,HR,74
348 | 25:07,MAP,65
349 | 25:07,SysABP,93
350 | 25:07,Temp,36.6
351 | 25:07,Urine,0
352 | 25:07,Weight,90.6
353 | 26:07,DiasABP,52
354 | 26:07,HR,77
355 | 26:07,MAP,68
356 | 26:07,SysABP,96
357 | 26:07,Temp,36.7
358 | 26:07,Urine,0
359 | 26:07,Weight,90.6
360 | 27:07,DiasABP,46
361 | 27:07,HR,76
362 | 27:07,MAP,59
363 | 27:07,SysABP,83
364 | 27:07,Temp,36.9
365 | 27:07,Urine,0
366 | 27:07,Weight,90.6
367 | 28:07,DiasABP,49
368 | 28:07,GCS,14
369 | 28:07,HR,82
370 | 28:07,MAP,64
371 | 28:07,SysABP,96
372 | 28:07,Temp,37
373 | 28:07,Urine,0
374 | 28:07,Weight,90.6
375 | 29:07,DiasABP,49
376 | 29:07,HR,83
377 | 29:07,MAP,61
378 | 29:07,SysABP,93
379 | 29:07,Temp,37.1
380 | 29:07,Urine,0
381 | 29:07,Weight,90.6
382 | 30:07,DiasABP,47
383 | 30:07,HR,85
384 | 30:07,MAP,60
385 | 30:07,SysABP,83
386 | 30:07,Temp,37.3
387 | 30:07,Urine,0
388 | 30:07,Weight,90.6
389 | 31:07,DiasABP,46
390 | 31:07,HR,91
391 | 31:07,MAP,64
392 | 31:07,SysABP,97
393 | 31:07,Temp,37.4
394 | 31:07,Urine,0
395 | 31:07,Weight,90.6
396 | 32:07,DiasABP,48
397 | 32:07,GCS,14
398 | 32:07,HR,89
399 | 32:07,MAP,61
400 | 32:07,SysABP,91
401 | 32:07,Temp,37.6
402 | 32:07,Urine,0
403 | 32:07,Weight,90.6
404 | 33:07,DiasABP,47
405 | 33:07,HR,84
406 | 33:07,MAP,60
407 | 33:07,SysABP,84
408 | 33:07,Temp,37.6
409 | 33:07,Urine,0
410 | 33:07,Weight,90.6
411 | 34:07,DiasABP,46
412 | 34:07,HR,84
413 | 34:07,MAP,61
414 | 34:07,SysABP,95
415 | 34:07,Temp,37.7
416 | 34:07,Urine,0
417 | 34:07,Weight,90.6
418 | 34:38,pH,7.48
419 | 34:38,PaCO2,37
420 | 34:38,PaO2,80
421 | 34:38,SaO2,97
422 | 35:07,DiasABP,44
423 | 35:07,HR,78
424 | 35:07,MAP,56
425 | 35:07,NIDiasABP,40
426 | 35:07,NIMAP,62.67
427 | 35:07,NISysABP,108
428 | 35:07,SysABP,77
429 | 35:07,Temp,37.4
430 | 35:07,Urine,0
431 | 35:07,Weight,90.6
432 | 35:37,Bilirubin,37.3
433 | 35:37,BUN,105
434 | 35:37,Creatinine,3.6
435 | 35:37,Glucose,60
436 | 35:37,HCO3,26
437 | 35:37,HCT,33.3
438 | 35:37,Mg,3
439 | 35:37,Platelets,71
440 | 35:37,K,4.9
441 | 35:37,Na,142
442 | 35:37,WBC,16.2
443 | 36:07,DiasABP,50
444 | 36:07,FiO2,0.7
445 | 36:07,GCS,14
446 | 36:07,HR,77
447 | 36:07,MAP,63
448 | 36:07,NIDiasABP,49
449 | 36:07,NIMAP,68.67
450 | 36:07,NISysABP,108
451 | 36:07,SysABP,88
452 | 36:07,Temp,37.2
453 | 36:07,Urine,14
454 | 36:07,Weight,90.6
455 | 37:07,DiasABP,44
456 | 37:07,HR,69
457 | 37:07,MAP,57
458 | 37:07,SysABP,81
459 | 37:07,Temp,36.9
460 | 37:07,Urine,0
461 | 37:07,Weight,90.6
462 | 37:37,DiasABP,44
463 | 37:37,HR,69
464 | 37:37,MAP,58
465 | 37:37,SysABP,83
466 | 37:37,Temp,36.8
467 | 37:37,Weight,90.6
468 | 37:52,DiasABP,48
469 | 37:52,HR,67
470 | 37:52,MAP,64
471 | 37:52,SysABP,91
472 | 37:52,Temp,36.6
473 | 37:52,Weight,90.6
474 | 38:07,DiasABP,41
475 | 38:07,HR,67
476 | 38:07,MAP,57
477 | 38:07,SysABP,85
478 | 38:07,Temp,36.6
479 | 38:07,Urine,0
480 | 38:07,Weight,90.6
481 | 39:07,DiasABP,48
482 | 39:07,FiO2,0.7
483 | 39:07,HR,70
484 | 39:07,MAP,68
485 | 39:07,NIDiasABP,46
486 | 39:07,NIMAP,67.33
487 | 39:07,NISysABP,110
488 | 39:07,SysABP,100
489 | 39:07,Temp,36.3
490 | 39:07,Urine,0
491 | 39:07,Weight,90.6
492 | 40:07,DiasABP,57
493 | 40:07,GCS,14
494 | 40:07,HR,69
495 | 40:07,MAP,74
496 | 40:07,SysABP,104
497 | 40:07,Temp,36
498 | 40:07,Urine,0
499 | 40:07,Weight,90.6
500 | 42:07,DiasABP,58
501 | 42:07,HR,63
502 | 42:07,MAP,76
503 | 42:07,SysABP,106
504 | 42:07,Urine,0
505 | 42:07,Weight,90.6
506 | 43:07,DiasABP,48
507 | 43:07,HR,61
508 | 43:07,MAP,62
509 | 43:07,SysABP,84
510 | 43:07,Weight,90.6
511 | 44:07,DiasABP,61
512 | 44:07,FiO2,0.7
513 | 44:07,GCS,10
514 | 44:07,HR,61
515 | 44:07,MAP,81
516 | 44:07,SysABP,113
517 | 44:07,Temp,35.6
518 | 44:07,Urine,0
519 | 44:07,Weight,90.6
520 | 45:07,DiasABP,59
521 | 45:07,HCT,31.6
522 | 45:07,HR,61
523 | 45:07,MAP,79
524 | 45:07,Platelets,57
525 | 45:07,SysABP,109
526 | 45:07,Urine,5
527 | 45:07,WBC,16.7
528 | 45:07,Weight,90.6
529 | 46:07,DiasABP,60
530 | 46:07,HR,59
531 | 46:07,MAP,79
532 | 46:07,SysABP,109
533 | 46:07,Urine,0
534 | 46:07,Weight,90.6
535 | 47:07,DiasABP,53
536 | 47:07,HR,58
537 | 47:07,MAP,67
538 | 47:07,SysABP,91
539 | 47:07,Temp,35.1
540 | 47:07,Weight,90.6
541 |
--------------------------------------------------------------------------------
/src/test/resources/data/synthetic/simple/simple_ts_data:
--------------------------------------------------------------------------------
1 | 1,1,1,1
2 | 1,1,1,1
3 | 1,1,1,1
4 | 1,1,1,1
5 | 1,1,1,1
6 | 0,0,0,0
7 | 0,0,0,0
8 | 0,0,0,0
9 | 0,0,0,0
10 | 0,0,0,0
11 | 1,1,1,1
12 | 1,1,1,1
13 | 1,1,1,1
14 | 1,1,1,1
15 | 1,1,1,1
16 | 0,0,0,0
17 | 0,0,0,0
18 | 0,0,0,0
19 | 0,0,0,0
20 | 0,0,0,0
21 | 1,1,1,1
22 | 1,1,1,1
23 | 1,1,1,1
24 | 1,1,1,1
25 | 1,1,1,1
26 | 0,0,0,0
27 | 0,0,0,0
28 | 0,0,0,0
29 | 0,0,0,0
30 | 0,0,0,0
31 | 1,1,1,1
32 | 1,1,1,1
33 | 1,1,1,1
34 | 1,1,1,1
35 | 1,1,1,1
36 | 0,0,0,0
37 | 0,0,0,0
38 | 0,0,0,0
39 | 0,0,0,0
40 | 0,0,0,0
41 |
--------------------------------------------------------------------------------
/src/test/resources/data/synthetic/simple/simple_ts_labels.txt:
--------------------------------------------------------------------------------
1 | 1
2 | 1
3 | 1
4 | 1
5 | 1
6 | 0
7 | 0
8 | 0
9 | 0
10 | 0
11 | 1
12 | 1
13 | 1
14 | 1
15 | 1
16 | 0
17 | 0
18 | 0
19 | 0
20 | 0
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 | 0
27 | 0
28 | 0
29 | 0
30 | 0
31 | 1
32 | 1
33 | 1
34 | 1
35 | 1
36 | 0
37 | 0
38 | 0
39 | 0
40 | 0
41 |
--------------------------------------------------------------------------------
/src/test/resources/data/synthetic/simple_2/simple_2_data.txt:
--------------------------------------------------------------------------------
1 | 1,0,0,1
2 | 1,0,0,1
3 | 1,0,0,1
4 | 1,0,0,1
5 | 1,0,0,1
6 | 0,1,1,0
7 | 0,1,1,0
8 | 0,1,1,0
9 | 0,1,1,0
10 | 0,1,1,0
11 | 1,0,0,1
12 | 1,0,0,1
13 | 1,0,0,1
14 | 1,0,0,1
15 | 1,0,0,1
16 | 0,1,1,0
17 | 0,1,1,0
18 | 0,1,1,0
19 | 0,1,1,0
20 | 0,1,1,0
21 | 1,0,0,1
22 | 1,0,0,1
23 | 1,0,0,1
24 | 1,0,0,1
25 | 1,0,0,1
26 | 0,1,1,0
27 | 0,1,1,0
28 | 0,1,1,0
29 | 0,1,1,0
30 | 0,1,1,0
31 | 1,0,0,1
32 | 1,0,0,1
33 | 1,0,0,1
34 | 1,0,0,1
35 | 1,0,0,1
36 | 0,1,1,0
37 | 0,1,1,0
38 | 0,1,1,0
39 | 0,1,1,0
40 | 0,1,1,0
41 |
--------------------------------------------------------------------------------
/src/test/resources/data/synthetic/simple_2/simple_2_labels.txt:
--------------------------------------------------------------------------------
1 | 1
2 | 1
3 | 1
4 | 1
5 | 1
6 | 0
7 | 0
8 | 0
9 | 0
10 | 0
11 | 1
12 | 1
13 | 1
14 | 1
15 | 1
16 | 0
17 | 0
18 | 0
19 | 0
20 | 0
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 | 0
27 | 0
28 | 0
29 | 0
30 | 0
31 | 1
32 | 1
33 | 1
34 | 1
35 | 1
36 | 0
37 | 0
38 | 0
39 | 0
40 | 0
41 |
--------------------------------------------------------------------------------
/src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_data.txt:
--------------------------------------------------------------------------------
1 | 1,0,0,1
2 | 1,0,0,1
3 | 1,0,0,1
4 | 1,0,0,1
5 | 1,0,0,1
6 | 0,1
7 | 0,1
8 | 0,1
9 | 0,1
10 | 0,1
11 | 1,0,0,1
12 | 1,0,0,1
13 | 1,0,0,1
14 | 1,0,0,1
15 | 1,0,0,1
16 | 0,1
17 | 0,1
18 | 0,1
19 | 0,1
20 | 0,1
21 | 1,0,0,1
22 | 1,0,0,1
23 | 1,0,0,1
24 | 1,0,0,1
25 | 1,0,0,1
26 | 0,1
27 | 0,1
28 | 0,1
29 | 0,1
30 | 0,1
31 | 1,0,0,1
32 | 1,0,0,1
33 | 1,0,0,1
34 | 1,0,0,1
35 | 1,0,0,1
36 | 0,1
37 | 0,1
38 | 0,1
39 | 0,1
40 | 0,1
41 |
--------------------------------------------------------------------------------
/src/test/resources/data/synthetic/simple_3_uneven/simple_3_uneven_labels.txt:
--------------------------------------------------------------------------------
1 | 1
2 | 1
3 | 1
4 | 1
5 | 1
6 | 0
7 | 0
8 | 0
9 | 0
10 | 0
11 | 1
12 | 1
13 | 1
14 | 1
15 | 1
16 | 0
17 | 0
18 | 0
19 | 0
20 | 0
21 | 1
22 | 1
23 | 1
24 | 1
25 | 1
26 | 0
27 | 0
28 | 0
29 | 0
30 | 0
31 | 1
32 | 1
33 | 1
34 | 1
35 | 1
36 | 0
37 | 0
38 | 0
39 | 0
40 | 0
41 |
--------------------------------------------------------------------------------
/src/test/resources/physionet_sample_data.txt:
--------------------------------------------------------------------------------
1 | Time,Parameter,Value
2 | 00:00,RecordID,135361
3 | 00:00,Age,49
4 | 00:00,Gender,1
5 | 00:00,Height,-1
6 | 00:00,ICUType,3
7 | 00:00,Weight,-1
8 | 03:17,Albumin,4.1
9 | 03:17,ALP,77
10 | 03:17,ALT,38
11 | 03:17,AST,54
12 | 03:17,Bilirubin,1.3
13 | 03:17,BUN,10
14 | 03:17,Creatinine,0.7
15 | 03:17,Glucose,87
16 | 03:17,HCO3,24
17 | 03:17,HCT,36.4
18 | 03:17,Mg,1.6
19 | 03:17,Platelets,212
20 | 03:17,K,4.4
21 | 03:17,Na,140
22 | 03:17,WBC,8.2
23 | 25:49,Albumin,3.9
24 | 25:49,ALP,74
25 | 25:49,ALT,64
26 | 25:49,AST,122
27 | 25:49,Bilirubin,1.7
28 | 25:49,BUN,4
29 | 25:49,Creatinine,0.6
30 | 25:49,Glucose,88
31 | 25:49,HCO3,24
32 | 25:49,HCT,34.4
33 | 25:49,Mg,1.7
34 | 25:49,Platelets,179
35 | 25:49,K,3.3
36 | 25:49,Na,139
37 | 25:49,WBC,6.6
38 |
--------------------------------------------------------------------------------
/src/test/resources/physionet_schema.txt:
--------------------------------------------------------------------------------
1 | @RELATION UnitTest_PhysioNet_Schema
2 | @DELIMITER ,
3 | @MISSING_VALUE -1
4 |
5 |
6 | @ATTRIBUTE recordid NOMINAL DESCRIPTOR !SKIP !ZERO
7 | @ATTRIBUTE age NUMERIC DESCRIPTOR !COPY !AVG
8 | @ATTRIBUTE gender NUMERIC DESCRIPTOR !NORMALIZE !ZERO
9 | @ATTRIBUTE height NUMERIC DESCRIPTOR !NORMALIZE !AVG
10 | @ATTRIBUTE weight NUMERIC DESCRIPTOR !NORMALIZE !AVG
11 | @ATTRIBUTE icutype NUMERIC DESCRIPTOR !NORMALIZE !ZERO
12 |
13 |
14 |
15 | @ATTRIBUTE albumin NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
16 |
17 | @ATTRIBUTE alp NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
18 | @ATTRIBUTE alt NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
19 | @ATTRIBUTE ast NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
20 | @ATTRIBUTE bilirubin NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
21 |
22 | @ATTRIBUTE bun NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
23 | @ATTRIBUTE cholesterol NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
24 | @ATTRIBUTE creatinine NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
25 | @ATTRIBUTE diasabp NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
26 |
27 | @ATTRIBUTE fio2 NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
28 | @ATTRIBUTE gcs NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
29 | @ATTRIBUTE glucose NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
30 | @ATTRIBUTE hco3 NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
31 |
32 | @ATTRIBUTE hct NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
33 | @ATTRIBUTE hr NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
34 | @ATTRIBUTE k NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
35 | @ATTRIBUTE lactate NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
36 |
37 | @ATTRIBUTE mg NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
38 | @ATTRIBUTE map NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
39 | @ATTRIBUTE mechvent NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
40 | @ATTRIBUTE na NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
41 |
42 | @ATTRIBUTE nidiasabp NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
43 | @ATTRIBUTE nimap NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
44 | @ATTRIBUTE nisysabp NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
45 | @ATTRIBUTE paco2 NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
46 |
47 | @ATTRIBUTE pao2 NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
48 | @ATTRIBUTE ph NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
49 | @ATTRIBUTE platelets NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
50 | @ATTRIBUTE resprate NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
51 |
52 | @ATTRIBUTE sao2 NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
53 | @ATTRIBUTE sysabp NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
54 | @ATTRIBUTE temp NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
55 | @ATTRIBUTE troponini NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
56 |
57 | @ATTRIBUTE troponint NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
58 | @ATTRIBUTE urine NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
59 | @ATTRIBUTE wbc NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
60 | @ATTRIBUTE weight NUMERIC TIMESERIES !NORMALIZE !PAD_TAIL_WITH_ZEROS
61 |
62 |
63 |
64 |
--------------------------------------------------------------------------------
/src/test/resources/physionet_schema_zmzuv_0.txt:
--------------------------------------------------------------------------------
1 | @RELATION UnitTest_PhysioNet_Schema_ZUZUV
2 | @DELIMITER ,
3 | @MISSING_VALUE -1
4 |
5 |
6 | @ATTRIBUTE recordid NOMINAL DESCRIPTOR !SKIP !ZERO
7 | @ATTRIBUTE age NUMERIC DESCRIPTOR !ZEROMEAN_ZEROUNITVARIANCE !AVG
8 | @ATTRIBUTE gender NUMERIC DESCRIPTOR !ZEROMEAN_ZEROUNITVARIANCE !ZERO
9 | @ATTRIBUTE height NUMERIC DESCRIPTOR !ZEROMEAN_ZEROUNITVARIANCE !AVG
10 | @ATTRIBUTE weight NUMERIC DESCRIPTOR !ZEROMEAN_ZEROUNITVARIANCE !AVG
11 | @ATTRIBUTE icutype NUMERIC DESCRIPTOR !ZEROMEAN_ZEROUNITVARIANCE !ZERO
12 |
13 |
14 |
15 | @ATTRIBUTE albumin NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
16 |
17 | @ATTRIBUTE alp NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
18 | @ATTRIBUTE alt NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
19 | @ATTRIBUTE ast NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
20 | @ATTRIBUTE bilirubin NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
21 |
22 | @ATTRIBUTE bun NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
23 | @ATTRIBUTE cholesterol NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
24 | @ATTRIBUTE creatinine NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
25 | @ATTRIBUTE diasabp NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
26 |
27 | @ATTRIBUTE fio2 NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
28 | @ATTRIBUTE gcs NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
29 | @ATTRIBUTE glucose NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
30 | @ATTRIBUTE hco3 NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
31 |
32 | @ATTRIBUTE hct NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
33 | @ATTRIBUTE hr NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
34 | @ATTRIBUTE k NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
35 | @ATTRIBUTE lactate NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
36 |
37 | @ATTRIBUTE mg NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
38 | @ATTRIBUTE map NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
39 | @ATTRIBUTE mechvent NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
40 | @ATTRIBUTE na NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
41 |
42 | @ATTRIBUTE nidiasabp NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
43 | @ATTRIBUTE nimap NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
44 | @ATTRIBUTE nisysabp NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
45 | @ATTRIBUTE paco2 NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
46 |
47 | @ATTRIBUTE pao2 NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
48 | @ATTRIBUTE ph NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
49 | @ATTRIBUTE platelets NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
50 | @ATTRIBUTE resprate NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
51 |
52 | @ATTRIBUTE sao2 NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
53 | @ATTRIBUTE sysabp NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
54 | @ATTRIBUTE temp NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
55 | @ATTRIBUTE troponini NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
56 |
57 | @ATTRIBUTE troponint NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
58 | @ATTRIBUTE urine NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
59 | @ATTRIBUTE wbc NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
60 | @ATTRIBUTE weight NUMERIC TIMESERIES !ZEROMEAN_ZEROUNITVARIANCE !PAD_TAIL_WITH_ZEROS
61 |
62 |
63 |
64 |
--------------------------------------------------------------------------------