├── .gitignore ├── LICENSE ├── README.md ├── bikes_hours_model.json ├── decision_tree.js ├── decision_tree_score_proc.sql ├── decision_tree_score_table_function.sql ├── decision_tree_train.sql ├── htm ├── htm.sql ├── scalar_encoder.sql └── sparse_to_dense.sql ├── k-means ├── kmeans_stored_procedure.sql ├── merge_clusters_udtf.sql └── update_clusters_udtf.sql ├── model_tracking_tables.sql ├── package.json └── snowflake-functions.js /.gitignore: -------------------------------------------------------------------------------- 1 | 2 | node_modules/** 3 | -------------------------------------------------------------------------------- /LICENSE: -------------------------------------------------------------------------------- 1 | MIT License 2 | 3 | Copyright (c) 2019 James Weakley 4 | 5 | Permission is hereby granted, free of charge, to any person obtaining a copy 6 | of this software and associated documentation files (the "Software"), to deal 7 | in the Software without restriction, including without limitation the rights 8 | to use, copy, modify, merge, publish, distribute, sublicense, and/or sell 9 | copies of the Software, and to permit persons to whom the Software is 10 | furnished to do so, subject to the following conditions: 11 | 12 | The above copyright notice and this permission notice shall be included in all 13 | copies or substantial portions of the Software. 14 | 15 | THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR 16 | IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, 17 | FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE 18 | AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER 19 | LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, 20 | OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 21 | SOFTWARE. 
22 | -------------------------------------------------------------------------------- /README.md: -------------------------------------------------------------------------------- 1 | # snowflake-ml 2 | Machine Learning in Snowflake 3 | -------------------------------------------------------------------------------- /decision_tree.js: -------------------------------------------------------------------------------- 1 | const snowflakeFunctions = require('./snowflake-functions.js'); 2 | var snowflake; 3 | const fs = require('fs'); 4 | var stream = fs.createWriteStream("main.log", {flags:'a'}); 5 | stream.write(new Date().toISOString()+"\r\n"); 6 | 7 | 8 | 9 | function main() { 10 | var runId=null; 11 | var snowflakeConnectPromise = snowflakeFunctions.snowflakeConnect(); 12 | snowflakeConnectPromise.then(function(snowflakeConnection) { 13 | stream.write("Connected to snowflake\r\n"); 14 | snowflake=snowflakeConnection; 15 | var snowflakeQuery = snowflakeFunctions.snowflakeQueryPromise('select ml_model_runs_sequence.nextval as RUNID',[]); 16 | return snowflakeQuery.then(function(rows) { 17 | runId=rows[0].RUNID; 18 | stream.write("runId: "+runId+"\r\n"); 19 | return {snowflake:snowflake,runId:runId} 20 | }, function(err) { 21 | stream.write(err); 22 | }); 23 | 24 | }, function(err) { 25 | console.error(err); 26 | }).then(function(return_object){ 27 | var runId=return_object.runId; 28 | var snowflake=return_object.snowflake; 29 | 30 | stream.write("inserting ml_model_runs entry\r\n"); 31 | var training_params={}; 32 | var default_training_parameters={}; 33 | default_training_parameters.cv_limit=10; 34 | default_training_parameters.total_count_limit=1; 35 | default_training_parameters.cv_decimal_places=5; 36 | default_training_parameters.average_decimal_places=2; 37 | default_training_parameters.maxDepth=15; 38 | default_training_parameters.maxFeatures=8; 39 | default_training_parameters.debugMessages=false; 40 | 41 | var 
training_parameters={...default_training_parameters,...training_params}; 42 | 43 | var snowflakeQuery = snowflakeFunctions.snowflakeQueryPromise( 44 | "insert into ml_model_runs(run_id,table_name,algorithm,training_parameters,start_time) select :1,:2,:3,parse_json('"+JSON.stringify(training_parameters)+"'),current_timestamp::TIMESTAMP_NTZ", 45 | [runId, 'bikes_hours_eng','decision_tree']); 46 | snowflakeQuery.then(function(rows) { 47 | }, function(err) { 48 | console.log(err); 49 | }); 50 | 51 | 52 | 53 | var inputObject={tableName:'bikes_hours_eng', 54 | whereClause:'1=1', 55 | whereClauseBindings:[], 56 | target:'CNT', 57 | remainingCols:'holiday,weekday,workingday,weathersit,temp,atemp,hum'.split(','), 58 | depth:0, 59 | trainingParameters:training_parameters, 60 | __dirname:__dirname}; 61 | 62 | var finished=false; 63 | var model=leafCalc(inputObject,function(result){ 64 | var snowflakeQuery = snowflakeFunctions.snowflakeQueryPromise( 65 | "update ml_model_runs set end_time=current_timestamp::TIMESTAMP_NTZ, model_object=parse_json('"+JSON.stringify(result)+"') where run_id=?", 66 | [runId]); 67 | snowflakeQuery.then(function(rows) { 68 | stream.write("Finished!\r\n"); 69 | stream.write(new Date().toISOString()+"\r\n"); 70 | stream.end(); 71 | }, function(err) { 72 | stream.write(err); 73 | stream.end(); 74 | }); 75 | 76 | }); 77 | }); 78 | } 79 | 80 | 81 | function leafCalc(input,done){ 82 | 83 | const spawn = require('threads').spawn; 84 | const fs = require('fs'); 85 | var tableName=input.tableName; 86 | var whereClause=input.whereClause; 87 | var whereClauseBindings=input.whereClauseBindings; 88 | var target=input.target; 89 | var remainingCols=input.remainingCols; 90 | var depth=input.depth; 91 | var trainingParameters=input.trainingParameters; 92 | 93 | var stream = fs.createWriteStream("main_"+depth+".log", {flags:'a'}); 94 | 95 | const snowflakeFunctions = require(input.__dirname+'/snowflake-functions.js'); 96 | 97 | var return_object={}; 98 | if 
(trainingParameters.debugMessages){ 99 | return_object.cumulative_where_clause=whereClause; 100 | return_object.cumulative_where_clause_bindings=whereClauseBindings; 101 | } 102 | 103 | var sqlText="select stddev("+target+") as target_stddev,"+ 104 | "avg("+target+") as target_avg,"+ 105 | "case when target_avg is not null and target_avg!=0 then target_stddev/target_avg*100 else 0 end as coef_of_variation,"+ 106 | "count(*) as target_count "+ 107 | "from "+tableName+" where "+whereClause; 108 | 109 | var snowflakeQuery = snowflakeFunctions.snowflakeQueryPromise( 110 | sqlText, 111 | whereClauseBindings); 112 | snowflakeQuery.then(function(rows) { 113 | stream.write("Got first query",rows.length); 114 | var results=rows[0]; 115 | var averageBelow=results.TARGET_AVG; 116 | 117 | 118 | if (averageBelow==null){ 119 | return done(null); // if there are no results below this value, return null so that this node can be removed 120 | } 121 | else{ 122 | averageBelow=averageBelow.toFixed(trainingParameters.average_decimal_places); 123 | } 124 | if (depth >= trainingParameters.maxDepth){ 125 | return_object.prediction=averageBelow; 126 | if (trainingParameters.debugMessages){ 127 | return_object.stopped_on="max_depth_reached (limit "+trainingParameters.maxDepth+", value "+depth+")"; 128 | } 129 | return done(return_object); 130 | } 131 | if (remainingCols.length<1){ 132 | return_object.prediction=averageBelow; 133 | if (trainingParameters.debugMessages){ 134 | return_object.stopped_on="last_attribute"; 135 | } 136 | return done(return_object); 137 | } 138 | var target_count=results.TARGET_COUNT; 139 | if (target_count <= 1 || target_count <= trainingParameters.total_count_limit){ 140 | return_object.prediction=averageBelow; 141 | if (trainingParameters.debugMessages){ 142 | return_object.stopped_on="below_child_record_count_limit (limit "+trainingParameters.total_count_limit+", value "+target_count+")"; 143 | } 144 | return done(return_object); 145 | } 146 | var 
coefficientOfVariation=results.COEF_OF_VARIATION.toFixed(trainingParameters.cv_decimal_places); 147 | if (coefficientOfVariation < trainingParameters.cv_limit){ 148 | return_object.prediction=averageBelow; 149 | if (trainingParameters.debugMessages){ 150 | return_object.stopped_on="below_cv_threshold (limit "+trainingParameters.cv_limit+", value "+coefficientOfVariation+")"; 151 | } 152 | return done(return_object); 153 | } 154 | var stddevBeforeSplit = results.TARGET_STDDEV; 155 | if (target_count==0){ 156 | throw "The number of records during leaf node calculation was zero, this should not happen and means there's a bug in the stored proc"; 157 | } 158 | if (stddevBeforeSplit==0){ 159 | throw "The standard deviation during leaf node calculation was zero, this should not happen and means there's a bug in the stored proc"; 160 | } 161 | var columnQueries=[]; 162 | for (var i=0;i?",whereClauseBindings:[1,4,2],remainingCols:[''a'',''b'',''c'']} 59 | * 60 | * Parameters: 61 | * - tableName : The name of the table/view containing the source data 62 | * - treeNodes : An array of nodes at the current level. 
Each node object tracks its own cumulative 63 | * "where" clause, associated bindings as well as a list of remaining attributes 64 | * from the original list 65 | * - target : The target attribute from the source table 66 | * - depth : The current depth of the tree 67 | * - trainingParameters : Parameters that impact the training process 68 | * - multiwayColumnDistinctValues: A precalculated map of all distinct values for each attribute selected as multi-way split 69 | */ 70 | function levelCalc(tableName,treeNodes,target,depth,trainingParameters,multiwayColumnDistinctValues){ 71 | var results; 72 | 73 | var currentBranchQueries=[]; 74 | var currentBranchQueryBindings=[]; 75 | 76 | // The first query collects some important information about each node: 77 | // 1) The standard deviation of all the target values from this node down, as we will pick the branch that reduces this value the most 78 | // 2) The average value of all the target values from this node down, as ultimately average is used as a predictor when we reach the leaf 79 | // 3) The coefficient of variation, can be used to stop building when it gets too small 80 | // 4) The number of target values from this node down, can be used to stop building when it gets too small 81 | // 5) For each potential branch below (from the list of remaining columns), the median value for columns using binary splits 82 | 83 | for (var i=0;i0){ 91 | remainingColumnMediansQuery=",TO_JSON(OBJECT_CONSTRUCT("+remainingColumnMedians.join(",")+")) as medians"; 92 | } 93 | currentBranchQueries.push("select "+i+" as index,"+ 94 | "stddev("+target+") as target_stddev,"+ 95 | "avg("+target+") as target_avg,"+ 96 | "case when target_avg is not null and target_avg!=0 then target_stddev/target_avg*100 else 0 end as coef_of_variation,"+ 97 | "count(*) as target_count"+ 98 | remainingColumnMediansQuery+ 99 | " from "+tableName+" where "+treeNodes[i].whereClause); 100 | 
currentBranchQueryBindings=currentBranchQueryBindings.concat(treeNodes[i].whereClauseBindings); 101 | } 102 | try{ 103 | results = snowflake.execute({ 104 | sqlText: currentBranchQueries.join('' UNION ''), 105 | binds: currentBranchQueryBindings 106 | }); 107 | } 108 | catch(e){ 109 | throw "Error executing first tree split query: "+e.message+". Bindings: "+currentBranchQueryBindings+", query: "+currentBranchQueries.join('' UNION ''); 110 | } 111 | 112 | var columnQueries=[]; 113 | var columnQueryBindings=[]; 114 | var medianValues={}; 115 | while (results.next()){ 116 | 117 | var index=results.getColumnValue(1); 118 | var stddevBeforeSplit = results.getColumnValue(2); 119 | var averageBelow=results.getColumnValue(3); 120 | var coefficientOfVariation=results.getColumnValue(4); 121 | var target_count=results.getColumnValue(5); 122 | var node=treeNodes[index]; 123 | var medianValuesRaw=results.getColumnValue(6); 124 | if (medianValuesRaw!=null){ 125 | medianValues[index]=JSON.parse(medianValuesRaw.replace(new RegExp(''undefined'', ''g''), ''null'')); 126 | } 127 | if (averageBelow==null){ 128 | treeNodes[index]=null; 129 | /*if (training_parameters.debugMessages){ 130 | node.stopped_on="no results below"; 131 | }*/ 132 | continue; 133 | } 134 | else{ 135 | averageBelow=averageBelow.toFixed(trainingParameters.average_decimal_places); 136 | } 137 | if (training_parameters.debugMessages){ 138 | node.averageBelow=averageBelow; 139 | node.stddevBeforeSplit=stddevBeforeSplit; 140 | node.coefficientOfVariation=coefficientOfVariation; 141 | node.target_count=target_count; 142 | } 143 | 144 | if (depth >= trainingParameters.maxDepth){ 145 | node.prediction=averageBelow; 146 | if (training_parameters.debugMessages){ 147 | node.stopped_on="max_depth_reached (limit "+trainingParameters.maxDepth+", value "+depth+")"; 148 | } 149 | continue; 150 | } 151 | if (node.remainingBinaryCols.length + node.remainingMultiwayCols.length <1){ 152 | node.prediction=averageBelow; 153 | if 
(training_parameters.debugMessages){ 154 | node.stopped_on="last_attribute"; 155 | } 156 | continue; 157 | } 158 | if (target_count <= trainingParameters.total_count_limit){ 159 | node.prediction=averageBelow; 160 | if (training_parameters.debugMessages){ 161 | node.stopped_on="below_child_record_count_limit (limit "+trainingParameters.total_count_limit+", value "+target_count+")"; 162 | } 163 | continue; 164 | } 165 | coefficientOfVariation=coefficientOfVariation.toFixed(trainingParameters.cv_decimal_places); 166 | if (coefficientOfVariation < trainingParameters.cv_limit){ 167 | node.prediction=averageBelow; 168 | if (training_parameters.debugMessages){ 169 | node.stopped_on="below_cv_threshold (limit "+trainingParameters.cv_limit+", value "+coefficientOfVariation+")"; 170 | } 171 | continue; 172 | } 173 | if (depth > trainingParameters.maxDepth){ 174 | node.prediction=averageBelow; 175 | if (training_parameters.debugMessages){ 176 | node.stopped_on="depth "+trainingParameters.maxDepth+" exceeded by value "+depth+")"; 177 | } 178 | continue; 179 | } 180 | if (target_count==0){ 181 | throw "The number of records during leaf node calculation was zero, this should not happen and means there''s a bug in the stored proc"; 182 | } 183 | if (stddevBeforeSplit==0){ 184 | throw "The standard deviation during leaf node calculation was zero, this should not happen and means there''s a bug in the stored proc"; 185 | } 186 | var featuresUsed=0; 187 | 188 | // for each binary split column, add a query for each side of the median 189 | 190 | for (var i=0;i=?"; 301 | rightChildNode.whereClauseBindings=rightNodeBindings; 302 | rightChildNode.remainingBinaryCols=newRemainingBinaryCols.slice(0); 303 | rightChildNode.remainingMultiwayCols=node.remainingMultiwayCols.slice(0); 304 | 305 | // tree navigation attributes 306 | rightChildNode.selectionCriteriaAttribute=col; 307 | rightChildNode.selectionCriteriaPredicate=''>=''; 308 | rightChildNode.selectionCriteriaValue=medianValue; 309 
| 310 | node.children.push(leftChildNode); 311 | nextLevelNodes.push(leftChildNode); 312 | node.children.push(rightChildNode); 313 | nextLevelNodes.push(rightChildNode); 314 | } 315 | else{ 316 | var newRemainingMultiwayCols=node.remainingMultiwayCols.filter(function(value, index, arr){return value != col;}); 317 | var colDistinctValues=multiwayColumnDistinctValues[col]; 318 | for (var j=0;j 0){ 353 | levelCalc(tableName,nextLevelNodes,target,depth+1,trainingParameters,multiwayColumnDistinctValues); 354 | } 355 | } 356 | var binaryColumns=BINARY_COLS.split('','').filter(function(value, index, arr){return value.length>0;}); 357 | var multiwayColumns=MULTIWAY_COLS.split('','').filter(function(value, index, arr){return value.length>0;}); 358 | var results = snowflake.execute({ 359 | sqlText: "select ml_model_runs_sequence.nextval" 360 | }); 361 | results.next(); 362 | var default_training_parameters={}; 363 | default_training_parameters.cv_limit=10; 364 | default_training_parameters.total_count_limit=1; 365 | default_training_parameters.cv_decimal_places=5; 366 | default_training_parameters.average_decimal_places=2; 367 | default_training_parameters.maxDepth=15; 368 | default_training_parameters.maxFeatures=5; 369 | default_training_parameters.debugMessages=true; 370 | default_training_parameters.split_algorithm=''ID3''; 371 | 372 | var training_parameters={...default_training_parameters,...TRAINING_PARAMS}; 373 | training_parameters.binaryColumns=binaryColumns; 374 | training_parameters.multiwayColumns=multiwayColumns; 375 | 376 | var runId=results.getColumnValue(1); 377 | results = snowflake.execute({ 378 | sqlText: "insert into ml_model_runs(model_id,run_id,table_name,algorithm,training_parameters,start_time) select :1,:2,:3,:4,parse_json(''"+JSON.stringify(training_parameters)+"''),current_timestamp::TIMESTAMP_NTZ", 379 | binds: [MODEL_ID,runId, TABLE_NAME,''decision_tree''] 380 | }); 381 | 382 | // For multi-way columns (non-binary), we pre-calculate all distinct 
values at the outset 383 | var multiwayColumnDistinctValues={}; 384 | if (multiwayColumns.length>0){ 385 | var distinctValuesQueryComponents=[]; 386 | for (var i=0;i my.params.historyLength ) { 94 | my.activeCellHistory.length = my.params.historyLength; 95 | } 96 | // Save predicted active cells history 97 | my.predictedActiveCellHistory.unshift( my.predictedActiveCells ); 98 | if( my.predictedActiveCellHistory.length > my.params.historyLength ) { 99 | my.predictedActiveCellHistory.length = my.params.historyLength; 100 | } 101 | // Reset active cells 102 | for( c = 0; c < my.activeCells.length; c++ ) { 103 | cell = my.activeCells[c]; 104 | cell.active = false; 105 | cell.predictedActive = false; 106 | cell.distalLearnSegment = null; // Reset previous distal learn segment 107 | cell.apicalLearnSegment = null; // Reset previous apical learn segment 108 | // If cell is in a column, clear segment activity (this isn"t used for cells which feed SP) 109 | if( cell.column !== null ) { 110 | // Clear previous references to segment activity 111 | for( s = 0; s < cell.axonSynapses.length; s++ ) { 112 | synapse = cell.axonSynapses[s]; 113 | // Make sure we haven"t already processed this segment"s active synapses list 114 | if( synapse.segment.activeSynapses.length > 0 ) { 115 | // Save active synapses history, then clear in preparation for new input 116 | synapse.segment.activeSynapsesHistory.unshift( synapse.segment.activeSynapses ); 117 | if( synapse.segment.activeSynapsesHistory.length > my.params.historyLength ) { 118 | synapse.segment.activeSynapsesHistory.length = my.params.historyLength; 119 | } 120 | synapse.segment.activeSynapses = []; 121 | // Save connected synapses history, then clear in preparation for new input 122 | synapse.segment.connectedSynapsesHistory.unshift( synapse.segment.connectedSynapses ); 123 | if( synapse.segment.connectedSynapsesHistory.length > my.params.historyLength ) { 124 | synapse.segment.connectedSynapsesHistory.length = 
my.params.historyLength; 125 | } 126 | synapse.segment.connectedSynapses = []; 127 | // Save predicted active synapses history, then clear in preparation for new input 128 | synapse.segment.predictedActiveSynapsesHistory.unshift( synapse.segment.predictedActiveSynapses ); 129 | if( synapse.segment.predictedActiveSynapsesHistory.length > my.params.historyLength ) { 130 | synapse.segment.predictedActiveSynapsesHistory.length = my.params.historyLength; 131 | } 132 | synapse.segment.predictedActiveSynapses = []; 133 | } 134 | } 135 | } 136 | } 137 | // Clear active cells array 138 | my.activeCells = []; 139 | // Clear predicted active cells array 140 | my.predictedActiveCells = []; 141 | // Save learning cells history 142 | my.learningCellHistory.unshift( my.learningCells ); 143 | if( my.learningCellHistory.length > my.params.historyLength ) { 144 | my.learningCellHistory.length = my.params.historyLength; 145 | } 146 | // Reset learning cells 147 | for( c = 0; c < my.learningCells.length; c++ ) { 148 | cell = my.learningCells[c]; 149 | cell.learning = false; 150 | } 151 | // Clear learning cells array 152 | my.learningCells = []; 153 | return my; // Allows chaining function calls 154 | } 155 | 156 | /** 157 | * Resets the predictictive states after saving them to history 158 | */ 159 | this.resetPredictiveStates = function() { 160 | var c, cell; 161 | // Save predictive cells history 162 | my.predictiveCellHistory.unshift( my.predictiveCells ); 163 | if( my.predictiveCellHistory.length > my.params.historyLength ) { 164 | my.predictiveCellHistory.length = my.params.historyLength; 165 | } 166 | // Reset predictive cells 167 | for( c = 0; c < my.predictiveCells.length; c++ ) { 168 | cell = my.predictiveCells[c]; 169 | cell.predictive = false; 170 | cell.distalLearnSegment = null; // Reset previous distal learn segment 171 | cell.apicalLearnSegment = null; // Reset previous apical learn segment 172 | } 173 | // Clear predictive cells array 174 | my.predictiveCells = []; 
175 | return my; // Allows chaining function calls 176 | } 177 | 178 | /** 179 | * This function clears all references 180 | */ 181 | this.clear = function() { 182 | if( my !== null ) { 183 | my.cells = null; 184 | my.activeCells = null; 185 | my.predictedActiveCells = null; 186 | my.predictiveCells = null; 187 | my.learningCells = null; 188 | my.activeCellHistory = null; 189 | my.learningCellHistory = null; 190 | my.predictiveCellHistory = null; 191 | my.params = null; 192 | my = null; 193 | } 194 | } 195 | } 196 | 197 | function Column( index, cellIndex, cellsPerColumn, layer ) { 198 | 199 | this.index = index; // Index of this column in its layer 200 | this.layer = layer; // Layer containing this column 201 | 202 | this.overlapActive = 0; // Count of connections with active input cells 203 | this.overlapPredictedActive = 0; // Count of connections with correctly predicted input cells 204 | this.score = null; // How well column matches current input 205 | this.persistence = 0; 206 | 207 | // Used to calculate persistence decay 208 | this.initialPersistence = 0; 209 | this.lastUsedTimestep = 0; 210 | 211 | this.cells = []; // Array of cells in this column 212 | 213 | this.proximalSegment = new Segment( PROXIMAL, null, this ); // Feed-forward input 214 | this.bestDistalSegment = null; // Reference to distal segment best matching current input 215 | this.bestDistalSegmentHistory = []; // Reverse-order history of best matching distal segments 216 | 217 | this.bestApicalSegment = null; // Reference to apical segment best matching current input 218 | this.bestApicalSegmentHistory = []; // Reverse-order history of best matching apical segments 219 | 220 | // Create the cells for this column 221 | var c, cell; 222 | for( c = 0; c < cellsPerColumn; c++ ) { 223 | cell = new Cell( layer.cellMatrix, cellIndex + c, index, c, this ); 224 | this.cells.push( cell ); 225 | } 226 | 227 | } 228 | 229 | function Layer( params, layerType, proximalInputs, distalInput, apicalInput ) { 
230 | var my = this; 231 | 232 | this.columns = []; // Array of columns contained in this layer 233 | this.activeColumns = []; // Array of only the active columns 234 | 235 | this.type = ( ( typeof layerType === "undefined" ) ? TM_LAYER : layerType ); 236 | this.proximalInputs = ( ( typeof proximalInputs === "undefined" ) ? [] : proximalInputs ); // Feed-forward input cells 237 | this.distalInput = ( ( typeof distalInput === "undefined" ) ? null : distalInput ); // distal input cells 238 | this.apicalInput = ( ( typeof apicalInput === "undefined" ) ? null : apicalInput ); // apical input cells 239 | 240 | this.params = params; 241 | this.cellMatrix = new CellMatrix( this.params ); // A matrix containing all cells in the layer 242 | 243 | this.timestep = 0; // Used for tracking least recently used resources 244 | 245 | // Calculate the decay constant 246 | // (avoids repeating these calculation numerous times when simulating decay) 247 | if( ( typeof this.params.meanLifetime !== "undefined" ) && ( this.params.meanLifetime > 0 ) ) { 248 | this.params.decayConstant = ( 1.0 / parseFloat( this.params.meanLifetime ) ); 249 | } 250 | 251 | /** 252 | * This function adds a new column to the layer, and creates all of 253 | * the cells in it. If skipSpatialPooling is false, it also 254 | * establishes randomly distributed proximal connections with the 255 | * input cells. 
256 | */ 257 | this.addColumn = function() { 258 | var i, c, p, input, perm, synapse; 259 | var column = new Column( my.columns.length, my.columns.length * my.params.cellsPerColumn, my.params.cellsPerColumn, my ); 260 | 261 | // Randomly connect columns to input cells, for use in spatial pooling 262 | if( !my.params.skipSpatialPooling ) { 263 | for( i = 0; i < my.proximalInputs.length; i++ ) { 264 | input = my.proximalInputs[i]; 265 | for( c = 0; c < input.cells.length; c++ ) { 266 | p = Math.floor( Math.random() * 100 ); 267 | if( p < my.params.potentialPercent ) { 268 | perm = Math.floor( Math.random() * 100 ); 269 | if( perm > my.params.connectedPermanence ) { 270 | // Start with weak connections (for faster initial learning) 271 | perm = my.params.connectedPermanence; 272 | } 273 | synapse = new Synapse( input.cells[c], column.proximalSegment, perm ); 274 | } 275 | } 276 | } 277 | } 278 | 279 | my.columns.push( column ); 280 | return column; 281 | } 282 | 283 | // Add the columns if spatial pooling is enabled 284 | if( !this.params.skipSpatialPooling ) { 285 | for( var c = 0; c < this.params.columnCount; c++ ) { 286 | this.addColumn(); 287 | } 288 | } 289 | 290 | /** 291 | * This function clears all references 292 | */ 293 | this.clear = function() { 294 | if( my !== null ) { 295 | my.cellMatrix.clear(); 296 | my.cellMatrix = null; 297 | my.columns = null; 298 | my.activeColumns = null; 299 | my.proximalInputs = null; 300 | my.distalInput = null; 301 | my.apicalInput = null; 302 | my.params = null; 303 | my.timestep = null; 304 | my = null; 305 | } 306 | } 307 | 308 | } 309 | 310 | function Segment( type, cellRx, column ) { 311 | 312 | this.type = type; // proximal, distal, or apical 313 | this.cellRx = cellRx; // Receiving cell 314 | this.column = ( ( typeof column === "undefined" ) ? 
null : column ); 315 | 316 | this.lastUsedTimestep = 0; // Used to remove least recently used segment if max per cell is exceeded 317 | this.synapses = []; // Connections to axons of transmitting cells 318 | this.activeSynapses = []; // both connected and potential synapses 319 | this.connectedSynapses = []; // connected synapses only 320 | this.predictedActiveSynapses = []; // synapses receiving input from predicted active cells 321 | this.activeSynapsesHistory = []; // Reverse-order history of active synapses 322 | this.connectedSynapsesHistory = []; // Reverse-order history of connected synapses 323 | this.predictedActiveSynapsesHistory = []; // Reverse-order history of synapses receiving input from predicted active cells 324 | 325 | this.active = false; 326 | this.learning = false; 327 | 328 | if( this.cellRx !== null ) { 329 | if( this.type == DISTAL ) { 330 | this.cellRx.distalSegments.push( this ); 331 | } else if( this.type == APICAL ) { 332 | this.cellRx.apicalSegments.push( this ); 333 | } else { 334 | this.cellRx.proximalSegments.push( this ); 335 | } 336 | } 337 | } 338 | 339 | function Synapse( cellTx, segment, permanence ) { 340 | 341 | this.cellTx = cellTx; // Transmitting cell 342 | this.segment = segment; // Dendrite segment of receiving cell 343 | this.permanence = permanence; // Connection strength 344 | 345 | // Let the transmitting cell and receiving segment know about this synapse 346 | this.segment.synapses.push( this ); 347 | this.cellTx.axonSynapses.push( this ); 348 | } 349 | 350 | // Defaults to use for any param not specified: 351 | this.defaultParams = { 352 | "columnCount" : 2048, 353 | "cellsPerColumn" : 32, 354 | "activationThreshold" : 15, 355 | "initialPermanence" : 31, // % 356 | "connectedPermanence" : 40, // % 357 | "minThreshold" : 10, 358 | "maxNewSynapseCount" : 32, 359 | "permanenceIncrement" : 15, // % 360 | "permanenceDecrement" : 10, // % 361 | "predictedSegmentDecrement" : 1, // % 362 | "maxSegmentsPerCell" : 128, 363 | 
"maxSynapsesPerSegment" : 128, 364 | "potentialPercent" : 50, // % 365 | "sparsity" : 2, // % 366 | "inputCellCount" : 1024, 367 | "skipSpatialPooling" : false, 368 | "historyLength" : 4, 369 | // Temporal Pooling parameters 370 | "tpSparsity" : 10, // % 371 | "meanLifetime" : 4, 372 | "excitationMin" : 10, 373 | "excitationMax" : 20, 374 | "excitationXMidpoint" : 5, 375 | "excitationSteepness" : 1, 376 | "weightActive" : 1, 377 | "weightPredictedActive" : 4, 378 | "forwardPermananceIncrement" : 2, 379 | "backwardPermananceIncrement" : 1 380 | }; 381 | 382 | /** 383 | * This function creates a cell matrix containing the number of 384 | * input cells specifed in the params, and returns it. 385 | */ 386 | this.createInputCells = function( params ) { 387 | var i, cell; 388 | // Create a matrix to hold the new cells 389 | var inputCells = new CellMatrix( params ); 390 | // Generate the specified number of input cells 391 | for( i = 0; i < params.inputCellCount; i++ ) { 392 | cell = new Cell( inputCells, i ); 393 | } 394 | // Return the cell matrix 395 | return inputCells; 396 | } 397 | 398 | /** 399 | * This function generates a new layer. If spatial pooling is enabled 400 | * and an input layer is not specified, a matrix of input cells is also 401 | * created, containing the cell count specified in the params. 402 | * 403 | * TM_LAYER is a layer which receives distal input from its own cells. 404 | * TP_LAYER is a layer which produces stable representations. 405 | */ 406 | this.createLayer = function( params, layerType, inputLayerIdx ) { 407 | var property; 408 | var type = ( ( typeof layerType === "undefined" ) ? TM_LAYER : layerType ); 409 | var inputLayer = ( ( typeof inputLayerIdx === "undefined" ) ? 
null : my.layers[inputLayerIdx] ); 410 | 411 | // Start with a copy of the default params 412 | var layerParams = []; 413 | for( property in my.defaultParams ) { 414 | if( my.defaultParams.hasOwnProperty( property ) ) { 415 | layerParams[property] = my.defaultParams[property]; 416 | } 417 | } 418 | // Override default params with any provided 419 | if( ( typeof params !== "undefined" ) && ( params !== null ) ) { 420 | for( property in params ) { 421 | if( params.hasOwnProperty( property ) ) { 422 | layerParams[property] = params[property]; 423 | } 424 | } 425 | } 426 | 427 | // Determine where feed-forward input should come from 428 | var inputCells = null; 429 | if( inputLayer !== null ) { 430 | // Input coming from another layer 431 | inputCells = inputLayer.cellMatrix; 432 | } else if( !layerParams.skipSpatialPooling ) { 433 | // Create a new matrix of input cells 434 | inputCells = my.createInputCells( layerParams ); 435 | } 436 | // Create the layer 437 | var layer = new Layer( layerParams, layerType, [inputCells] ); 438 | 439 | if( type == TM_LAYER || type == TP_LAYER ) { 440 | // TM and TP layers receive distal input from their own cell matrix 441 | layer.distalInput = layer.cellMatrix; 442 | } 443 | 444 | my.layers.push( layer ); // Save for easy lookup 445 | 446 | return my; // Allows chaining function calls 447 | } 448 | 449 | /** 450 | * This function increments a layer"s timestep and activates its columns which 451 | * best match the input. If learning is enabled, adjusts the columns to better 452 | * match the input. 453 | * 454 | * This function also performs temporal pooling if layer is configured as such. 455 | * 456 | * Note: The active input SDRs must align with the proximal input cell matrices 457 | * in the layer. 458 | */ 459 | this.spatialPooling = function( layerIdx, activeInputSDRs, learningEnabled ) { 460 | var c, i, randomIndexes, input, indexes, synapse, column, cell; 461 | var learn = ( ( typeof learningEnabled === "undefined" ) ? 
false : learningEnabled ); 462 | var layer = my.layers[layerIdx]; 463 | //throw JSON.stringify(activeInputSDRs); 464 | layer.timestep++; 465 | 466 | // If we were given activeInputSDRs, update input cell activity to match 467 | if( activeInputSDRs.length > 0 ) { 468 | // Clear input cell active states 469 | for( i = 0; i < layer.proximalInputs.length; i++ ) { 470 | layer.proximalInputs[i].resetActiveStates(); 471 | } 472 | 473 | // Update active state of input cells which match the specified SDR. 474 | // If learning is enabled, also set their learn state. 475 | for( i = 0; i < activeInputSDRs.length; i++ ) { 476 | indexes = activeInputSDRs[i]; 477 | input = layer.proximalInputs[i]; 478 | for( c = 0; c < indexes.length; c++ ) { 479 | cell = input.cells[indexes[c]]; 480 | cell.active = true; 481 | input.activeCells.push( cell ); 482 | // If cell was predicted, add to predictedActive list as well 483 | if( cell.predictive ) { 484 | cell.predictedActive = true; 485 | input.predictedActiveCells.push( cell ); 486 | } 487 | if( learn ) { // Learning enabled, set learn states 488 | cell.learning = true; 489 | input.learningCells.push( cell ); 490 | } 491 | } 492 | } 493 | 494 | // Clear input cell predictive states 495 | for( i = 0; i < layer.proximalInputs.length; i++ ) { 496 | layer.proximalInputs[i].resetPredictiveStates(); 497 | } 498 | 499 | // Activate the input cells (may generate new predictions) 500 | for( i = 0; i < activeInputSDRs.length; i++ ) { 501 | input = layer.proximalInputs[i]; 502 | // Activate input cells (also generates new column scores) 503 | my.activateCellMatrix( input, layer.timestep ); 504 | } 505 | } 506 | 507 | // Select the columns with the highest scores to become active 508 | var bestColumns = []; 509 | var activeColumnCount = parseInt( ( parseFloat( layer.params.sparsity ) / 100 ) * layer.params.columnCount ); 510 | if( activeColumnCount < 1 ) { 511 | activeColumnCount = 1; 512 | } 513 | for( i = 0; i < layer.columns.length; i++ ) { 514 | 
column = layer.columns[i]; 515 | // Calculate the column score 516 | if( column.score === null ) { 517 | if( layer.type == TM_LAYER ) { 518 | // For TM layers, this is just the overlap with active input cells 519 | column.score = column.overlapActive; 520 | } else if( layer.type == TP_LAYER ) { 521 | // For TP layers, use a weighted average of overlap with active and predicted active cells 522 | column.score = ( parseFloat( column.overlapActive ) * parseFloat( layer.params.weightActive ) ) 523 | + ( parseFloat( column.overlapPredictedActive ) * parseFloat( layer.params.weightPredictedActive ) ); 524 | } 525 | } 526 | // Check if this column has a higher score than what has already been chosen 527 | for( c = 0; c < activeColumnCount; c++ ) { 528 | // If bestColumns array is not full, or if score is better, add it 529 | if( ( !( c in bestColumns ) ) || bestColumns[c].score < column.score ) { 530 | bestColumns.splice( c, 0, column ); 531 | // Don"t let bestColumns array grow larger than activeColumnCount 532 | if( bestColumns.length > activeColumnCount ) { 533 | bestColumns.length = activeColumnCount; 534 | } 535 | break; 536 | } 537 | } 538 | } 539 | 540 | for( i = 0; i < activeColumnCount; i++ ) { 541 | column = bestColumns[i]; 542 | if( layer.type == TP_LAYER ) { 543 | // Increase the column persistence based on overlap with correctly predicted inputs 544 | column.persistence = my.excite( column.persistence, column.overlapPredictedActive, 545 | layer.params.excitationMin, layer.params.excitationMax, layer.params.excitationXMidpoint, layer.params.excitationSteepness ); 546 | column.initialPersistence = column.persistence; 547 | } 548 | column.lastUsedTimestep = layer.timestep; 549 | // SP learning 550 | if( learn ) { 551 | for( c = 0; c < column.proximalSegment.synapses.length; c++ ) { 552 | synapse = column.proximalSegment.synapses[c]; 553 | // For TM layers, enforce all active cells. 
For TP layers, only correctly predicted cells 554 | if( 555 | ( ( layer.type == TM_LAYER ) && synapse.cellTx.active ) 556 | || ( ( layer.type == TP_LAYER ) && synapse.cellTx.predictedActive ) 557 | ) { 558 | synapse.permanence += layer.params.permanenceIncrement; 559 | if( synapse.permanence > 100 ) { 560 | synapse.permanence = 100; 561 | } 562 | } else { 563 | synapse.permanence -= layer.params.permanenceDecrement; 564 | if( synapse.permanence < 0 ) { 565 | synapse.permanence = 0; 566 | } 567 | } 568 | } 569 | } 570 | } 571 | 572 | // Activated columns for a TP layer are those with highest persistence 573 | if( layer.type == TP_LAYER ) { 574 | // Clear the "bestColumns" array so it can be rebuilt. 575 | bestColumns = []; 576 | // Calculate a new active column count based on TP sparsity param 577 | activeColumnCount = parseInt( ( parseFloat( layer.params.tpSparsity ) / 100 ) * layer.params.columnCount ); 578 | if( activeColumnCount < 1 ) { 579 | activeColumnCount = 1; 580 | } 581 | } 582 | 583 | // Post-processing, cleanup 584 | for( i = 0; i < layer.columns.length; i++ ) { 585 | column = layer.columns[i]; 586 | if( layer.type == TP_LAYER ) { 587 | // Generate a new set of "best columns" based on persistence values 588 | for( c = 0; c < activeColumnCount; c++ ) { 589 | // If bestColumns array is not full, or if score is better, add it 590 | if( ( !( c in bestColumns ) ) || bestColumns[c].persistence < column.persistence ) { 591 | // Only use column if it has some persistence 592 | if( column.persistence > 0 ) { 593 | bestColumns.splice( c, 0, column ); 594 | // Don"t let bestColumns array grow larger than activeColumnCount 595 | if( bestColumns.length > activeColumnCount ) { 596 | bestColumns.length = activeColumnCount; 597 | } 598 | } 599 | break; 600 | } 601 | } 602 | // Decay persistence value 603 | column.persistence = my.decay( layer.params.decayConstant, 604 | column.initialPersistence, layer.timestep - column.lastUsedTimestep ); 605 | } 606 | // Reset 
overlap scores 607 | column.overlapActive = 0; 608 | column.overlapPredictedActive = 0; 609 | column.score = null; 610 | } 611 | 612 | layer.activeColumns = bestColumns; 613 | 614 | 615 | // TODO: Forward learning 616 | 617 | // TODO: Backward learning 618 | 619 | 620 | return my; // Allows chaining function calls 621 | } 622 | 623 | /** 624 | * This function activates cells in the active columns, generates predictions, and 625 | * if learning is enabled, learns new temporal patterns. 626 | */ 627 | this.temporalMemory = function( layerIdx, learningEnabled ) { 628 | var learn = ( ( typeof learningEnabled === "undefined" ) ? false : learningEnabled ); 629 | var layer = my.layers[layerIdx]; 630 | 631 | // Phase 1: Activate 632 | my.tmActivate( layer, learn ); 633 | 634 | // Phase 2: Predict 635 | my.tmPredict( layer ); 636 | 637 | // Phase 3: Learn 638 | if( learn ) { 639 | my.tmLearn( layer ); 640 | } 641 | return my; // Allows chaining function calls 642 | } 643 | 644 | /** 645 | * This function allows the input cells to grow apical connections with the active cells in 646 | * the specified layer, allowing next inputs to be predicted. This is designed to replace 647 | * the heavier-weight classifier logic for making predictions one timestep in the future. 648 | */ 649 | this.inputMemory = function( layerIdx ) { 650 | var i; 651 | var layer = my.layers[layerIdx]; 652 | 653 | for( i = 0; i < layer.proximalInputs.length; i++ ) { 654 | my.trainCellMatrix( layer.cellMatrix, layer.proximalInputs[i], APICAL, layer.timestep ); 655 | } 656 | } 657 | 658 | /** 659 | * Activates cells in each active column, and selects cells to learn in the next 660 | * timestep. Activity is queued up, but not transmitted to receiving cells until 661 | * tmPredict() is executed. 662 | * 663 | * This is Phase 1 of the temporal memory process. 
664 | */ 665 | this.tmActivate = function( layer, learn ) { 666 | var i, c, x, predicted, column, cell, learningCell, synapse; 667 | 668 | // Reset this layer"s active cell states after saving history. 669 | layer.cellMatrix.resetActiveStates(); 670 | 671 | // Loop through each active column and activate cells 672 | for( i = 0; i < layer.activeColumns.length; i++ ) { 673 | column = layer.activeColumns[i]; 674 | predicted = false; 675 | for( c = 0; c < column.cells.length; c++ ) { 676 | cell = column.cells[c]; 677 | if( cell.predictive ) { 678 | cell.active = true; // Activate predictive cell 679 | layer.cellMatrix.activeCells.push( cell ); 680 | cell.predictedActive = true; 681 | layer.cellMatrix.predictedActiveCells.push( cell ); 682 | if( learn ) { 683 | cell.learning = true; // Flag cell for learning 684 | layer.cellMatrix.learningCells.push( cell ); 685 | } 686 | predicted = true; // Input was predicted 687 | } 688 | } 689 | if( !predicted ) { 690 | // Input was not predicted, activate all cells in column 691 | for( c = 0; c < column.cells.length; c++ ) { 692 | cell = column.cells[c]; 693 | cell.active = true; 694 | layer.cellMatrix.activeCells.push( cell ); 695 | } 696 | if( learn ) { 697 | // Select a cell for learning 698 | if( column.bestDistalSegment === null ) { 699 | // No segments matched the input, pick least used cell to learn 700 | x = Math.floor( Math.random() * column.cells.length ); 701 | learningCell = column.cells[x]; // Start with a random cell 702 | // Loop through all cells to find one with fewest segments 703 | for( c = 0; c < column.cells.length; c++ ) { 704 | cell = column.cells[x]; 705 | if( cell.distalSegments.length < learningCell.distalSegments.length ){ 706 | learningCell = cell; // Fewer segments, use this one 707 | } 708 | x++; 709 | if( x >= column.cells.length ) { 710 | x = 0; // Wrap around to beginning of cells array 711 | } 712 | } 713 | learningCell.learning = true; // Flag chosen cell to learn 714 | 
layer.cellMatrix.learningCells.push( learningCell ); 715 | } else { 716 | // Flag cell with best matching segment to learn 717 | column.bestDistalSegment.cellRx.learning = true; 718 | layer.cellMatrix.learningCells.push( column.bestDistalSegment.cellRx ); 719 | } 720 | } 721 | } 722 | } 723 | } 724 | 725 | /** 726 | * Transmits queued activity, driving cells into predictive state based on 727 | * distal or apical connections with active cells. Also identifies the 728 | * distal and apical segments that best match the current activity, which 729 | * is later used when tmLearn() is executed. 730 | * 731 | * This is Phase 2 of the temporal memory process. 732 | */ 733 | this.tmPredict = function( layer ) { 734 | var i, c, column, cell, synapse; 735 | 736 | // Reset this layer"s predictive cell states after saving history. 737 | layer.cellMatrix.resetPredictiveStates(); 738 | 739 | // Save column best matching segments history, and clear references 740 | for( i = 0; i < layer.columns.length; i++ ) { 741 | // Save best matching distal segment history 742 | column = layer.columns[i]; 743 | column.bestDistalSegmentHistory.unshift( column.bestDistalSegment ); 744 | if( column.bestDistalSegmentHistory.length > layer.params.historyLength ) { 745 | column.bestDistalSegmentHistory.length = layer.params.historyLength; 746 | } 747 | // Clear reference to best matching distal segment 748 | column.bestDistalSegment = null; 749 | // Save best matching apical segment history 750 | column.bestApicalSegmentHistory.unshift( column.bestApicalSegment ); 751 | if( column.bestApicalSegmentHistory.length > layer.params.historyLength ) { 752 | column.bestApicalSegmentHistory.length = layer.params.historyLength; 753 | } 754 | // Clear reference to best matching apical segment 755 | column.bestApicalSegment = null; 756 | } 757 | 758 | // Transmit queued activity to receiving synapses to generate predictions 759 | my.activateCellMatrix( layer.cellMatrix, layer.timestep ); 760 | } 761 | 762 | 
/** 763 | * This function allows cells in a layer to grow distal connections with other cells 764 | * in the same layer, allowing next state to be predicted. Enforces good predictions 765 | * and degrades wrong predictions. 766 | * 767 | * This is Phase 3 of the temporal memory process. 768 | */ 769 | this.tmLearn = function( layer ) { 770 | 771 | my.trainCellMatrix( layer.distalInput, layer.cellMatrix, DISTAL, layer.timestep ); 772 | } 773 | 774 | /** 775 | * Activates the cells in a matrix which have had their "active" flag set. 776 | * If cells are feeding a spatial pooler, increases the scores of the columns 777 | * they are connected to. Otherwise, transmits to dendrites of other receiving 778 | * cells, and may place them into predictive or active states. 779 | */ 780 | this.activateCellMatrix = function( cellMatrix, timestep ) { 781 | var c, s, column, cell, synapse; 782 | 783 | for( c = 0; c < cellMatrix.activeCells.length; c++ ) { 784 | cell = cellMatrix.activeCells[c]; 785 | // Activate synapses along the cell"s axon 786 | for( s = 0; s < cell.axonSynapses.length; s++ ) { 787 | synapse = cell.axonSynapses[s]; 788 | synapse.segment.lastUsedTimestep = timestep; // Update segment"s last used timestep 789 | if( synapse.segment.cellRx === null ) { 790 | // This is the proximal segment of a column. Just update the column score. 791 | if( synapse.permanence >= cellMatrix.params.connectedPermanence ) { 792 | synapse.segment.column.overlapActive++; 793 | if( cell.predictedActive ) { 794 | synapse.segment.column.overlapPredictedActive++; 795 | } 796 | } 797 | } else { 798 | // This is the segment of a cell. Determine if state should be updated. 
799 | // First, add to segment"s active synapses list 800 | synapse.segment.activeSynapses.push( synapse ); 801 | if( cell.predictedActive ) { 802 | // Transmitting cell was correctly predicted, add synapse to predicted active list 803 | synapse.segment.predictedActiveSynapses.push( synapse ); 804 | } 805 | if( synapse.permanence >= cellMatrix.params.connectedPermanence ) { 806 | // Synapse connected, add to connected synapses list 807 | synapse.segment.connectedSynapses.push( synapse ); 808 | if( synapse.segment.connectedSynapses.length >= cellMatrix.params.activationThreshold ) { 809 | // Number of connected synapses above threshold. Update receiving cell. 810 | if( !synapse.segment.cellRx.predictive ) { 811 | // Mark receiving cell as predictive (TODO: consider proximal segments) 812 | synapse.segment.cellRx.predictive = true; 813 | // Update the receiving cell"s matrix 814 | synapse.segment.cellRx.matrix.predictiveCells.push( synapse.segment.cellRx ); 815 | // Add segment to appropriate list for learning 816 | if( synapse.segment.type == DISTAL ) { 817 | synapse.segment.cellRx.distalLearnSegment = synapse.segment; 818 | } else if( synapse.segment.type == APICAL ) { 819 | // TODO: Consider cases where distal + apical should activate cell. 
820 | synapse.segment.cellRx.apicalLearnSegment = synapse.segment; 821 | } 822 | } 823 | } 824 | } 825 | // If receiving cell is in a column, update best matching segment references 826 | if( synapse.segment.cellRx.column !== null ) { 827 | column = synapse.segment.cellRx.column; 828 | // Save a reference to the best matching distal and apical segments in the column 829 | if( synapse.segment.type === DISTAL ) { 830 | if( ( column.bestDistalSegment === null ) 831 | || ( synapse.segment.connectedSynapses.length > column.bestDistalSegment.connectedSynapses.length ) 832 | || ( synapse.segment.activeSynapses.length > column.bestDistalSegment.activeSynapses.length ) ) 833 | { 834 | // Make sure segment has at least minimum number of potential synapses 835 | if( synapse.segment.activeSynapses.length >= cellMatrix.params.minThreshold ) { 836 | // This segment is a better match, use it 837 | column.bestDistalSegment = synapse.segment; 838 | synapse.segment.cellRx.distalLearnSegment = synapse.segment; 839 | } 840 | } 841 | } else if( synapse.segment.type === APICAL ) { 842 | if( ( column.bestApicalSegment === null ) 843 | || ( synapse.segment.connectedSynapses.length > column.bestApicalSegment.connectedSynapses.length ) 844 | || ( synapse.segment.activeSynapses.length > column.bestApicalSegment.activeSynapses.length ) ) 845 | { 846 | // Make sure segment has at least minimum number of potential synapses 847 | if( synapse.segment.activeSynapses.length >= cellMatrix.params.minThreshold ) { 848 | // This segment is a better match, use it 849 | column.bestApicalSegment = synapse.segment; 850 | synapse.segment.cellRx.apicalLearnSegment = synapse.segment; 851 | } 852 | } 853 | } 854 | } 855 | } 856 | } 857 | } 858 | } 859 | 860 | /** 861 | * Creates or adapts distal and apical segments in a receiving cell matrix to 862 | * align with previously active cells in a transmitting cell matrix. Enforces 863 | * good predictions and degrades wrong predictions. 
864 | */ 865 | this.trainCellMatrix = function( cellMatrixTx, cellMatrixRx, inputType, timestep ) { 866 | var c, s, p, sourcePredicted, randomIndexes, cell, segment, synapse; 867 | 868 | if( ( cellMatrixTx.activeCellHistory.length > 0 ) && ( cellMatrixRx.predictiveCellHistory.length > 0 ) ) { 869 | // Enforce correct predictions, degrade wrong predictions 870 | for( c = 0; c < cellMatrixRx.predictiveCellHistory[0].length; c++ ) { 871 | segment = null; 872 | cell = cellMatrixRx.predictiveCellHistory[0][c]; 873 | if( cell.column !== null ) { 874 | // Cell is part of a layer"s cell matrix. 875 | // Make sure this cell is the one referenced by column"s best segment history 876 | if( inputType == DISTAL 877 | && cell.column.bestDistalSegmentHistory.length > 0 878 | && cell.column.bestDistalSegmentHistory[0] !== null 879 | && cell.column.bestDistalSegmentHistory[0].cellRx === cell ) 880 | { 881 | segment = cell.column.bestDistalSegmentHistory[0]; 882 | } else if( inputType == APICAL 883 | && cell.column.bestApicalSegmentHistory.length > 0 884 | && cell.column.bestApicalSegmentHistory[0] !== null 885 | && cell.column.bestApicalSegmentHistory[0].cellRx === cell ) 886 | { 887 | segment = cell.column.bestApicalSegmentHistory[0]; 888 | } 889 | } else { 890 | // Cell is part of an input cell matrix. 891 | if( inputType == DISTAL ) { 892 | segment = cell.distalLearnSegment; 893 | } else if( inputType == APICAL ) { 894 | segment = cell.apicalLearnSegment; 895 | } 896 | } 897 | if( segment !== null 898 | && segment.activeSynapsesHistory.length > 0 899 | && segment.activeSynapsesHistory[0].length > 0 ) 900 | { 901 | if( cell.active ) { 902 | // Correct prediction. Train it to better align with activity. 903 | my.trainSegment( segment, cellMatrixTx.learningCellHistory[0], cellMatrixRx.params, timestep ); 904 | } else { 905 | // Wrong prediction. 
906 | for( s = 0; s < segment.synapses.length; s++ ) { 907 | synapse = segment.synapses[s]; 908 | // Check if transmitting cell was itself predicted 909 | sourcePredicted = false; 910 | if( segment.predictedActiveSynapsesHistory.length > 0 ) { 911 | for( p = 0; p < segment.predictedActiveSynapsesHistory[0].length; p++ ) { 912 | if( segment.predictedActiveSynapsesHistory[0][p] === synapse ) { 913 | sourcePredicted = true; 914 | } 915 | } 916 | } 917 | // Only punish wrong predictions if the source minicolumn was not bursting (fixes some undesirable forgetfulness) 918 | if( sourcePredicted ) { 919 | // Degrade this connection. 920 | synapse.permanence -= cellMatrixRx.params.predictedSegmentDecrement; 921 | if( synapse.permanence < 0 ) { 922 | synapse.permanence = 0; 923 | } 924 | } 925 | } 926 | } 927 | } 928 | cell.learning = false; // Remove learning flag, so cell doesn"t get double-trained 929 | } 930 | // If this isn"t first input (or reset), train cells which were not predicted 931 | if( cellMatrixRx.learningCellHistory[0].length > 0 ) { 932 | // Loop through cells which have been flagged for learning 933 | for( c = 0; c < cellMatrixRx.learningCells.length; c++ ) { 934 | segment = null; 935 | cell = cellMatrixRx.learningCells[c]; 936 | 937 | // Make sure we haven"t already trained this cell 938 | if( cell.learning ) { 939 | if( cell.column !== null ) { 940 | // Cell is part of a layer"s cell matrix 941 | if( inputType == DISTAL 942 | && cell.column.bestDistalSegmentHistory.length > 0 943 | && cell.column.bestDistalSegmentHistory[0] !== null 944 | && cell.column.bestDistalSegmentHistory[0].cellRx === cell ) 945 | { 946 | segment = cell.column.bestDistalSegmentHistory[0]; 947 | }else if( inputType == APICAL 948 | && cell.column.bestApicalSegmentHistory.length > 0 949 | && cell.column.bestApicalSegmentHistory[0] !== null 950 | && cell.column.bestApicalSegmentHistory[0].cellRx === cell ) 951 | { 952 | segment = cell.column.bestApicalSegmentHistory[0]; 953 | } 954 | 
} else { 955 | // Cell is part of an input cell matrix 956 | if( inputType == DISTAL ) { 957 | segment = cell.distalLearnSegment; 958 | } else if( inputType == APICAL ) { 959 | segment = cell.apicalLearnSegment; 960 | } 961 | } 962 | // We haven"t trained this cell yet. Check if it had a matching segment 963 | if( segment !== null 964 | && segment.activeSynapsesHistory.length > 0 965 | && segment.activeSynapsesHistory[0].length > 0 ) 966 | { 967 | // Found a matching segment. Train it to better align with activity. 968 | my.trainSegment( segment, cellMatrixTx.learningCellHistory[0], cellMatrixRx.params, timestep ); 969 | } else { 970 | // No matching segment. Create a new one. 971 | segment = new Segment( inputType, cell, cell.column ); 972 | segment.lastUsedTimestep = timestep; 973 | // Connect segment with random sampling of previously active learning cells, up to max new synapse count 974 | randomIndexes = my.randomIndexes( cellMatrixTx.learningCellHistory[0].length, cellMatrixRx.params.maxNewSynapseCount, false ); 975 | for( s = 0; s < randomIndexes.length; s++ ) { 976 | synapse = new Synapse( cellMatrixTx.learningCellHistory[0][randomIndexes[s]], segment, cellMatrixRx.params.initialPermanence ); 977 | } 978 | } 979 | cell.learning = false; 980 | } 981 | } 982 | } 983 | } 984 | } 985 | 986 | /** 987 | * Trains a segment of any type to better match the specified active cells. 988 | * Active synapses are enforced, inactive synapses are degraded, and new synapses are formed 989 | * with a random sampling of the active cells, up to max new synapses. 
990 | */ 991 | this.trainSegment = function( segment, activeCells, params, timestep ) { 992 | var s, i, synapse, segments, segmentIndex, lruSegmentIndex; 993 | var randomIndexes = my.randomIndexes( activeCells.length, params.maxNewSynapseCount, false ); 994 | var inactiveSynapses = segment.synapses.slice(); // Inactive synapses (will remove active ones below) 995 | // Enforce synapses that were active 996 | if( segment.activeSynapsesHistory.length > 0 ) { 997 | for( s = 0; s < segment.activeSynapsesHistory[0].length; s++ ) { 998 | synapse = segment.activeSynapsesHistory[0][s]; 999 | synapse.permanence += params.permanenceIncrement; 1000 | if( synapse.permanence > 100 ) { 1001 | synapse.permanence = 100; 1002 | } 1003 | // Remove cell from random sampling if present (prevents duplicate connections) 1004 | for( i = 0; i < randomIndexes.length; i++ ) { 1005 | if( activeCells[randomIndexes[i]].index == synapse.cellTx.index ) { 1006 | // Cell is in the random sampling, remove it 1007 | randomIndexes.splice( i, 1 ); 1008 | break; 1009 | } 1010 | } 1011 | // Remove synapse from the list of inactive synapses 1012 | for( i = 0; i < inactiveSynapses.length; i++ ) { 1013 | if( inactiveSynapses[i] === synapse ) { 1014 | // Found it 1015 | inactiveSynapses.splice( i, 1 ); 1016 | break; 1017 | } 1018 | } 1019 | } 1020 | } 1021 | // Degrade synapses that were not active 1022 | for( s = 0; s < inactiveSynapses.length; s++ ) { 1023 | synapse = inactiveSynapses[s]; 1024 | synapse.permanence -= params.permanenceDecrement; 1025 | if( synapse.permanence < 0 ) { 1026 | synapse.permanence = 0; 1027 | // TODO: Delete synapse to free resources 1028 | } 1029 | } 1030 | // Select the relevant list of segments, based on type 1031 | if( segment.type == DISTAL ) { 1032 | segments = segment.cellRx.distalSegments; 1033 | } else if( segment.type == APICAL ) { 1034 | segments = segment.cellRx.apicalSegments; 1035 | } else { 1036 | segments = segment.cellRx.proximalSegments; 1037 | } 1038 | if( 
segment.activeSynapsesHistory[0].length < params.maxNewSynapseCount ) { 1039 | // Connect segment with random sampling of previously active cells, up to max new synapse count 1040 | for( i = 0; i < randomIndexes.length; i++ ) { 1041 | if( segment.synapses.length >= params.maxSynapsesPerSegment ) { 1042 | // Cannot add any more synapses to this segment. Check if we can add a new segment. 1043 | if( segments.length >= params.maxSegmentsPerCell ) { 1044 | // Cannot add any more segments to this cell. Select least recently used and remove it. 1045 | segmentIndex = Math.floor( Math.random() * segments.length ); 1046 | lruSegmentIndex = segmentIndex; // Start with a random segment index 1047 | // Loop through segments to find least recently used 1048 | for( s = 0; s < segments.length; s++ ) { 1049 | segmentIndex++; 1050 | if( segmentIndex >= segments.length ) { 1051 | segmentIndex = 0; // Wrap back around to beginning of list 1052 | } 1053 | // Check if this segment is less recently used than selected one 1054 | if( segments[segmentIndex].lastUsedTimestep < segments[lruSegmentIndex].lastUsedTimestep ) { 1055 | lruSegmentIndex = segmentIndex; // Used less recently.. 
select this one instead 1056 | } 1057 | } 1058 | } 1059 | // Add new segment to this cell 1060 | segment = new Segment( segment.type, segment.cellRx, segment.cellRx.column ); 1061 | segment.lastUsedTimestep = timestep; 1062 | } 1063 | // Add new synapse to this segment 1064 | synapse = new Synapse( activeCells[randomIndexes[i]], segment, params.initialPermanence ); 1065 | } 1066 | } 1067 | } 1068 | 1069 | /** 1070 | * Returns an array of size "resultCount", containing unique indexes in the range (0, length - 1) 1071 | * If "ordered" is true, indexes will be in sequential order starting from a random position 1072 | * If "ordered" is false, indexes will be in random order 1073 | */ 1074 | this.randomIndexes = function( length, resultCount, ordered ) { 1075 | var i1, i2; 1076 | var results = []; // Array to hold the random indexes 1077 | var rc = resultCount; 1078 | // Make sure not to return more results than there are available 1079 | if( rc > length ) { 1080 | rc = length; 1081 | } 1082 | if( ordered ) { 1083 | // Start at a random index 1084 | i1 = Math.floor( Math.random() * length ); 1085 | // Capture indexes in order from this point 1086 | for( i2 = 0; i2 < rc; i2++ ) { 1087 | results.push( i1 ); 1088 | i1++; 1089 | if( i1 >= length ) { 1090 | // End of list, loop back around to beginning 1091 | i1 = 0; 1092 | } 1093 | } 1094 | } else { 1095 | // Create an array to hold unprocessed indexes 1096 | var indexes = []; 1097 | for( i1 = 0; i1 < length; i1++ ) { 1098 | indexes.push( i1 ); 1099 | } 1100 | // Capture random indexes out of order 1101 | for( i2 = 0; i2 < rc; i2++ ) { 1102 | // Pick a random element from the unprocessed list 1103 | i1 = Math.floor( Math.random() * ( length - i2 ) ); 1104 | // Capture the index in this element 1105 | results.push( indexes[i1] ); 1106 | // Remove it from the unprocessed list 1107 | indexes.splice( i1, 1 ); 1108 | } 1109 | } 1110 | return results; 1111 | } 1112 | 1113 | /** 1114 | * This function calculates an exponential 
decay 1115 | * 1116 | * @param decayConstant: 1/meanLifetime 1117 | */ 1118 | this.decay = function( decayConstant, initialValue, timesteps ) { 1119 | return ( Math.exp( -decayConstant * timesteps ) * initialValue ); 1120 | } 1121 | 1122 | /** 1123 | * This function calculates a logistic excitement based on overlap 1124 | */ 1125 | this.excite = function( currentValue, overlap, minValue, maxValue, xMidpoint, steepness ) { 1126 | return ( currentValue + ( maxValue - minValue ) / ( 1 + Math.exp( -steepness * ( overlap - xMidpoint ) ) ) ); 1127 | } 1128 | 1129 | /** 1130 | * This function clears all layers 1131 | */ 1132 | this.clear = function() { 1133 | // Loop through all saved layers 1134 | var i; 1135 | for( i = 0; i < my.layers.length; i++ ) { 1136 | my.layers[i].clear(); // Clears all references 1137 | } 1138 | my.layers = []; // Empty the layers array 1139 | return my; // Allows chaining function calls 1140 | } 1141 | 1142 | }, 1143 | 1144 | processRow: function (row, rowWriter, context) { 1145 | 1146 | 1147 | // Activate TM layer 1148 | this.htmController.spatialPooling( 0, [row.INPUTS], true ); 1149 | this.htmController.temporalMemory( 0, true ); 1150 | 1151 | // Activate TP layer (skipping this step for demonstration purposes) 1152 | //this.htmController.spatialPooling( 1, [], true ); 1153 | //this.htmController.temporalMemory( 1, true ); 1154 | 1155 | // Train cells in input layer to predict next input 1156 | this.htmController.inputMemory( 0 ); 1157 | 1158 | rowWriter.writeRow({"ACTIVE": this.htmController.layers[0].cellMatrix.getActiveCellIndexes(), "PREDICTIVE": this.htmController.layers[0].cellMatrix.getPredictedActiveCellIndexes()}); 1159 | 1160 | }, 1161 | finalize: function (rowWriter, context) { 1162 | 1163 | }, 1164 | initialize: function(argumentInfo, context) { 1165 | // each of these variables will contain a map of cluster_no to running total 1166 | this.htmController=new this.HTMController(); 1167 | this.htmController.createLayer( {}, 0 ); 
1168 | 1169 | // Create the pooling layer (not needed for this demo) 1170 | //this.htmController.createLayer( {}, 1, [0] ); 1171 | 1172 | }}'; 1173 | -------------------------------------------------------------------------------- /htm/scalar_encoder.sql: -------------------------------------------------------------------------------- 1 | create or replace function SCALAR_ENCODER(INPUTNUMBER float, SDR_WIDTH float,MIN_VAL float,MAX_VAL float,WIDTH float, DENSE_OUTPUT) 2 | returns array 3 | language javascript 4 | immutable 5 | as 6 | $$ 7 | var scaleFunction = function(opts){ 8 | var istart = opts.domain[0], 9 | istop = opts.domain[1], 10 | ostart = opts.range[0], 11 | ostop = opts.range[1]; 12 | 13 | return function scaleFunction(value) { 14 | return ostart + (ostop - ostart) * ((value - istart) / (istop - istart)); 15 | } 16 | }; 17 | 18 | function applyBitmaskAtIndex(index, w,n,reverseScale, dense_output) { 19 | let out = []; 20 | let lowerValue = reverseScale(index - (w/2)); 21 | let upperValue = reverseScale(index + (w/2)) 22 | 23 | // For each bit in the encoding, we get the input domain 24 | // value. Using w, we know how wide the bitmask should 25 | // be, so we use the reverse scales to define the size 26 | // of the bitmask. If this index is within the value 27 | // range, we turn it on. 28 | for (let i = 0; i < n; i++) { 29 | let bitValue = reverseScale(i); 30 | let bitOut = 0; 31 | // if dense==true, push out the index value only when active 32 | // otherwise, always output 0 or 1 accordingly 33 | if (lowerValue <= bitValue && bitValue < upperValue) { 34 | if (dense_output){ 35 | out.push(index) 36 | } 37 | bitOut = 1 38 | } 39 | if (!dense_output){ 40 | out.push(bitOut) 41 | } 42 | } 43 | return out 44 | } 45 | // Accepts a scalar value within the input domain, returns an 46 | // array of bits representing the value. 
47 | function encode(value,n,min,max,width) { 48 | let generatedScaleFunction = scaleFunction({domain: [min,max],range:[0,n]}) 49 | let generatedReverseScaleFunction = scaleFunction({domain: [0,n],range:[min,max]}) 50 | // Using the scale, get the corresponding integer 51 | // index for this value 52 | let index = Math.floor(generatedScaleFunction(value)); 53 | if (index > n - 1) { 54 | index = n - 1 55 | } 56 | return applyBitmaskAtIndex(index,width,n,generatedReverseScaleFunction); 57 | } 58 | return encode(INPUTNUMBER,SDR_WIDTH,MIN_VAL,MAX_VAL,WIDTH, DENSE_OUTPUT); 59 | $$ 60 | ; 61 | -------------------------------------------------------------------------------- /htm/sparse_to_dense.sql: -------------------------------------------------------------------------------- 1 | create or replace function SPARSE_TO_DENSE(SPARSE_ARRAY array) 2 | returns array 3 | language javascript 4 | immutable 5 | as 6 | $$ 7 | return SPARSE_ARRAY.map(function(element, index){ 8 | if (element==1){ 9 | return index; 10 | } 11 | }).filter(function(element){return typeof(element)!=='undefined'}); 12 | $$ 13 | ; 14 | -------------------------------------------------------------------------------- /k-means/kmeans_stored_procedure.sql: -------------------------------------------------------------------------------- 1 | /** 2 | * K-Means algorithm. 3 | * WARNING, not securable at the stored proc level due to its use of string interpolation in queries. 
4 | */ 5 | create or replace procedure k_means(TABLE_NAME varchar, COLUMN_NAMES varchar, CLUSTER_INDEX_COLUMN_NAME varchar, CLUSTER_COUNT float, ITERATIONS float) 6 | returns String not null 7 | language javascript 8 | as 9 | $$ 10 | var columnNamesArray = COLUMN_NAMES.split(","); 11 | if (columnNamesArray.length != 2){ 12 | throw "k_means currently only supports two dimensions"; 13 | } 14 | // First, clear out any existing centroids for this table+column combo 15 | var results = snowflake.execute({ 16 | sqlText: "delete from KMEANS_CLUSTERS where TABLE_NAME='"+TABLE_NAME+"' and COLUMNS='"+COLUMN_NAMES+"'" 17 | }); 18 | 19 | // next, select random rows to use as initial centroid values 20 | results = snowflake.execute({ 21 | sqlText: "select "+COLUMN_NAMES+" from "+TABLE_NAME+" sample ("+CLUSTER_COUNT+" rows)" 22 | }); 23 | var clusterCentroids = {}; 24 | var cluster_index = 0; 25 | while (results.next()) { 26 | clusterCentroids[cluster_index] = {}; 27 | clusterCentroids[cluster_index]['x'] = results.getColumnValue(columnNamesArray[0]); 28 | clusterCentroids[cluster_index]['y'] = results.getColumnValue(columnNamesArray[1]); 29 | cluster_index++; 30 | } 31 | 32 | // Store the initial centroids in the tracking table 33 | snowflake.execute({ 34 | sqlText: "insert into KMEANS_CLUSTERS (TABLE_NAME,COLUMNS,CENTROIDS) select '"+TABLE_NAME+"','"+COLUMN_NAMES+"',PARSE_JSON('"+JSON.stringify(clusterCentroids)+"')" 35 | }); 36 | 37 | // Now iterate through and update the centroids 38 | for (var i=0;i0){ 46 | newClusters[clusterId].x=(this.clusterXTotals[clusterId]/this.clusterCounts[clusterId]).toPrecision(2); 47 | newClusters[clusterId].y=(this.clusterYTotals[clusterId]/this.clusterCounts[clusterId]).toPrecision(2); 48 | } 49 | } 50 | rowWriter.writeRow({"MERGED_CLUSTERS": newClusters}); 51 | }, 52 | initialize: function(argumentInfo, context) { 53 | // each of these variables will contain a map of cluster_no to running total 54 | this.clusterXTotals={}; 55 | 
this.clusterYTotals={}; 56 | this.clusterCounts={}; 57 | }}'; 58 | -------------------------------------------------------------------------------- /k-means/update_clusters_udtf.sql: -------------------------------------------------------------------------------- 1 | -- The purpose of UPDATE_CLUSTERS is to assign each input record to the closest cluster out of a list of clusters, 2 | -- and gather information required to recalculate new centroids. This forms part of the k-means clustering algorithm 3 | -- As a User Defined Table Function, it does this by traversing a table, and for each record: 4 | -- 1) Determine which of the clusters in CLUSTER_CENTROIDS it belongs to (closest in 2D distance) 5 | -- 2) Update a running total of values for that cluster, so that the new centroids can be calculated later 6 | -- The reason we output totals in this manner rather than just new centroids, is so that the processing can be parallelized. 7 | -- So UPDATE_CLUSTERS acts like the Map side of a MapReduce. 
8 | 9 | -- A worked example of a couple of iterations, where the rows look like this: 10 | --+------+------+----------------------------------------------------------+ 11 | --| X | Y | CLUSTER_CENTROIDS | 12 | --|------+------+----------------------------------------------------------| 13 | --| 5 | 6 | {0:{x:4.5,y:7.6},1:{x:1.0,y:-3.9},2:{x:8.4,y:9.0}} | 14 | --| 2.2 | -3.0 | {0:{x:4.5,y:7.6},1:{x:1.0,y:-3.9},2:{x:8.4,y:9.0}} | 15 | --+------+------+----------------------------------------------------------+ 16 | -- Row 1 17 | -- ----- 18 | -- Using the Euclidean distance formula: 19 | -- - Distance to cluster 0 is 1.6763054614240207 (initial winner) 20 | -- - Distance to cluster 1 is 10.677546534667972 (not closer than current winner) 21 | -- - Distance to cluster 2 is 4.534313619501853 (not closer than current winner) 22 | -- Cluster 0 is the winner, so we add X value (5) and Y value (6) to cluster 0's running totals 23 | -- Row 2 24 | -- ----- 25 | -- Using the Euclidean distance formula: 26 | -- - Distance to cluster 0 is 10.846658471621572 (initial winner) 27 | -- - Distance to cluster 1 is 1.500000000000000 (closer than current winner, so this is the new winner) 28 | -- - Distance to cluster 2 is 13.507035203922436 (not closer than current winner) 29 | -- Cluster 1 is the winner, so we add X value (2.2) and Y value (-3.0) to cluster 1's running totals 30 | -- If these were the only rows, the final output would be: 31 | --+------------------------------------------------------------------------+ 32 | --| NEW_CLUSTER_TOTALS | 33 | --|------------------------------------------------------------------------| 34 | --| {0:{x_total:5,y_total:6,count:1},1:{x_total:2.2,y_total:-3.0,count:1}} | 35 | --+------------------------------------------------------------------------+ 36 | -- Normally there will be many matches for each cluster.
37 | -- 38 | -- Note: Cluster centroids are stored in an object rather than an array, so that we can maintain a 39 | -- consistent identifier for each of them. It doesn't serve much purpose currently, but in future 40 | -- we might handle 0-member clusters differently. 41 | create or replace function UPDATE_CLUSTERS(X float, Y float, CLUSTER_CENTROIDS variant) 42 | returns table (NEW_CLUSTER_TOTALS variant) 43 | language javascript 44 | AS '{ 45 | euclidianDistance: function(x1,x2,y1,y2){ 46 | return Math.sqrt((x1 - x2) ** 2 + (y1 - y2) ** 2); 47 | }, 48 | processRow: function (row, rowWriter, context) { 49 | var clusterIds=Object.keys(row.CLUSTER_CENTROIDS); 50 | var winningClusterIndex=clusterIds[0]; 51 | let cluster=row.CLUSTER_CENTROIDS[winningClusterIndex]; 52 | let distance; 53 | let clusterId; 54 | var winningClusterDistance=this.euclidianDistance(cluster.x,row.X,cluster.y,row.Y); 55 | // compare all clusters, starting from the second cluster id 56 | for (var clusterIdIndex=1; clusterIdIndex