├── .gitignore
├── .travis.yml
├── LICENSE.txt
├── README.md
├── docs
│   ├── attentive_recurrent_network_test.ods
│   ├── batch_feedforward_test.ods
│   ├── birnn_test.ods
│   ├── feedforward_network_test.ods
│   ├── layers
│   │   ├── attention_layer_test.ods
│   │   ├── batch_norm_tests.ods
│   │   ├── cfn_tests.ods
│   │   ├── delta_rnn_tests.ods
│   │   ├── feedforward_tests.ods
│   │   ├── gru_tests.ods
│   │   ├── highway_tests.ods
│   │   ├── indrnn_tests.ods
│   │   ├── lstm_tests.ods
│   │   ├── ltm_tests.ods
│   │   ├── merge_layers_tests.ods
│   │   ├── norm_tests.ods
│   │   ├── ran_tests.ods
│   │   ├── scaled_dot_attention_layer_test.ods
│   │   └── simple_recurrent_tests.ods
│   ├── multiprediction_scorer_tests.ods
│   ├── tree_rnn_tests.ods
│   └── update_methods_tests.ods
├── examples
│   ├── Configuration.kt
│   ├── HANClassifierTest.kt
│   ├── ProgressiveSumTest.kt
│   ├── SerializeLMDBEmbeddings.kt
│   ├── SparseInputTest.kt
│   ├── SumSignRelevanceTest.kt
│   ├── VectorsAverageBiaffineTest.kt
│   ├── bert
│   │   ├── convert
│   │   │   ├── CommandLineArguments.kt
│   │   │   └── ConvertModel.kt
│   │   ├── test
│   │   │   ├── CommandLineArguments.kt
│   │   │   └── TestBERT.kt
│   │   └── training
│   │       ├── CommandLineArguments.kt
│   │       └── TrainBERT.kt
│   ├── config
│   │   └── configuration.yaml.example
│   ├── logicgates
│   │   ├── ANDGate.kt
│   │   ├── GateTestUtils.kt
│   │   ├── NOTGate.kt
│   │   ├── ORGate.kt
│   │   └── XORGate.kt
│   ├── mnist
│   │   ├── MNISTSequenceTest.kt
│   │   ├── MNISTSparseBinaryTest.kt
│   │   ├── MNISTTest.kt
│   │   └── helpers
│   │       ├── MNISTSequenceExampleExtractor.kt
│   │       └── MNISTSparseExampleExtractor.kt
│   ├── pom.xml
│   ├── traininghelpers
│   │   ├── training
│   │   │   ├── FeedforwardTrainer.kt
│   │   │   ├── SequenceTrainer.kt
│   │   │   └── SequenceWithFinalOutputTrainer.kt
│   │   └── validation
│   │       ├── FeedforwardEvaluator.kt
│   │       ├── SequenceEvaluator.kt
│   │       └── SequenceWithFinalOutputEvaluator.kt
│   └── utils
│       ├── CorpusReader.kt
│       ├── ReadArrayExtensions.kt
│       ├── dataset.kt
│       └── exampleextractor
│           ├── ClassificationBinaryOutputExampleExtractor.kt
│           ├── ClassificationExampleExtractor.kt
│           ├── ClassificationSequenceExampleExtractor.kt
│           ├── ClassificationSparseExampleExtractor.kt
│           └── ExampleExtractor.kt
├── pom.xml
└── src
    ├── main
    │   └── kotlin
    │       └── com
    │           └── kotlinnlp
    │               └── simplednn
    │                   ├── core
    │                   │   ├── arrays
    │                   │   │   ├── ActivableArray.kt
    │                   │   │   ├── AugmentedArray.kt
    │                   │   │   ├── AugmentedArrayExtensions.kt
    │                   │   │   ├── DistributionArray.kt
    │                   │   │   ├── Norm1Array.kt
    │                   │   │   └── ParamsArray.kt
    │                   │   ├── embeddings
    │                   │   │   ├── EmbeddingsMap.kt
    │                   │   │   └── lmdb
    │                   │   │       ├── EmbeddingsMap.kt
    │                   │   │       └── EmbeddingsStorage.kt
    │                   │   ├── functionalities
    │                   │   │   ├── activations
    │                   │   │   │   ├── ActivationFunction.kt
    │                   │   │   │   ├── CeLU.kt
    │                   │   │   │   ├── ELU.kt
    │                   │   │   │   ├── GeLU.kt
    │                   │   │   │   ├── HardSigmoid.kt
    │                   │   │   │   ├── HardTanh.kt
    │                   │   │   │   ├── LeakyRELU.kt
    │                   │   │   │   ├── ReLU.kt
    │                   │   │   │   ├── ScalarActivationFunction.kt
    │                   │   │   │   ├── SeLU.kt
    │                   │   │   │   ├── Sigmoid.kt
    │                   │   │   │   ├── Softmax.kt
    │                   │   │   │   ├── SoftmaxBase.kt
    │                   │   │   │   ├── Softplus.kt
    │                   │   │   │   ├── Softshrink.kt
    │                   │   │   │   ├── Softsign.kt
    │                   │   │   │   ├── Tanh.kt
    │                   │   │   │   └── Threshold.kt
    │                   │   │   ├── decaymethods
    │                   │   │   │   ├── DecayMethod.kt
    │                   │   │   │   ├── ExponentialDecay.kt
    │                   │   │   │   └── HyperbolicDecay.kt
    │                   │   │   ├── gradientclipping
    │                   │   │   │   └── GradientClipping.kt
    │                   │   │   ├── initializers
    │                   │   │   │   ├── ConstantInitializer.kt
    │                   │   │   │   ├── GlorotInitializer.kt
    │                   │   │   │   ├── Initializer.kt
    │                   │   │   │   └── RandomInitializer.kt
    │                   │   │   ├── losses
    │                   │   │   │   ├── AugmentedMSECalculator.kt
    │                   │   │   │   ├── LossCalculator.kt
    │                   │   │   │   ├── LossesUtils.kt
    │                   │   │   │   ├── MSECalculator.kt
    │                   │   │   │   ├── MulticlassMSECalculator.kt
    │                   │   │   │   ├── NegativeLogProbability.kt
    │                   │   │   │   └── SoftmaxCrossEntropyCalculator.kt
    │                   │   │   ├── outputevaluation
    │                   │   │   │   ├── ClassificationEvaluation.kt
    │                   │   │   │   ├── MulticlassEvaluation.kt
    │                   │   │   │   └── OutputEvaluationFunction.kt
    │                   │   │   ├── randomgenerators
    │                   │   │   │   ├── BaseRandom.kt
    │                   │   │   │   ├── FixedRangeRandom.kt
    │                   │   │   │   ├── GaussianDistributedRandom.kt
    │                   │   │   │   └── RandomGenerator.kt
    │                   │   │   ├── regularization
    │                   │   │   │   ├── L1Regularization.kt
    │                   │   │   │   ├── L2Regularization.kt
    │                   │   │   │   ├── MaxNormRegularization.kt
    │                   │   │   │   └── ParamsRegularization.kt
    │                   │   │   └── updatemethods
    │                   │   │       ├── UpdateMethod.kt
    │                   │   │       ├── UpdateMethodConfig.kt
    │                   │   │       ├── UpdaterSupportStructure.kt
    │                   │   │       ├── adagrad
    │                   │   │       │   ├── AdaGradMethod.kt
    │                   │   │       │   └── AdaGradStructure.kt
    │                   │   │       ├── adam
    │                   │   │       │   ├── ADAMMethod.kt
    │                   │   │       │   └── ADAMStructure.kt
    │                   │   │       ├── learningrate
    │                   │   │       │   ├── LearningRateMethod.kt
    │                   │   │       │   └── LearningRateStructure.kt
    │                   │   │       ├── momentum
    │                   │   │       │   ├── MomentumMethod.kt
    │                   │   │       │   └── MomentumStructure.kt
    │                   │   │       ├── nesterovmomentum
    │                   │   │       │   ├── NesterovMomentumMethod.kt
    │                   │   │       │   └── NesterovMomentumStructure.kt
    │                   │   │       ├── radam
    │                   │   │       │   └── RADAMMethod.kt
    │                   │   │       └── rmsprop
    │                   │   │           ├── RMSPropMethod.kt
    │                   │   │           └── RMSPropStructure.kt
    │                   │   ├── layers
    │                   │   │   ├── Layer.kt
    │                   │   │   ├── LayerFactory.kt
    │                   │   │   ├── LayerInterface.kt
    │                   │   │   ├── LayerParameters.kt
    │                   │   │   ├── LayerParametersFactory.kt
    │                   │   │   ├── LayerType.kt
    │                   │   │   ├── RecurrentStackedLayers.kt
    │                   │   │   ├── StackedLayers.kt
    │                   │   │   ├── StackedLayersParameters.kt
    │                   │   │   ├── StatesWindow.kt
    │                   │   │   ├── helpers
    │                   │   │   │   ├── BackwardHelper.kt
    │                   │   │   │   ├── ForwardHelper.kt
    │                   │   │   │   ├── ParamsErrorsCollector.kt
    │                   │   │   │   ├── RelevanceHelper.kt
    │                   │   │   │   └── RelevanceUtils.kt
    │                   │   │   └── models
    │                   │   │       ├── LinearParams.kt
    │                   │   │       ├── attention
    │                   │   │       │   ├── AttentionBackwardHelper.kt
    │                   │   │       │   ├── AttentionForwardHelper.kt
    │                   │   │       │   ├── AttentionLayer.kt
    │                   │   │       │   ├── attentionmechanism
    │                   │   │       │   │   ├── AttentionMechanismBackwardHelper.kt
    │                   │   │       │   │   ├── AttentionMechanismForwardHelper.kt
    │                   │   │       │   │   ├── AttentionMechanismLayer.kt
    │                   │   │       │   │   └── AttentionMechanismLayerParameters.kt
    │                   │   │       │   └── scaleddot
    │                   │   │       │       ├── ScaledDotAttentionBackwardHelper.kt
    │                   │   │       │       ├── ScaledDotAttentionForwardHelper.kt
    │                   │   │       │       ├── ScaledDotAttentionLayer.kt
    │                   │   │       │       └── ScaledDotAttentionLayerParameters.kt
    │                   │   │       ├── autoassociative
    │                   │   │       │   ├── NewRecirculationBackwardHelper.kt
    │                   │   │       │   ├── NewRecirculationForwardHelper.kt
    │                   │   │       │   ├── NewRecirculationLayer.kt
    │                   │   │       │   └── NewRecirculationLayerParameters.kt
    │                   │   │       ├── feedforward
    │                   │   │       │   ├── batchnorm
    │                   │   │       │   │   ├── BatchNormBackwardHelper.kt
    │                   │   │       │   │   ├── BatchNormForwardHelper.kt
    │                   │   │       │   │   ├── BatchNormLayer.kt
    │                   │   │       │   │   └── BatchNormLayerParameters.kt
    │                   │   │       │   ├── highway
    │                   │   │       │   │   ├── HighwayBackwardHelper.kt
    │                   │   │       │   │   ├── HighwayForwardHelper.kt
    │                   │   │       │   │   ├── HighwayLayer.kt
    │                   │   │       │   │   └── HighwayLayerParameters.kt
    │                   │   │       │   ├── norm
    │                   │   │       │   │   ├── NormBackwardHelper.kt
    │                   │   │       │   │   ├── NormForwardHelper.kt
    │                   │   │       │   │   ├── NormLayer.kt
    │                   │   │       │   │   └── NormLayerParameters.kt
    │                   │   │       │   ├── simple
    │                   │   │       │   │   ├── FeedforwardBackwardHelper.kt
    │                   │   │       │   │   ├── FeedforwardForwardHelper.kt
    │                   │   │       │   │   ├── FeedforwardLayer.kt
    │                   │   │       │   │   ├── FeedforwardLayerParameters.kt
    │                   │   │       │   │   └── FeedforwardRelevanceHelper.kt
    │                   │   │       │   └── squareddistance
    │                   │   │       │       ├── SquaredDistanceBackwardHelper.kt
    │                   │   │       │       ├── SquaredDistanceForwardHelper.kt
    │                   │   │       │       ├── SquaredDistanceLayer.kt
    │                   │   │       │       └── SquaredDistanceLayerParameters.kt
    │                   │   │       ├── merge
    │                   │   │       │   ├── MergeLayer.kt
    │                   │   │       │   ├── MergeLayerParameters.kt
    │                   │   │       │   ├── affine
    │                   │   │       │   │   ├── AffineBackwardHelper.kt
    │                   │   │       │   │   ├── AffineForwardHelper.kt
    │                   │   │       │   │   ├── AffineLayer.kt
    │                   │   │       │   │   └── AffineLayerParameters.kt
    │                   │   │       │   ├── avg
    │                   │   │       │   │   ├── AvgBackwardHelper.kt
    │                   │   │       │   │   ├── AvgForwardHelper.kt
    │                   │   │       │   │   ├── AvgLayer.kt
    │                   │   │       │   │   └── AvgLayerParameters.kt
    │                   │   │       │   ├── biaffine
    │                   │   │       │   │   ├── BiaffineBackwardHelper.kt
    │                   │   │       │   │   ├── BiaffineForwardHelper.kt
    │                   │   │       │   │   ├── BiaffineLayer.kt
    │                   │   │       │   │   └── BiaffineLayerParameters.kt
    │                   │   │       │   ├── concat
    │                   │   │       │   │   ├── ConcatBackwardHelper.kt
    │                   │   │       │   │   ├── ConcatForwardHelper.kt
    │                   │   │       │   │   ├── ConcatLayer.kt
    │                   │   │       │   │   └── ConcatLayerParameters.kt
    │                   │   │       │   ├── concatff
    │                   │   │       │   │   ├── ConcatFFBackwardHelper.kt
    │                   │   │       │   │   ├── ConcatFFForwardHelper.kt
    │                   │   │       │   │   ├── ConcatFFLayer.kt
    │                   │   │       │   │   └── ConcatFFLayerParameters.kt
    │                   │   │       │   ├── cosinesimilarity
    │                   │   │       │   │   ├── CosineBackwardHelper.kt
    │                   │   │       │   │   ├── CosineForwardHelper.kt
    │                   │   │       │   │   ├── CosineLayer.kt
    │                   │   │       │   │   └── CosineLayerParameters.kt
    │                   │   │       │   ├── distance
    │                   │   │       │   │   ├── DistanceBackwardHelper.kt
    │                   │   │       │   │   ├── DistanceForwardHelper.kt
    │                   │   │       │   │   ├── DistanceLayer.kt
    │                   │   │       │   │   └── DistanceLayerParameters.kt
    │                   │   │       │   ├── mergeconfig
    │                   │   │       │   │   ├── AffineMerge.kt
    │                   │   │       │   │   ├── AvgMerge.kt
    │                   │   │       │   │   ├── BiaffineMerge.kt
    │                   │   │       │   │   ├── ConcatFeedforwardMerge.kt
    │                   │   │       │   │   ├── ConcatMerge.kt
    │                   │   │       │   │   ├── MergeConfiguration.kt
    │                   │   │       │   │   ├── ProductMerge.kt
    │                   │   │       │   │   ├── SumMerge.kt
    │                   │   │       │   │   └── VariableOutputMergeConfig.kt
    │                   │   │       │   ├── product
    │                   │   │       │   │   ├── ProductBackwardHelper.kt
    │                   │   │       │   │   ├── ProductForwardHelper.kt
    │                   │   │       │   │   ├── ProductLayer.kt
    │                   │   │       │   │   └── ProductLayerParameters.kt
    │                   │   │       │   ├── sub
    │                   │   │       │   │   ├── SubBackwardHelper.kt
    │                   │   │       │   │   ├── SubForwardHelper.kt
    │                   │   │       │   │   ├── SubLayer.kt
    │                   │   │       │   │   └── SubLayerParameters.kt
    │                   │   │       │   └── sum
    │                   │   │       │       ├── SumBackwardHelper.kt
    │                   │   │       │       ├── SumForwardHelper.kt
    │                   │   │       │       ├── SumLayer.kt
    │                   │   │       │       └── SumLayerParameters.kt
    │                   │   │       └── recurrent
    │                   │   │           ├── GatedRecurrentLayer.kt
    │                   │   │           ├── GatedRecurrentRelevanceHelper.kt
    │                   │   │           ├── LayersWindow.kt
    │                   │   │           ├── RecurrentLayer.kt
    │                   │   │           ├── RecurrentLayerUnit.kt
    │                   │   │           ├── RecurrentLinearParams.kt
    │                   │   │           ├── RecurrentRelevanceHelper.kt
    │                   │   │           ├── cfn
    │                   │   │           │   ├── CFNBackwardHelper.kt
    │                   │   │           │   ├── CFNForwardHelper.kt
    │                   │   │           │   ├── CFNLayer.kt
    │                   │   │           │   └── CFNLayerParameters.kt
    │                   │   │           ├── deltarnn
    │                   │   │           │   ├── DeltaRNNBackwardHelper.kt
    │                   │   │           │   ├── DeltaRNNForwardHelper.kt
    │                   │   │           │   ├── DeltaRNNLayer.kt
    │                   │   │           │   ├── DeltaRNNLayerParameters.kt
    │                   │   │           │   ├── DeltaRNNRelevanceHelper.kt
    │                   │   │           │   └── DeltaRNNRelevanceSupport.kt
    │                   │   │           ├── gru
    │                   │   │           │   ├── GRUBackwardHelper.kt
    │                   │   │           │   ├── GRUForwardHelper.kt
    │                   │   │           │   ├── GRULayer.kt
    │                   │   │           │   └── GRULayerParameters.kt
    │                   │   │           ├── indrnn
    │                   │   │           │   ├── IndRNNBackwardHelper.kt
    │                   │   │           │   ├── IndRNNForwardHelper.kt
    │                   │   │           │   ├── IndRNNLayer.kt
    │                   │   │           │   └── IndRNNLayerParameters.kt
    │                   │   │           ├── lstm
    │                   │   │           │   ├── LSTMBackwardHelper.kt
    │                   │   │           │   ├── LSTMForwardHelper.kt
    │                   │   │           │   ├── LSTMLayer.kt
    │                   │   │           │   └── LSTMLayerParameters.kt
    │                   │   │           ├── ltm
    │                   │   │           │   ├── LTMBackwardHelper.kt
    │                   │   │           │   ├── LTMForwardHelper.kt
    │                   │   │           │   ├── LTMLayer.kt
    │                   │   │           │   └── LTMLayerParameters.kt
    │                   │   │           ├── ran
    │                   │   │           │   ├── RANBackwardHelper.kt
    │                   │   │           │   ├── RANForwardHelper.kt
    │                   │   │           │   ├── RANLayer.kt
    │                   │   │           │   ├── RANLayerParameters.kt
    │                   │   │           │   └── RANRelevanceHelper.kt
    │                   │   │           ├── simple
    │                   │   │           │   ├── SimpleRecurrentBackwardHelper.kt
    │                   │   │           │   ├── SimpleRecurrentForwardHelper.kt
    │                   │   │           │   ├── SimpleRecurrentLayer.kt
    │                   │   │           │   ├── SimpleRecurrentLayerParameters.kt
    │                   │   │           │   └── SimpleRecurrentRelevanceHelper.kt
    │                   │   │           └── tpr
    │                   │   │               ├── TPRBackwardHelper.kt
    │                   │   │               ├── TPRForwardHelper.kt
    │                   │   │               ├── TPRLayer.kt
    │                   │   │               └── TPRLayerParameters.kt
    │                   │   ├── neuralnetwork
    │                   │   │   └── preset
    │                   │   │       ├── CFN.kt
    │                   │   │       ├── DeltaRNN.kt
    │                   │   │       ├── FeedforwardNeuralNetwork.kt
    │                   │   │       ├── GRUNeuralNetwork.kt
    │                   │   │       ├── GenericNeuralNetwork.kt
    │                   │   │       ├── HighwayNeuralNetwork.kt
    │                   │   │       ├── IndRNNNeuralNetwork.kt
    │                   │   │       ├── LSTMNeuralNetwork.kt
    │                   │   │       ├── LTMNeuralNetwork.kt
    │                   │   │       ├── RANNeuralNetwork.kt
    │                   │   │       └── SimpleRecurrentNeuralNetwork.kt
    │                   │   ├── neuralprocessor
    │                   │   │   ├── ChainProcessor.kt
    │                   │   │   ├── NeuralProcessor.kt
    │                   │   │   ├── batchfeedforward
    │                   │   │   │   ├── BatchFeedforwardProcessor.kt
    │                   │   │   │   └── MultiBatchFeedforwardProcessor.kt
    │                   │   │   ├── embeddingsprocessor
    │                   │   │   │   ├── EmbeddingsProcessor.kt
    │                   │   │   │   └── EmbeddingsProcessorWithContext.kt
    │                   │   │   ├── feedforward
    │                   │   │   │   ├── FeedforwardNeuralProcessor.kt
    │                   │   │   │   └── FeedforwardNeuralProcessorsPool.kt
    │                   │   │   └── recurrent
    │                   │   │       ├── NNSequence.kt
    │                   │   │       ├── RANImportanceHelper.kt
    │                   │   │       ├── RecurrentNeuralProcessor.kt
    │                   │   │       └── RecurrentNeuralProcessorsPool.kt
    │                   │   └── optimizer
    │                   │       ├── Alias.kt
    │                   │       ├── Extensions.kt
    │                   │       ├── ParamsErrorsAccumulator.kt
    │                   │       ├── ParamsOptimizer.kt
    │                   │       └── ScheduledUpdater.kt
    │                   ├── deeplearning
    │                   │   ├── attention
    │                   │   │   ├── attentionnetwork
    │                   │   │   │   ├── AttentionNetwork.kt
    │                   │   │   │   ├── AttentionNetworkParameters.kt
    │                   │   │   │   └── AttentionNetworksPool.kt
    │                   │   │   ├── han
    │                   │   │   │   ├── ArrayListExtension.kt
    │                   │   │   │   ├── HAN.kt
    │                   │   │   │   ├── HANEncoder.kt
    │                   │   │   │   ├── HANEncodersPool.kt
    │                   │   │   │   ├── HANParameters.kt
    │                   │   │   │   ├── HierarchyGroup.kt
    │                   │   │   │   ├── HierarchyItem.kt
    │                   │   │   │   └── HierarchySequence.kt
    │                   │   │   ├── multihead
    │                   │   │   │   ├── MultiHeadAttentionNetwork.kt
    │                   │   │   │   └── MultiHeadAttentionParameters.kt
    │                   │   │   └── pointernetwork
    │                   │   │       ├── BackwardHelper.kt
    │                   │   │       ├── ForwardHelper.kt
    │                   │   │       ├── PointerNetworkModel.kt
    │                   │   │       ├── PointerNetworkParameters.kt
    │                   │   │       └── PointerNetworkProcessor.kt
    │                   │   ├── birnn
    │                   │   │   ├── BiRNN.kt
    │                   │   │   ├── BiRNNConfig.kt
    │                   │   │   ├── BiRNNEncoder.kt
    │                   │   │   ├── BiRNNEncodersPool.kt
    │                   │   │   ├── BiRNNParameters.kt
    │                   │   │   ├── BiRNNUtils.kt
    │                   │   │   └── deepbirnn
    │                   │   │       ├── DeepBiRNN.kt
    │                   │   │       ├── DeepBiRNNEncoder.kt
    │                   │   │       └── DeepBiRNNParameters.kt
    │                   │   ├── multitasknetwork
    │                   │   │   ├── MultiTaskNetwork.kt
    │                   │   │   ├── MultiTaskNetworkConfig.kt
    │                   │   │   ├── MultiTaskNetworkModel.kt
    │                   │   │   └── MultitaskNetworksPool.kt
    │                   │   ├── sequenceencoder
    │                   │   │   └── FOFE.kt
    │                   │   └── transformers
    │                   │       ├── BERT.kt
    │                   │       ├── BERTBaseImportHelper.kt
    │                   │       ├── BERTLayer.kt
    │                   │       ├── BERTModel.kt
    │                   │       ├── BERTParameters.kt
    │                   │       └── BERTTrainer.kt
    │                   ├── helpers
    │                   │   ├── Counter.kt
    │                   │   ├── Evaluator.kt
    │                   │   ├── Statistics.kt
    │                   │   └── Trainer.kt
    │                   ├── simplemath
    │                   │   ├── SimpleMath.kt
    │                   │   └── ndarray
    │                   │       ├── NDArray.kt
    │                   │       ├── NDArrayFactory.kt
    │                   │       ├── NDArrayMask.kt
    │                   │       ├── Shape.kt
    │                   │       ├── TypeAliases.kt
    │                   │       ├── dense
    │                   │       │   ├── DenseNDArray.kt
    │                   │       │   └── DenseNDArrayFactory.kt
    │                   │       ├── sparse
    │                   │       │   ├── SparseNDArray.kt
    │                   │       │   └── SparseNDArrayFactory.kt
    │                   │       └── sparsebinary
    │                   │           ├── SparseBinaryNDArray.kt
    │                   │           └── SparseBinaryNDArrayFactory.kt
    │                   └── utils
    │                       └── scheduling
    │                           ├── BatchScheduling.kt
    │                           ├── EpochScheduling.kt
    │                           └── ExampleScheduling.kt
    └── test
        ├── kotlin
        │   ├── core
        │   │   ├── arrays
        │   │   │   ├── ActivableArraySpec.kt
        │   │   │   ├── AugmentedArraySpec.kt
        │   │   │   ├── DistributionArraySpec.kt
        │   │   │   └── ParamsArraySpec.kt
        │   │   ├── attention
        │   │   │   ├── AttentionLayerStructureSpec.kt
        │   │   │   ├── AttentionLayerUtils.kt
        │   │   │   ├── AttentionParametersSpec.kt
        │   │   │   └── scaleddot
        │   │   │       ├── ScaledDotAttentionLayerSpec.kt
        │   │   │       ├── ScaledDotAttentionLayerUtils.kt
        │   │   │       └── ScaledDotAttentionParametersSpec.kt
        │   │   ├── functionalities
        │   │   │   ├── activations
        │   │   │   │   ├── CeLUSpec.kt
        │   │   │   │   ├── ELUSpec.kt
        │   │   │   │   ├── GeLUSpec.kt
        │   │   │   │   ├── HardSigmoidSpec.kt
        │   │   │   │   ├── HardTanhSpec.kt
        │   │   │   │   ├── LeakyRELUSpec.kt
        │   │   │   │   ├── ReLUSpec.kt
        │   │   │   │   ├── SeLUSpec.kt
        │   │   │   │   ├── SigmoidSpec.kt
        │   │   │   │   ├── SoftmaxSpec.kt
        │   │   │   │   ├── SoftplusSpec.kt
        │   │   │   │   ├── SoftshrinkSpec.kt
        │   │   │   │   ├── SoftsignSpec.kt
        │   │   │   │   ├── TanhSpec.kt
        │   │   │   │   └── ThresholdSpec.kt
        │   │   │   ├── decaymethods
        │   │   │   │   ├── ExponentialDecaySpec.kt
        │   │   │   │   └── HyperbolicDecaySpec.kt
        │   │   │   ├── gradientclipping
        │   │   │   │   ├── GradientClippingSpec.kt
        │   │   │   │   └── GradientClippingUtils.kt
        │   │   │   ├── losses
        │   │   │   │   ├── AugmentedMSECalculatorSpec.kt
        │   │   │   │   ├── MSECalculatorSpec.kt
        │   │   │   │   ├── MulticlassMSECalculatorSpec.kt
        │   │   │   │   └── SoftmaxCrossEntropyCalculatorSpec.kt
        │   │   │   └── updatemethods
        │   │   │       ├── ADAMSpec.kt
        │   │   │       ├── AdaGradSpec.kt
        │   │   │       ├── LearningRateSpec.kt
        │   │   │       ├── MomentumSpec.kt
        │   │   │       ├── NesterovMomentumSpec.kt
        │   │   │       ├── RADAMSpec.kt
        │   │   │       ├── RMSPropSpec.kt
        │   │   │       └── UpdateMethodsUtils.kt
        │   │   ├── layers
        │   │   │   ├── LayerStructureSpec.kt
        │   │   │   ├── feedforward
        │   │   │   │   ├── batchnorm
        │   │   │   │   │   ├── BatchNormLayerParametersSpec.kt
        │   │   │   │   │   ├── BatchNormLayerStructureSpec.kt
        │   │   │   │   │   └── BatchNormLayerStructureUtils.kt
        │   │   │   │   ├── highway
        │   │   │   │   │   ├── HighwayLayerStructureSpec.kt
        │   │   │   │   │   └── HighwayLayerStructureUtils.kt
        │   │   │   │   ├── norm
        │   │   │   │   │   ├── NormLayerParametersSpec.kt
        │   │   │   │   │   ├── NormLayerStructureSpec.kt
        │   │   │   │   │   └── NormLayerStructureUtils.kt
        │   │   │   │   ├── simple
        │   │   │   │   │   ├── FeedforwardLayerParametersSpec.kt
        │   │   │   │   │   ├── FeedforwardLayerStructureSpec.kt
        │   │   │   │   │   └── FeedforwardLayerStructureUtils.kt
        │   │   │   │   └── squareddistance
        │   │   │   │       ├── SquaredDistanceLayerStructureSpec.kt
        │   │   │   │       └── SquaredDistanceLayerUtils.kt
        │   │   │   ├── merge
        │   │   │   │   ├── affine
        │   │   │   │   │   ├── AffineLayerParametersSpec.kt
        │   │   │   │   │   ├── AffineLayerStructureSpec.kt
        │   │   │   │   │   └── AffineLayerUtils.kt
        │   │   │   │   ├── avg
        │   │   │   │   │   ├── AvgLayerStructureSpec.kt
        │   │   │   │   │   └── AvgLayerUtils.kt
        │   │   │   │   ├── biaffine
        │   │   │   │   │   ├── BiaffineLayerParametersSpec.kt
        │   │   │   │   │   ├── BiaffineLayerStructureSpec.kt
        │   │   │   │   │   └── BiaffineLayerUtils.kt
        │   │   │   │   ├── concat
        │   │   │   │   │   ├── ConcatLayerStructureSpec.kt
        │   │   │   │   │   └── ConcatLayerUtils.kt
        │   │   │   │   ├── concatff
        │   │   │   │   │   ├── ConcatFFLayerStructureSpec.kt
        │   │   │   │   │   └── ConcatFFLayerUtils.kt
        │   │   │   │   ├── cosinesimilarity
        │   │   │   │   │   ├── CosineLayerStructureSpec.kt
        │   │   │   │   │   └── CosineLayerUtils.kt
        │   │   │   │   ├── distance
        │   │   │   │   │   ├── DistanceLayerStructureSpec.kt
        │   │   │   │   │   └── DistanceLayerUtils.kt
        │   │   │   │   ├── product
        │   │   │   │   │   ├── ProductLayerStructureSpec.kt
        │   │   │   │   │   └── ProductLayerUtils.kt
        │   │   │   │   ├── sub
        │   │   │   │   │   ├── SubLayerStructureSpec.kt
        │   │   │   │   │   └── SubLayerUtils.kt
        │   │   │   │   └── sum
        │   │   │   │       ├── SumLayerStructureSpec.kt
        │   │   │   │       └── SumLayerUtils.kt
        │   │   │   └── recurrent
        │   │   │       ├── cfn
        │   │   │       │   ├── CFNLayerParametersSpec.kt
        │   │   │       │   ├── CFNLayerStructureSpec.kt
        │   │   │       │   ├── CFNLayerStructureUtils.kt
        │   │   │       │   └── CFNLayersWindow.kt
        │   │   │       ├── deltarnn
        │   │   │       │   ├── DeltaLayersWindow.kt
        │   │   │       │   ├── DeltaRNNLayerStructureSpec.kt
        │   │   │       │   └── DeltaRNNLayerStructureUtils.kt
        │   │   │       ├── gru
        │   │   │       │   ├── GRULayerParametersSpec.kt
        │   │   │       │   ├── GRULayerStructureSpec.kt
        │   │   │       │   ├── GRULayerStructureUtils.kt
        │   │   │       │   └── GRULayersWindow.kt
        │   │   │       ├── indrnn
        │   │   │       │   ├── IndRNNLayerStructureSpec.kt
        │   │   │       │   ├── IndRNNLayerStructureUtils.kt
        │   │   │       │   └── IndRNNLayersWindow.kt
        │   │   │       ├── lstm
        │   │   │       │   ├── LSTMLayerParametersSpec.kt
        │   │   │       │   ├── LSTMLayerStructureSpec.kt
        │   │   │       │   ├── LSTMLayerStructureUtils.kt
        │   │   │       │   └── LSTMLayersWindow.kt
        │   │   │       ├── ltm
        │   │   │       │   ├── LTMLayerParametersSpec.kt
        │   │   │       │   ├── LTMLayerStructureSpec.kt
        │   │   │       │   ├── LTMLayerStructureUtils.kt
        │   │   │       │   └── LTMLayersWindow.kt
        │   │   │       ├── ran
        │   │   │       │   ├── RANLayerParametersSpec.kt
        │   │   │       │   ├── RANLayerStructureSpec.kt
        │   │   │       │   ├── RANLayerStructureUtils.kt
        │   │   │       │   └── RANLayersWindow.kt
        │   │   │       ├── simple
        │   │   │       │   ├── SimpleRecurrentLayerParametersSpec.kt
        │   │   │       │   ├── SimpleRecurrentLayerStructureSpec.kt
        │   │   │       │   ├── SimpleRecurrentLayerStructureUtils.kt
        │   │   │       │   └── SimpleRecurrentLayersWindow.kt
        │   │   │       └── tpr
        │   │   │           ├── TPRLayerParametersSpec.kt
        │   │   │           ├── TPRLayerStructureSpec.kt
        │   │   │           ├── TPRLayerStructureUtils.kt
        │   │   │           └── TPRLayersWindow.kt
        │   │   ├── neuralnetwork
        │   │   │   ├── FeedforwardNetworkStructureSpec.kt
        │   │   │   ├── NeuralNetworkSpec.kt
        │   │   │   ├── RecurrentNetworkStructureSpec.kt
        │   │   │   ├── preset
        │   │   │   │   ├── CFNSpec.kt
        │   │   │   │   ├── DeltaRNNSpec.kt
        │   │   │   │   ├── FeedforwardSpec.kt
        │   │   │   │   ├── GRUSpec.kt
        │   │   │   │   ├── LSTMSpec.kt
        │   │   │   │   ├── RANSpec.kt
        │   │   │   │   └── SimpleRecurrentSpec.kt
        │   │   │   └── utils
        │   │   │       ├── FeedforwardNetworkStructureUtils.kt
        │   │   │       ├── RecurrentNetworkStructureUtils.kt
        │   │   │       └── SerializedNetwork.kt
        │   │   ├── neuralprocessor
        │   │   │   ├── batchfeedforward
        │   │   │   │   ├── BatchFeedforwardProcessorSpec.kt
        │   │   │   │   └── BatchFeedforwardUtils.kt
        │   │   │   └── feedforward
        │   │   │       ├── FeedforwardNeuralProcessorSpec.kt
        │   │   │       └── NeuralProcessorsPoolSpec.kt
        │   │   └── optimizer
        │   │       ├── ParamsErrorsAccumulatorSpec.kt
        │   │       ├── ParamsErrorsAccumulatorUtils.kt
        │   │       ├── ParamsOptimizerSpec.kt
        │   │       └── ParamsOptimizerUtils.kt
        │   ├── deeplearning
        │   │   ├── attention
        │   │   │   ├── AttentionNetworkSpec.kt
        │   │   │   ├── AttentionNetworkUtils.kt
        │   │   │   └── HANSpec.kt
        │   │   └── birnn
        │   │       ├── BiRNNEncoderSpec.kt
        │   │       ├── BiRNNUtilsSpec.kt
        │   │       └── utils
        │   │           └── BiRNNEncoderUtils.kt
        │   └── ndarray
        │       ├── DenseNDArraySpec.kt
        │       ├── ShapeSpec.kt
        │       ├── SparseBinaryNDArraySpec.kt
        │       └── SparseNDArraySpec.kt
        └── resources
            └── mockito-extensions
                └── org.mockito.plugins.MockMaker

--------------------------------------------------------------------------------
/.gitignore:
--------------------------------------------------------------------------------
### Examples Configuration ###
examples/config/*
!examples/config/configuration.yaml.example

### Intellij ###
.idea/
/out/

### Intellij Patch ###
*.iml

### Maven ###
target/
pom.xml.tag
pom.xml.releaseBackup
pom.xml.versionsBackup
pom.xml.next
release.properties
dependency-reduced-pom.xml
buildNumber.properties
.mvn/timing.properties

# Avoid ignoring Maven wrapper jar file (.jar files are usually ignored)
!/.mvn/wrapper/maven-wrapper.jar

--------------------------------------------------------------------------------
/.travis.yml:
--------------------------------------------------------------------------------
language: java

sudo: false

addons:
  apt:
    packages:
      - oracle-java8-installer
      - gfortran
      - libgfortran3

os:
  - linux

dist: trusty

jdk:
  - oraclejdk8

install: true

script: mvn test compile -B -Dmaven.javadoc.skip=true

--------------------------------------------------------------------------------
/docs/attentive_recurrent_network_test.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/attentive_recurrent_network_test.ods

--------------------------------------------------------------------------------
/docs/batch_feedforward_test.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/batch_feedforward_test.ods

--------------------------------------------------------------------------------
/docs/birnn_test.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/birnn_test.ods

--------------------------------------------------------------------------------
/docs/feedforward_network_test.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/feedforward_network_test.ods
--------------------------------------------------------------------------------
/docs/layers/attention_layer_test.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/attention_layer_test.ods

--------------------------------------------------------------------------------
/docs/layers/batch_norm_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/batch_norm_tests.ods

--------------------------------------------------------------------------------
/docs/layers/cfn_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/cfn_tests.ods

--------------------------------------------------------------------------------
/docs/layers/delta_rnn_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/delta_rnn_tests.ods

--------------------------------------------------------------------------------
/docs/layers/feedforward_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/feedforward_tests.ods

--------------------------------------------------------------------------------
/docs/layers/gru_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/gru_tests.ods

--------------------------------------------------------------------------------
/docs/layers/highway_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/highway_tests.ods

--------------------------------------------------------------------------------
/docs/layers/indrnn_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/indrnn_tests.ods

--------------------------------------------------------------------------------
/docs/layers/lstm_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/lstm_tests.ods

--------------------------------------------------------------------------------
/docs/layers/ltm_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/ltm_tests.ods

--------------------------------------------------------------------------------
/docs/layers/merge_layers_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/merge_layers_tests.ods
--------------------------------------------------------------------------------
/docs/layers/norm_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/norm_tests.ods

--------------------------------------------------------------------------------
/docs/layers/ran_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/ran_tests.ods

--------------------------------------------------------------------------------
/docs/layers/scaled_dot_attention_layer_test.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/scaled_dot_attention_layer_test.ods

--------------------------------------------------------------------------------
/docs/layers/simple_recurrent_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/layers/simple_recurrent_tests.ods

--------------------------------------------------------------------------------
/docs/multiprediction_scorer_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/multiprediction_scorer_tests.ods

--------------------------------------------------------------------------------
/docs/tree_rnn_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/tree_rnn_tests.ods

--------------------------------------------------------------------------------
/docs/update_methods_tests.ods:
--------------------------------------------------------------------------------
https://raw.githubusercontent.com/KotlinNLP/SimpleDNN/a167008eda2c766df836f2baee01d23beab32c59/docs/update_methods_tests.ods

--------------------------------------------------------------------------------
/examples/SerializeLMDBEmbeddings.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

import com.kotlinnlp.simplednn.core.embeddings.lmdb.EmbeddingsStorage

/**
 * Import the embeddings read from a given file into an LMDB storage.
 *
 * @param args the command line arguments: the path of the LMDB storage and the path of the embeddings file
 */
fun main(args: Array<String>) = EmbeddingsStorage(args[0], readOnly = false).use {
  it.load(args[1], verbose = true)
}
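Reading the storage back is symmetric. A minimal sketch, relying only on the EmbeddingsStorage API that already appears in this repository (`use` and `embeddingsSize`); the function name is illustrative:

import com.kotlinnlp.simplednn.core.embeddings.lmdb.EmbeddingsStorage

// Open the LMDB storage read-only and inspect it.
fun inspectStorage(path: String) = EmbeddingsStorage(path, readOnly = true).use {
  println("Stored embeddings have size ${it.embeddingsSize}.")
}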
--------------------------------------------------------------------------------
/examples/bert/convert/CommandLineArguments.kt:
--------------------------------------------------------------------------------
/* Copyright 2020-present Simone Cangialosi. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 * -----------------------------------------------------------------------------*/

package bert.convert

import com.xenomachina.argparser.ArgParser

/**
 * The interpreter of command line arguments.
 *
 * @param args the array of command line arguments
 */
internal class CommandLineArguments(args: Array<String>) {

  /**
   * The parser of the string arguments.
   */
  private val parser = ArgParser(args)

  /**
   * The file path of the input model.
   */
  val inputModelPath: String by parser.storing(
    "-i",
    "--input",
    help="the file path of the input model"
  )

  /**
   * The path of the file in which to serialize the output model.
   */
  val outputModelPath: String by parser.storing(
    "-o",
    "--output",
    help="the path of the file in which to serialize the output model"
  )

  /**
   * The path of the vocabulary used to train the model.
   */
  val vocabPath: String by parser.storing(
    "-v",
    "--vocabulary",
    help="the path of the vocabulary used to train the model"
  )

  /**
   * The number of attention heads.
   */
  val numOfHeads: Int by parser.storing(
    "-a",
    "--attention-heads",
    help="the number of attention heads"
  ) { toInt() }

  /**
   * Force parsing all arguments (only read ones are parsed by default).
   */
  init {
    this.parser.force()
  }
}
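The parsing contract above can be exercised directly from code in the same package; a sketch with illustrative values (the flags are the ones declared above):

// Hypothetical sanity check, not part of the repository.
fun demoParsing() {
  val parsed = CommandLineArguments(arrayOf("-i", "params.txt", "-o", "model.bin", "-v", "vocab.txt", "-a", "12"))
  check(parsed.numOfHeads == 12) // "-a" is converted with toInt()
}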
--------------------------------------------------------------------------------
/examples/bert/convert/ConvertModel.kt:
--------------------------------------------------------------------------------
/* Copyright 2020-present Simone Cangialosi. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package bert.convert

import com.kotlinnlp.simplednn.deeplearning.transformers.BERTBaseImportHelper
import com.kotlinnlp.simplednn.deeplearning.transformers.BERTModel
import com.kotlinnlp.utils.DictionarySet
import com.xenomachina.argparser.mainBody
import java.io.File
import java.io.FileOutputStream

/**
 * Build a [BERTModel] from a file of named parameters and serialize it to file.
 *
 * Launch with the '-h' option for help about the command line arguments.
 */
fun main(args: Array<String>) = mainBody {

  val parsedArgs = CommandLineArguments(args)

  val bertModel = BERTBaseImportHelper.buildModel(
    params = parsedArgs.inputModelPath.let {
      println("Reading parameters from '$it'...")
      BERTBaseImportHelper.readParams(filename = it, numOfHeads = parsedArgs.numOfHeads)
    },
    vocab = parsedArgs.vocabPath.let {
      println("Reading vocabulary from '$it'...")
      DictionarySet<String>().apply { File(it).forEachLine { line -> add(line.trim()) } }
    },
    numOfHeads = parsedArgs.numOfHeads)

  parsedArgs.outputModelPath.let {
    println("Serializing the model to '$it'...")
    bertModel.dump(FileOutputStream(File(it)))
  }
}
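Loading the serialized model back is not shown in this file. A sketch, assuming [BERTModel] exposes a load(InputStream) companion function as the counterpart of the dump(OutputStream) call above (an assumption, not confirmed by this excerpt):

import com.kotlinnlp.simplednn.deeplearning.transformers.BERTModel
import java.io.File
import java.io.FileInputStream

// Assumed API: BERTModel.load() as the inverse of BERTModel.dump().
fun loadModel(path: String): BERTModel = BERTModel.load(FileInputStream(File(path)))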
--------------------------------------------------------------------------------
/examples/bert/test/CommandLineArguments.kt:
--------------------------------------------------------------------------------
/* Copyright 2020-present Simone Cangialosi. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/.
 * -----------------------------------------------------------------------------*/

package bert.test

import com.xenomachina.argparser.ArgParser

/**
 * The interpreter of command line arguments.
 *
 * @param args the array of command line arguments
 */
internal class CommandLineArguments(args: Array<String>) {

  /**
   * The parser of the string arguments.
   */
  private val parser = ArgParser(args)

  /**
   * The file path of the serialized BERT model.
   */
  val bertModelPath: String by parser.storing(
    "-m",
    "--model",
    help="the file path of the serialized BERT model"
  )

  /**
   * Force parsing all arguments (only read ones are parsed by default).
   */
  init {
    this.parser.force()
  }
}

--------------------------------------------------------------------------------
/examples/config/configuration.yaml.example:
--------------------------------------------------------------------------------
mnist:
  datasets_paths:
    training: "/.json"
    validation: "/.json"
    test: "/.json"

mnist_sequence:
  datasets_paths:
    training: "/.jsonl"
    validation: "/.jsonl"
    test: "/.jsonl"

sparse_input:
  datasets_paths:
    training: "/.jsonl"
    validation: "/.jsonl"
    test: "/.jsonl"

progressive_sum:
  datasets_paths:
    training: "/.jsonl"
    validation: "/.jsonl"
    test: "/.jsonl"

han_classifier:
  datasets_paths:
    training: "/.jsonl"
    validation: "/.jsonl"
    test: "/.jsonl"

vectors_average:
  datasets_paths:
    training: "/.csv"
    validation: ""
    test: ""

--------------------------------------------------------------------------------
/examples/logicgates/NOTGate.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package logicgates

import utils.SimpleExample
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

fun main() {
  println("Start 'NOT Gate Test'")
  println("Accuracy (softmax): %.1f%%".format(100.0 * NOTGate.testAccuracyWithSoftmax()))
  println("Accuracy (sigmoid): %.1f%%".format(100.0 * NOTGate.testAccuracyWithSigmoid()))
  println("End.")
}

object NOTGate {

  /**
   * Test the accuracy of the gate using two output units with softmax activation.
   */
  fun testAccuracyWithSoftmax(): Double {

    val examples: ArrayList<SimpleExample<DenseNDArray>> = ArrayList()

    examples.addAll(listOf(
      SimpleExample(doubleArrayOf(0.0), doubleArrayOf(0.0, 1.0)),
      SimpleExample(doubleArrayOf(1.0), doubleArrayOf(1.0, 0.0))
    ))

    return GateTestUtils.testAccuracyWithSoftmax(inputSize = 1, examples = examples, epochs = 100)
  }

  /**
   * Test the accuracy of the gate using a single output unit with sigmoid activation.
   */
  fun testAccuracyWithSigmoid(): Double {

    val examples: ArrayList<SimpleExample<DenseNDArray>> = ArrayList()

    examples.addAll(listOf(
      SimpleExample(doubleArrayOf(0.0), doubleArrayOf(1.0)),
      SimpleExample(doubleArrayOf(1.0), doubleArrayOf(0.0))
    ))

    return GateTestUtils.testAccuracyWithSigmoid(inputSize = 1, examples = examples, epochs = 100)
  }
}
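The other gates under examples/logicgates follow the same pattern and differ only in their truth tables. As an illustration, an AND gate with a single sigmoid output could be built like this (a sketch mirroring NOTGate above, not a copy of ANDGate.kt):

object ANDGateSketch {

  fun testAccuracyWithSigmoid(): Double {

    // The four rows of the AND truth table, each mapped to a single 0/1 output.
    val examples: ArrayList<SimpleExample<DenseNDArray>> = ArrayList()

    examples.addAll(listOf(
      SimpleExample(doubleArrayOf(0.0, 0.0), doubleArrayOf(0.0)),
      SimpleExample(doubleArrayOf(0.0, 1.0), doubleArrayOf(0.0)),
      SimpleExample(doubleArrayOf(1.0, 0.0), doubleArrayOf(0.0)),
      SimpleExample(doubleArrayOf(1.0, 1.0), doubleArrayOf(1.0))
    ))

    return GateTestUtils.testAccuracyWithSigmoid(inputSize = 2, examples = examples, epochs = 100)
  }
}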
--------------------------------------------------------------------------------
/examples/mnist/helpers/MNISTSequenceExampleExtractor.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package mnist.helpers

import com.beust.klaxon.JsonArray
import com.beust.klaxon.JsonBase
import com.beust.klaxon.JsonObject
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory
import utils.SequenceExampleWithFinalOutput
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import utils.exampleextractor.ExampleExtractor

/**
 * An [ExampleExtractor] that reads a digit as a sequence of (deltaX, deltaY) points, with the digit class as final
 * gold output.
 */
class MNISTSequenceExampleExtractor(val outputSize: Int)
  : ExampleExtractor<SequenceExampleWithFinalOutput<DenseNDArray>> {

  /**
   * Extract a sequence example with final output from a JSON element.
   */
  override fun extract(jsonElement: JsonBase): SequenceExampleWithFinalOutput<DenseNDArray> {

    val jsonObject = jsonElement as JsonObject

    val outputGold = DenseNDArrayFactory.oneHotEncoder(length = this.outputSize, oneAt = jsonObject.int("digit")!!)
    val featuresList: List<DenseNDArray> = jsonObject.array<JsonArray<*>>("sequence_data")!!.map {
      val deltaX = (it[0] as Int).toDouble()
      val deltaY = (it[1] as Int).toDouble()
      DenseNDArrayFactory.arrayOf(doubleArrayOf(deltaX, deltaY))
    }

    return SequenceExampleWithFinalOutput(featuresList, outputGold)
  }
}

--------------------------------------------------------------------------------
/examples/mnist/helpers/MNISTSparseExampleExtractor.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package mnist.helpers

import com.beust.klaxon.JsonArray
import com.beust.klaxon.JsonBase
import utils.SimpleExample
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory
import com.kotlinnlp.simplednn.simplemath.ndarray.sparsebinary.SparseBinaryNDArray
import utils.exampleextractor.ExampleExtractor
import utils.readSparseBinaryNDArrayFromDense

/**
 * An [ExampleExtractor] that reads a 784-pixel digit image as a sparse binary array, with its class as gold output.
 */
class MNISTSparseExampleExtractor(val outputSize: Int) : ExampleExtractor<SimpleExample<SparseBinaryNDArray>> {

  /**
   * Extract a simple example with sparse binary features from a JSON element.
   */
  override fun extract(jsonElement: JsonBase): SimpleExample<SparseBinaryNDArray> {

    val jsonArray = jsonElement as JsonArray<*>

    val features: SparseBinaryNDArray = (jsonArray[0] as JsonArray<*>).readSparseBinaryNDArrayFromDense(size = 784)
    val outputGold = DenseNDArrayFactory.oneHotEncoder(length = outputSize, oneAt = jsonArray[1] as Int)

    return SimpleExample(features, outputGold)
  }
}

--------------------------------------------------------------------------------
/examples/utils/ReadArrayExtensions.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package utils

import com.beust.klaxon.JsonArray
import com.kotlinnlp.simplednn.simplemath.ndarray.Shape
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory
import com.kotlinnlp.simplednn.simplemath.ndarray.sparsebinary.SparseBinaryNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.sparsebinary.SparseBinaryNDArrayFactory

/**
 * Read a [DenseNDArray] from this JSON array of numbers.
 */
fun JsonArray<*>.readDenseNDArray(): DenseNDArray =
  DenseNDArrayFactory.arrayOf(DoubleArray(size = this.size, init = { i -> (this[i] as Number).toDouble() }))

/**
 * Read a [SparseBinaryNDArray] from this JSON array of active indices.
 */
fun JsonArray<*>.readSparseBinaryNDArray(size: Int): SparseBinaryNDArray =
  SparseBinaryNDArrayFactory.arrayOf(activeIndices = this.map { it as Int }.sorted(), shape = Shape(size))

/**
 * Read a [SparseBinaryNDArray] from this JSON array of dense values, considering active the values greater than 0.5.
 */
fun JsonArray<*>.readSparseBinaryNDArrayFromDense(size: Int): SparseBinaryNDArray {

  // -1 marks inactive positions; filter with >= 0 so that an active index 0 is not lost.
  val activeIndices: List<Int> =
    this.mapIndexed { index, it -> if (it as Double > 0.5) index else -1 }.filter { it >= 0 }

  return SparseBinaryNDArrayFactory.arrayOf(activeIndices = activeIndices.sorted(), shape = Shape(size))
}
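A usage sketch of the dense-to-sparse conversion above (assuming Klaxon's Parser API; the values are illustrative). Note that index 0 is kept when its value exceeds the 0.5 threshold:

import com.beust.klaxon.JsonArray
import com.beust.klaxon.Parser

fun demoSparseConversion() {
  // Parser.default() in recent Klaxon versions; older versions expose Parser() directly.
  val json = Parser.default().parse(StringBuilder("[0.9, 0.1, 0.7]")) as JsonArray<*>
  val sparse = json.readSparseBinaryNDArrayFromDense(size = 3) // active indices: [0, 2]
  println(sparse)
}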
--------------------------------------------------------------------------------
/examples/utils/exampleextractor/ClassificationBinaryOutputExampleExtractor.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package utils.exampleextractor

import com.beust.klaxon.JsonArray
import com.beust.klaxon.JsonBase
import utils.BinaryOutputExample
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import utils.readDenseNDArray

/**
 * An [ExampleExtractor] of [BinaryOutputExample]s with dense features.
 */
class ClassificationBinaryOutputExampleExtractor : ExampleExtractor<BinaryOutputExample<DenseNDArray>> {

  /**
   * Extract an example with a binary gold output from a JSON element.
   */
  override fun extract(jsonElement: JsonBase): BinaryOutputExample<DenseNDArray> {

    val jsonArray = jsonElement as JsonArray<*>

    val features: DenseNDArray = (jsonArray[0] as JsonArray<*>).readDenseNDArray()
    val goldIndex: Int = jsonArray[1] as Int

    return BinaryOutputExample(features, goldIndex)
  }
}

--------------------------------------------------------------------------------
/examples/utils/exampleextractor/ClassificationExampleExtractor.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package utils.exampleextractor

import com.beust.klaxon.JsonArray
import com.beust.klaxon.JsonBase
import utils.SimpleExample
import com.kotlinnlp.simplednn.simplemath.ndarray.Shape
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory
import utils.readDenseNDArray

/**
 * An [ExampleExtractor] of [SimpleExample]s with dense features and a one-hot gold output.
 */
class ClassificationExampleExtractor(val outputSize: Int) : ExampleExtractor<SimpleExample<DenseNDArray>> {

  /**
   * Extract a classification example from a JSON element.
   */
  override fun extract(jsonElement: JsonBase): SimpleExample<DenseNDArray> {

    val jsonArray = jsonElement as JsonArray<*>

    val features: DenseNDArray = (jsonArray[0] as JsonArray<*>).readDenseNDArray()
    val outputGold = DenseNDArrayFactory.zeros(Shape(this.outputSize))

    outputGold[jsonArray[1] as Int] = 1.0

    return SimpleExample(features, outputGold)
  }
}
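The zeros-then-set idiom above builds the same vector as the oneHotEncoder factory used by the other extractors; a minimal comparison, using only factories that appear in this repository:

import com.kotlinnlp.simplednn.simplemath.ndarray.Shape
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

fun demoOneHot() {
  val manual = DenseNDArrayFactory.zeros(Shape(4))
  manual[2] = 1.0 // set the gold class index
  val encoded = DenseNDArrayFactory.oneHotEncoder(length = 4, oneAt = 2)
  println(manual) // both print the same one-hot column vector
  println(encoded)
}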
--------------------------------------------------------------------------------
/examples/utils/exampleextractor/ClassificationSequenceExampleExtractor.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package utils.exampleextractor

import com.beust.klaxon.JsonArray
import com.beust.klaxon.JsonBase
import utils.SequenceExample
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * An [ExampleExtractor] of [SequenceExample]s, with a one-hot gold output per step.
 */
class ClassificationSequenceExampleExtractor(val outputSize: Int) : ExampleExtractor<SequenceExample<DenseNDArray>> {

  /**
   * Extract a sequence example from a JSON element.
   */
  override fun extract(jsonElement: JsonBase): SequenceExample<DenseNDArray> {

    val jsonArray = jsonElement as JsonArray<*>

    val featuresList = ArrayList<DenseNDArray>()
    val outputGoldList = ArrayList<DenseNDArray>()

    jsonArray.forEach {
      it as JsonArray<*>

      featuresList.add(DenseNDArrayFactory.arrayOf(doubleArrayOf(it[0] as Double)))
      outputGoldList.add(DenseNDArrayFactory.oneHotEncoder(length = this.outputSize, oneAt = it[1] as Int))
    }

    return SequenceExample(featuresList, outputGoldList)
  }
}

--------------------------------------------------------------------------------
/examples/utils/exampleextractor/ClassificationSparseExampleExtractor.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package utils.exampleextractor

import com.beust.klaxon.JsonArray
import com.beust.klaxon.JsonBase
import utils.SimpleExample
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory
import com.kotlinnlp.simplednn.simplemath.ndarray.sparsebinary.SparseBinaryNDArray
import utils.readSparseBinaryNDArray

/**
 * An [ExampleExtractor] of [SimpleExample]s with sparse binary features and a one-hot gold output.
 */
class ClassificationSparseExampleExtractor(
  val inputSize: Int,
  val outputSize: Int
) : ExampleExtractor<SimpleExample<SparseBinaryNDArray>> {

  /**
   * Extract a classification example with sparse binary features from a JSON element.
   */
  override fun extract(jsonElement: JsonBase): SimpleExample<SparseBinaryNDArray> {

    val jsonArray = jsonElement as JsonArray<*>

    val features: SparseBinaryNDArray = (jsonArray[0] as JsonArray<*>).readSparseBinaryNDArray(this.inputSize)
    val outputGold = DenseNDArrayFactory.oneHotEncoder(length = this.outputSize, oneAt = jsonArray[1] as Int)

    return SimpleExample(features, outputGold)
  }
}

--------------------------------------------------------------------------------
/examples/utils/exampleextractor/ExampleExtractor.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package utils.exampleextractor

import com.beust.klaxon.JsonBase
import utils.Example

/**
 * An extractor of [Example]s from JSON elements.
 */
interface ExampleExtractor<ExampleType : Example> {

  /**
   * Extract an example from a JSON element.
   *
   * @param jsonElement the JSON element from which to extract the example
   *
   * @return the extracted example
   */
  fun extract(jsonElement: JsonBase): ExampleType
}
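Implementing the interface for a new dataset format is mechanical. A hypothetical regression extractor (its name and JSON layout are illustrative), built only from types defined in this repository:

package utils.exampleextractor

import com.beust.klaxon.JsonArray
import com.beust.klaxon.JsonBase
import utils.SimpleExample
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import utils.readDenseNDArray

class RegressionExampleExtractor : ExampleExtractor<SimpleExample<DenseNDArray>> {

  override fun extract(jsonElement: JsonBase): SimpleExample<DenseNDArray> {

    val jsonArray = jsonElement as JsonArray<*>

    // Features and gold output are both dense vectors: [[x1, x2, ...], [y1, y2, ...]].
    val features: DenseNDArray = (jsonArray[0] as JsonArray<*>).readDenseNDArray()
    val outputGold: DenseNDArray = (jsonArray[1] as JsonArray<*>).readDenseNDArray()

    return SimpleExample(features, outputGold)
  }
}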
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/arrays/AugmentedArrayExtensions.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.arrays

import com.kotlinnlp.simplednn.core.layers.helpers.RelevanceUtils
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * Get the errors of the input of the unit. The errors of the output must be already set.
 *
 * @param w the weights
 *
 * @return the errors of the input of this unit
 */
fun AugmentedArray<DenseNDArray>.getInputErrors(w: NDArray<*>): DenseNDArray = this.errors.t.dot(w)

/**
 * Get the relevance of the input of the unit. The relevance of the output must be already set.
 *
 * @param x the input of the unit
 * @param cw the weights-contribution of the input to calculate the output
 *
 * @return the relevance of the input of the unit
 */
fun AugmentedArray<DenseNDArray>.getInputRelevance(x: DenseNDArray, cw: DenseNDArray): DenseNDArray =
  RelevanceUtils.calculateRelevanceOfArray(
    x = x,
    y = this.valuesNotActivated,
    yRelevance = this.relevance,
    contributions = cw
  )

--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/arrays/Norm1Array.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.arrays

import com.kotlinnlp.simplednn.simplemath.equals
import com.kotlinnlp.simplednn.simplemath.ndarray.*

/**
 * The [Norm1Array] is a wrapper of an [NDArray] in which values represent a vector with norm equal to 1.
 *
 * @property values the values as [NDArray]
 */
open class Norm1Array<NDArrayType : NDArray<NDArrayType>>(val values: NDArrayType) {

  init {
    require(equals(this.values.sum(), 1.0, tolerance = 1.0e-08)) { "Values sum must be equal to 1.0" }
    require(this.values.columns == 1) { "Values must be a column vector" }
  }

  /**
   * The length of this array.
   */
  val length: Int = this.values.length

  /**
   * Assign values to the array.
   *
   * @param values values to assign to this [Norm1Array]
   */
  open fun assignValues(values: NDArray<*>) {
    require(equals(values.sum(), 1.0, tolerance = 1.0e-08)) { "Values sum must be equal to 1.0" }

    this.values.assignValues(values)
  }

  /**
   * Clone this array.
   *
   * @return a clone of this [Norm1Array]
   */
  open fun clone(): Norm1Array<NDArrayType> = Norm1Array(values = this.values.copy())
}
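Any column vector whose values sum to 1.0 satisfies the require() checks above; a one-hot vector is the simplest case. A minimal construction sketch, assuming oneHotEncoder returns a column vector as it is used throughout this repository:

import com.kotlinnlp.simplednn.core.arrays.Norm1Array
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

fun demoNorm1Array() {
  // A one-hot column vector sums to 1.0, so both init checks pass.
  val p = Norm1Array(values = DenseNDArrayFactory.oneHotEncoder(length = 5, oneAt = 2))
  println(p.length) // 5
}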
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/embeddings/lmdb/EmbeddingsMap.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.embeddings.lmdb

import com.kotlinnlp.simplednn.core.embeddings.EmbeddingsMap
import com.kotlinnlp.simplednn.core.functionalities.initializers.GlorotInitializer
import com.kotlinnlp.simplednn.core.functionalities.initializers.Initializer

/**
 * @param storage the embeddings storage
 * @param initializer the initializer of the values of the embeddings (zeros if null, default: Glorot)
 * @param pseudoRandomDropout a Boolean indicating if embeddings must be dropped out with pseudo-random probability
 *                            (default = true)
 */
class EmbeddingsMap(
  storage: EmbeddingsStorage,
  initializer: Initializer? = GlorotInitializer(),
  pseudoRandomDropout: Boolean = true
) : EmbeddingsMap<String>(
  size = storage.embeddingsSize,
  initializer = initializer,
  pseudoRandomDropout = pseudoRandomDropout
) {
  override val embeddings = storage
}
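Wiring the two classes together, a minimal sketch (the function name is illustrative; the APIs are the ones used above and in examples/SerializeLMDBEmbeddings.kt):

import com.kotlinnlp.simplednn.core.embeddings.lmdb.EmbeddingsMap
import com.kotlinnlp.simplednn.core.embeddings.lmdb.EmbeddingsStorage

// Open a previously serialized LMDB storage read-only and expose it through the
// EmbeddingsMap interface, inheriting the size declared by the storage itself.
fun openEmbeddings(path: String): EmbeddingsMap =
  EmbeddingsMap(storage = EmbeddingsStorage(path, readOnly = true))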
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/CeLU.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.functionalities.activations

import kotlin.math.exp

/**
 * CeLU(x) = max(0, x) + min(0, α ∗ (exp(x / α) − 1))
 *
 * References
 * [Jonathan T. Barron, 2017, Continuously Differentiable Exponential Linear Units](https://arxiv.org/abs/1704.07483)
 *
 * @property alpha defines the decreasing exponential rate for the negative values (default = 1.0, with which the
 *                 function is identical to ELU)
 */
class CeLU(val alpha: Double = 1.0) : ScalarActivationFunction {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * Check if alpha is positive.
   */
  init {
    require(this.alpha > 0.0)
  }

  /**
   * Calculate the CeLU function in [x].
   *
   * @param x input
   *
   * @return f([x])
   */
  override fun f(x: Double): Double = if (x > 0.0) x else this.alpha * (exp(x / this.alpha) - 1.0)

  /**
   * Optimized derivative of the CeLU function, calculated with respect to the input already activated.
   *
   * @param fx the input already activated [f(x)]
   *
   * @return the CeLU derivative calculated in x
   */
  override fun dfOptimized(fx: Double): Double = if (fx > 0.0) 1.0 else (fx + this.alpha) / this.alpha
}

--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/HardSigmoid.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.functionalities.activations

/**
 * The HardSigmoid(x) activation function, transforming the values x into the range [0, 1].
 */
object HardSigmoid : ScalarActivationFunction {

  /**
   * Private val used to serialize the class (needed by Serializable).
   */
  @Suppress("unused")
  private const val serialVersionUID: Long = 1L

  /**
   * Calculate the activation function in [x].
   *
   * @param x input
   *
   * @return f([x])
   */
  override fun f(x: Double): Double = if (x > 2.5) 1.0 else if (x < -2.5) 0.0 else 0.2 * x + 0.5

  /**
   * Optimized derivative of the HardSigmoid function, calculated with respect to the input already activated.
   * The function is linear with slope 0.2 exactly where its output lies strictly inside (0, 1).
   *
   * @param fx the input already activated [f(x)]
   *
   * @return the HardSigmoid derivative calculated in x
   */
  override fun dfOptimized(fx: Double): Double = if (fx > 0.0 && fx < 1.0) 0.2 else 0.0
}
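Note that the dfOptimized methods of these activations take the already-activated value f(x), not x itself. A quick numeric check against a central finite difference, applicable to any activation above (here CeLU; the tolerance is chosen loosely):

import kotlin.math.abs

fun checkCeLUDerivative() {
  val act = CeLU(alpha = 1.0)
  val x = -0.3
  val eps = 1e-6
  val numeric = (act.f(x + eps) - act.f(x - eps)) / (2 * eps) // central difference
  val analytic = act.dfOptimized(act.f(x)) // note: fed with f(x), not x
  println(abs(numeric - analytic) < 1e-6) // expected: true
}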
24 | * 25 | * @param x input 26 | * 27 | * @return f([x]) 28 | */ 29 | override fun f(x: Double): Double = if (x > 1.0) 1.0 else if (x < -1.0) -1.0 else x 30 | 31 | /** 32 | * Optimized derivative of the HardTanh function, calculated respect to the input already activated. 33 | * 34 | * @param fx the input already activated [f(x)] 35 | * 36 | * @return the HardTanh derivative calculated in x 37 | */ 38 | override fun dfOptimized(fx: Double): Double = if (fx < 1.0 && fx > -1.0) 1.0 else 0.0 39 | } 40 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/LeakyRELU.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.activations 9 | 10 | /** 11 | * LeakyRELU(x) = (max(0, x) + min(0, slope * x)) 12 | * 13 | * @property slope defines the decreasing rate for the negative values. Defaults to 0.01 14 | * 15 | */ 16 | class LeakyRELU(val slope: Double = 0.01) : ScalarActivationFunction { 17 | 18 | companion object { 19 | 20 | /** 21 | * Private val used to serialize the class (needed by Serializable). 22 | */ 23 | @Suppress("unused") 24 | private const val serialVersionUID: Long = 1L 25 | } 26 | 27 | /** 28 | * Check if slope is positive 29 | */ 30 | init { 31 | require(this.slope > 0.0) 32 | } 33 | 34 | /** 35 | * Calculate the LeakyReLU function in [x]. 36 | * 37 | * @param x input 38 | * 39 | * @return f([x]) 40 | */ 41 | override fun f(x: Double): Double = if (x <= 0.0) this.slope * x else x 42 | 43 | /** 44 | * Optimized derivative of the LeakyReLU function, calculated respect to the input already activated. 45 | * 46 | * @param fx the input already activated [f(x)] 47 | * 48 | * @return the LeakyReLU derivative calculated in x 49 | */ 50 | override fun dfOptimized(fx: Double): Double = if (fx > 0.0) 1.0 else this.slope 51 | } 52 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/ReLU.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.activations 9 | 10 | /** 11 | * The Rectifier activation function clips each value x < 0 at 0, 12 | * transforming each value x into the range [0,∞] 13 | * 14 | * References 15 | * [Deep Sparse Rectifier Neural Networks](http://proceedings.mlr.press/v15/glorot11a/glorot11a.pdf) 16 | */ 17 | object ReLU : ScalarActivationFunction { 18 | 19 | /** 20 | * Private val used to serialize the class (needed by Serializable). 21 | */ 22 | @Suppress("unused") 23 | private const val serialVersionUID: Long = 1L 24 | 25 | /** 26 | * Calculate the ReLU function in [x]. 
27 | * 28 | * @param x input 29 | * 30 | * @return f([x]) 31 | */ 32 | override fun f(x: Double): Double = if (x <= 0.0) 0.0 else x 33 | 34 | /** 35 | * Optimized derivative of the ReLU function, calculated respect to the input already activated. 36 | * 37 | * @param fx the input already activated [f(x)] 38 | * 39 | * @return the ReLU derivative calculated in x 40 | */ 41 | override fun dfOptimized(fx: Double): Double = if (fx > 0.0) 1.0 else 0.0 42 | } 43 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/Sigmoid.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.activations 9 | 10 | import kotlin.math.exp 11 | 12 | /** 13 | * The sigmoid activation function σ(x) = 1 / (1 + e^(−x)) is an S-shaped function, 14 | * transforming each value x into the range [0, 1]. 15 | */ 16 | object Sigmoid : ScalarActivationFunction { 17 | 18 | /** 19 | * Private val used to serialize the class (needed by Serializable). 20 | */ 21 | @Suppress("unused") 22 | private const val serialVersionUID: Long = 1L 23 | 24 | /** 25 | * Calculate the Sigmoid function in [x]. 26 | * 27 | * @param x input 28 | * 29 | * @return f([x]) 30 | */ 31 | override fun f(x: Double): Double = 1.0 / (1.0 + exp(-x)) 32 | 33 | /** 34 | * Optimized derivative of the Sigmoid function, calculated respect to the input already activated. 35 | * 36 | * @param fx the input already activated [f(x)] 37 | * 38 | * @return the Sigmoid derivative calculated in x 39 | */ 40 | override fun dfOptimized(fx: Double): Double = fx * (1.0 - fx) 41 | } 42 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/Softshrink.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.activations 9 | 10 | /** 11 | * Softshrink(x) = 12 | * x - lambda, if x > lambda 13 | * x + lambda, if x < -lambda 14 | * 0, otherwise 15 | * 16 | * @property lambda the positive shrinkage threshold (defaults to 0.5) 17 | */ 18 | class Softshrink(val lambda: Double = 0.5) : ScalarActivationFunction { 19 | 20 | companion object { 21 | 22 | /** 23 | * Private val used to serialize the class (needed by Serializable). 24 | */ 25 | @Suppress("unused") 26 | private const val serialVersionUID: Long = 1L 27 | } 28 | 29 | /** 30 | * Check if lambda is positive 31 | */ 32 | init { 33 | require(this.lambda > 0.0) 34 | } 35 | 36 | /** 37 | * Calculate the Softshrink function in [x].
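// Editor's sketch, not a repository file: the Sigmoid derivative above relies on the
// identity σ'(x) = σ(x) * (1 − σ(x)), so it needs only the activated value. The probe
// points below are illustrative.
import kotlin.math.abs
import kotlin.math.exp

fun main() {
  val sigmoid = { x: Double -> 1.0 / (1.0 + exp(-x)) }
  for (x in listOf(-3.0, 0.0, 3.0)) {
    val fx = sigmoid(x)
    val direct = exp(-x) / ((1.0 + exp(-x)) * (1.0 + exp(-x)))  // σ'(x) from its closed form
    check(abs(fx * (1.0 - fx) - direct) < 1.0e-12)
  }
}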
38 | * 39 | * @param x input 40 | * 41 | * @return f([x]) 42 | */ 43 | override fun f(x: Double): Double = when { 44 | x > this.lambda -> x - this.lambda 45 | x < -this.lambda -> x + this.lambda 46 | else -> 0.0 47 | } 48 | 49 | /** 50 | * Optimized derivative of the Softshrink function, calculated respect to the input already activated. 51 | * 52 | * @param fx the input already activated [f(x)] 53 | * 54 | * @return the Softshrink derivative calculated in x 55 | */ 56 | override fun dfOptimized(fx: Double): Double = when { 57 | fx + this.lambda > this.lambda -> 1.0 58 | fx - this.lambda < -this.lambda -> 1.0 59 | else -> 0.0 60 | } 61 | } 62 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/Softsign.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.activations 9 | 10 | import kotlin.math.abs 11 | import kotlin.math.pow 12 | 13 | /** 14 | * The Softsign function can be considered an alternative to [Tanh], 15 | * transforming the values x into the range [−1, 1]. 16 | */ 17 | object Softsign : ScalarActivationFunction { 18 | 19 | /** 20 | * Private val used to serialize the class (needed by Serializable). 21 | */ 22 | @Suppress("unused") 23 | private const val serialVersionUID: Long = 1L 24 | 25 | /** 26 | * Calculate the activation function in [x]. 27 | * 28 | * @param x input 29 | * 30 | * @return f([x]) 31 | */ 32 | override fun f(x: Double): Double = x / (1.0 + abs(x)) 33 | 34 | /** 35 | * Optimized derivative of the Softsign function, calculated respect to the input already activated. 36 | * 37 | * @param fx the input already activated [f(x)] 38 | * 39 | * @return the Softsign derivative calculated in x 40 | */ 41 | override fun dfOptimized(fx: Double): Double = (1.0 - abs(fx)).pow(2.0) 42 | } 43 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/Tanh.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.activations 9 | 10 | import kotlin.math.pow 11 | import kotlin.math.tanh 12 | 13 | /** 14 | * The hyperbolic tangent tanh(x) activation function is an S-shaped function, 15 | * transforming the values x into the range [−1, 1]. 16 | */ 17 | object Tanh : ScalarActivationFunction { 18 | 19 | /** 20 | * Private val used to serialize the class (needed by Serializable). 21 | */ 22 | @Suppress("unused") 23 | private const val serialVersionUID: Long = 1L 24 | 25 | /** 26 | * Calculate the activation function in [x].
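// Editor's sketch, not a repository file: for Softshrink the activated value fx is zero
// exactly when |x| <= lambda, so the optimized derivative can branch on fx alone: 1.0
// outside the dead zone, 0.0 inside it, which is what the two `when` branches above
// express. Lambda and the probe values are illustrative.
fun main() {
  val lambda = 0.5
  val f = { x: Double ->
    when {
      x > lambda -> x - lambda
      x < -lambda -> x + lambda
      else -> 0.0
    }
  }
  val dfOptimized = { fx: Double -> if (fx != 0.0) 1.0 else 0.0 }  // equivalent compact form

  check(dfOptimized(f(2.0)) == 1.0)   // x > lambda
  check(dfOptimized(f(-2.0)) == 1.0)  // x < -lambda
  check(dfOptimized(f(0.25)) == 0.0)  // |x| <= lambda: inside the dead zone
}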
27 | * 28 | * @param x input 29 | * 30 | * @return f([x]) 31 | */ 32 | override fun f(x: Double): Double = tanh(x) 33 | 34 | /** 35 | * Optimized derivative of the Tanh function, calculated respect to the input already activated. 36 | * 37 | * @param fx the input already activated [f(x)] 38 | * 39 | * @return the Tanh derivative calculated in x 40 | */ 41 | override fun dfOptimized(fx: Double): Double = 1.0 - fx.pow(2.0) 42 | } 43 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/activations/Threshold.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.activations 9 | 10 | /** 11 | * Threshold(x) = 12 | * x, if x > threshold 13 | * value, otherwise 14 | * 15 | * @property threshold the threshold of the function 16 | * @property value the result of the threshold function if x <= [threshold] 17 | */ 18 | class Threshold(val threshold: Double = 0.1, val value: Double = 0.0) : ScalarActivationFunction { 19 | 20 | companion object { 21 | 22 | /** 23 | * Private val used to serialize the class (needed by Serializable). 24 | */ 25 | @Suppress("unused") 26 | private const val serialVersionUID: Long = 1L 27 | } 28 | 29 | /** 30 | * Calculate the activation function in [x]. 31 | * 32 | * @param x input 33 | * 34 | * @return f([x]) 35 | */ 36 | override fun f(x: Double): Double = if (x > this.threshold) x else this.value 37 | 38 | /** 39 | * Optimized derivative of the Threshold function, calculated respect to the input already activated. 40 | * 41 | * @param fx the input already activated [f(x)] 42 | * 43 | * @return the Threshold derivative calculated in x 44 | */ 45 | override fun dfOptimized(fx: Double): Double = if (fx > this.threshold) 1.0 else 0.0 // the branch of f returning [value] is constant, so its derivative is 0.0 46 | } 47 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/decaymethods/DecayMethod.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.decaymethods 9 | 10 | /** 11 | * An interface which defines the decay method used to decrease the learning rate of an UpdateMethod, given 12 | * a scheduled time step. 13 | */ 14 | interface DecayMethod { 15 | 16 | /** 17 | * Update the learning rate given a time step.
18 | * 19 | * @param learningRate the learning rate to decrease 20 | * @param timeStep the current time step 21 | * 22 | * @return the updated learning rate 23 | */ 24 | fun update(learningRate: Double, timeStep: Int): Double 25 | } 26 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/decaymethods/ExponentialDecay.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.decaymethods 9 | 10 | /** 11 | * ExponentialDecay defines an exponential decay depending on the time step 12 | * => LR = exp(((totalIterations - t) * log(LR) + log(LRfinal)) / (totalIterations - t + 1)) 13 | * 14 | * @property initLearningRate the initial learning rate (must be > [finalLearningRate]) 15 | * @property finalLearningRate the final value which the learning rate will reach (must be >= 0) 16 | * @property totalIterations total amount of iterations (must be >= 0) 17 | */ 18 | class ExponentialDecay( 19 | val initLearningRate: Double = 0.0, 20 | val finalLearningRate: Double = 0.0, 21 | val totalIterations: Int 22 | ) : DecayMethod { 23 | 24 | /** 25 | * Check that the initial learning rate is greater than the final one. 26 | */ 27 | init { require(this.initLearningRate > this.finalLearningRate) } 28 | 29 | /** 30 | * Update the learning rate given a time step. 31 | * 32 | * @param learningRate the learning rate to decrease 33 | * @param timeStep the current time step 34 | * 35 | * @return the updated learning rate 36 | */ 37 | override fun update(learningRate: Double, timeStep: Int): Double { 38 | return if (learningRate > this.finalLearningRate && timeStep > 1) { 39 | Math.exp( 40 | ((this.totalIterations - timeStep) * Math.log(learningRate) + Math.log(this.finalLearningRate)) 41 | / 42 | (this.totalIterations - timeStep + 1)) 43 | } else { 44 | learningRate 45 | } 46 | } 47 | } 48 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/decaymethods/HyperbolicDecay.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.decaymethods 9 | 10 | /** 11 | * HyperbolicDecay defines a hyperbolic decay depending on the time step => LR = LRinit / (1 + decay * t).
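// Editor's sketch, not a repository file: a DecayMethod is driven by feeding back the
// previous learning rate together with an increasing time step, as below. The constructor
// arguments and the number of steps are illustrative.
fun main() {
  val decay = HyperbolicDecay(decay = 0.1, initLearningRate = 0.01, finalLearningRate = 0.001)
  var learningRate = 0.01

  for (t in 1..10) {
    learningRate = decay.update(learningRate = learningRate, timeStep = t)
    println("t=$t -> learning rate $learningRate")
  }
}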
12 | * 13 | * @property decay the learning rate decay applied at each time step (must be >= 0) 14 | * @property initLearningRate the initial learning rate (must be > [finalLearningRate]) 15 | * @property finalLearningRate the final value which the learning rate will reach (must be >= 0) 16 | */ 17 | class HyperbolicDecay( 18 | val decay: Double, 19 | val initLearningRate: Double = 0.0, 20 | val finalLearningRate: Double = 0.0 21 | ) : DecayMethod { 22 | 23 | /** 24 | * Check that the initial learning rate is greater than the final one. 25 | */ 26 | init { require(this.initLearningRate > this.finalLearningRate) } 27 | 28 | /** 29 | * Update the learning rate given a time step. 30 | * 31 | * @param learningRate the learning rate to decrease 32 | * @param timeStep the current time step 33 | * 34 | * @return the updated learning rate 35 | */ 36 | override fun update(learningRate: Double, timeStep: Int): Double { 37 | return if (learningRate > this.finalLearningRate && this.decay > 0.0 && timeStep > 1) { 38 | this.initLearningRate / (1.0 + this.decay * timeStep) 39 | } else { 40 | learningRate 41 | } 42 | } 43 | } 44 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/initializers/ConstantInitializer.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.initializers 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | 12 | /** 13 | * An initializer of dense arrays with the same value for all the elements. 14 | * 15 | * @param value the initializing value 16 | */ 17 | class ConstantInitializer(val value: Double) : Initializer { 18 | 19 | companion object { 20 | 21 | /** 22 | * Private val used to serialize the class (needed by Serializable). 23 | */ 24 | @Suppress("unused") 25 | private const val serialVersionUID: Long = 1L 26 | } 27 | 28 | /** 29 | * Initialize the values of the given [array]. 30 | * 31 | * @param array a dense array 32 | */ 33 | override fun initialize(array: DenseNDArray) { 34 | array.assignValues(this.value) 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/initializers/Initializer.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.initializers 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | import java.io.Serializable 12 | 13 | /** 14 | * An initializer of the values of dense arrays. 15 | */ 16 | interface Initializer : Serializable { 17 | 18 | /** 19 | * Initialize the values of the given [array].
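// Editor's sketch, not a repository file: Initializer has a single method, so a custom
// scheme only needs to fill the given array. This hypothetical initializer assigns a
// constant scaled by the array length, using only DenseNDArray calls that appear
// elsewhere in this codebase (assignValues, length).
class InverseLengthInitializer : Initializer {

  companion object {
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  override fun initialize(array: DenseNDArray) {
    array.assignValues(1.0 / array.length)  // the same constant for all the elements
  }
}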
20 | * 21 | * @param array a dense array 22 | */ 23 | fun initialize(array: DenseNDArray) 24 | } 25 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/initializers/RandomInitializer.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.initializers 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.randomgenerators.RandomGenerator 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 12 | 13 | /** 14 | * An initializer of dense arrays with random values. 15 | * 16 | * @param randomGenerator a generator of random double numbers 17 | */ 18 | class RandomInitializer(val randomGenerator: RandomGenerator) : Initializer { 19 | 20 | companion object { 21 | 22 | /** 23 | * Private val used to serialize the class (needed by Serializable). 24 | */ 25 | @Suppress("unused") 26 | private const val serialVersionUID: Long = 1L 27 | } 28 | 29 | /** 30 | * Initialize the values of the given [array]. 31 | * 32 | * @param array a dense array 33 | */ 34 | override fun initialize(array: DenseNDArray) { 35 | array.randomize(this.randomGenerator) 36 | } 37 | } 38 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/losses/MSECalculator.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.losses 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | 12 | /** 13 | * Mean Squared Error calculator. 14 | */ 15 | open class MSECalculator : LossCalculator { 16 | 17 | /** 18 | * Calculate the loss between an output and its gold. 19 | * 20 | * @param output the output prediction 21 | * @param outputGold the expected output 22 | * 23 | * @return the loss within [output] and [outputGold] 24 | */ 25 | override fun calculateLoss(output: DenseNDArray, outputGold: DenseNDArray): DenseNDArray = 26 | output.sub(outputGold).assignPow(2.0).assignProd(0.5) 27 | 28 | /** 29 | * Calculate the errors between an output and its gold. 
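// Editor's sketch, not a repository file: the MSE loss above is 0.5 * (y − g)^2 per
// component, so the error (its derivative with respect to the output) is simply y − g.
// The scalar check below uses illustrative numbers.
import kotlin.math.abs

fun main() {
  val y = 0.8  // predicted value
  val g = 1.0  // gold value

  val loss = 0.5 * (y - g) * (y - g)  // 0.5 * (y − g)^2 = 0.02
  val error = y - g                   // d(loss)/dy = −0.2

  check(abs(loss - 0.02) < 1.0e-12)
  check(abs(error + 0.2) < 1.0e-12)
}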
30 | * 31 | * @param output the output prediction 32 | * @param outputGold the expected output 33 | * 34 | * @return the derivative of the loss between [output] and [outputGold] 35 | */ 36 | override fun calculateErrors(output: DenseNDArray, outputGold: DenseNDArray): DenseNDArray = 37 | output.sub(outputGold) 38 | } 39 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/losses/MulticlassMSECalculator.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.losses 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | 12 | /** 13 | * 14 | * Multiclass Mean Squared Error calculator. 15 | */ 16 | object MulticlassMSECalculator : MSECalculator() { 17 | 18 | /** 19 | * Calculate the errors between an output and its gold. 20 | * 21 | * @param output the output prediction 22 | * @param outputGold the expected output 23 | * 24 | * @return the derivative of the loss between [output] and [outputGold] 25 | */ 26 | override fun calculateErrors(output: DenseNDArray, outputGold: DenseNDArray): DenseNDArray { 27 | 28 | val lossDerivative = output.copy() 29 | 30 | (0 until output.length).forEach { i -> if (outputGold[i] == 1.0) lossDerivative[i] = output[i] - 1.0 } 31 | 32 | return lossDerivative 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/losses/NegativeLogProbability.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.losses 9 | 10 | import kotlin.math.exp 11 | import kotlin.math.ln 12 | 13 | /** 14 | * TODO: move the class into a more appropriate package (part of the 'loss' refactoring) 15 | * 16 | * @param predictions the predictions 17 | * @param goldIndex the index of the gold item in the predictions 18 | */ 19 | class NegativeLogProbability(private val predictions: List<Double>, private val goldIndex: Int) { 20 | 21 | /** 22 | * The sum of the exponentials.
23 | */ 24 | private val sumExp: Double by lazy { this.predictions.map { exp(it) }.sum() } 25 | 26 | /** 27 | * @return the loss 28 | */ 29 | fun f(): Double = -this.predictions[this.goldIndex] + ln(this.sumExp) 30 | 31 | /** 32 | * @return the gradients 33 | */ 34 | fun df(): List<Double> { 35 | 36 | val c = 1.0 / this.sumExp 37 | 38 | return this.predictions.mapIndexed { i, xi -> 39 | if (i == this.goldIndex) 40 | c * exp(xi) - 1.0 41 | else 42 | c * exp(xi) 43 | } 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/outputevaluation/ClassificationEvaluation.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.outputevaluation 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | 12 | /** 13 | * Evaluation function which compares output and output gold as one hot encoders 14 | */ 15 | object ClassificationEvaluation : OutputEvaluationFunction { 16 | 17 | /** 18 | * The evaluation function. 19 | * 20 | * @param output the output of a NeuralNetwork 21 | * @param outputGold the expected gold output 22 | * 23 | * @return a Boolean indicating whether the output must be considered equal to the gold or not 24 | */ 25 | override fun invoke(output: DenseNDArray, outputGold: DenseNDArray): Boolean { 26 | 27 | require(outputGold.isOneHotEncoder) { "outputGold should be a one hot encoder" } 28 | 29 | return output.argMaxIndex() == outputGold.argMaxIndex() 30 | } 31 | } 32 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/outputevaluation/MulticlassEvaluation.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.outputevaluation 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | 12 | /** 13 | * Evaluation function which returns true if all the binary outputs are equal to the gold binary outputs. 14 | */ 15 | object MulticlassEvaluation : OutputEvaluationFunction { 16 | 17 | /** 18 | * The evaluation function.
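// Editor's sketch, not a repository file: f() above equals the negative log of the gold
// class probability under a softmax of the predictions, and df() is the softmax minus a
// one-hot vector on the gold index, so its components sum to zero. The probe values are
// illustrative.
import kotlin.math.abs
import kotlin.math.exp
import kotlin.math.ln

fun main() {
  val predictions = listOf(1.0, 2.0, 0.5)
  val goldIndex = 1
  val nlp = NegativeLogProbability(predictions, goldIndex)

  val sumExp = predictions.map { exp(it) }.sum()
  val goldProb = exp(predictions[goldIndex]) / sumExp

  check(abs(nlp.f() - (-ln(goldProb))) < 1.0e-12)  // f() == −ln p(gold)
  check(abs(nlp.df().sum()) < 1.0e-12)             // the gradient sums to zero
}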
19 | * 20 | * @param output the output of a NeuralNetwork 21 | * @param outputGold the expected gold output 22 | * 23 | * @return a Boolean indicating whether the output must be considered equal to the gold or not 24 | */ 25 | override fun invoke(output: DenseNDArray, outputGold: DenseNDArray): Boolean { 26 | require(output.length == outputGold.length) { "output and outputGold must have the same dimension" } 27 | 28 | for (i in 0 until outputGold.length) { 29 | val outputInt = output[i].toInt() 30 | val outputGoldInt = outputGold[i].toInt() 31 | 32 | require(outputInt == 0 || outputInt == 1) { "non-binary output value" } 33 | require(outputGoldInt == 0 || outputGoldInt == 1) { "non-binary gold output value" } 34 | 35 | if (outputInt != outputGoldInt) { 36 | return false 37 | } 38 | } 39 | 40 | return true 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/outputevaluation/OutputEvaluationFunction.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.outputevaluation 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | 12 | /** 13 | * An interface which defines a function to evaluate whether the output of a network must be considered correct or not. 14 | */ 15 | interface OutputEvaluationFunction { 16 | 17 | /** 18 | * The evaluation function. 19 | * 20 | * @param output the output of a NeuralNetwork 21 | * @param outputGold the expected gold output 22 | * 23 | * @return a Boolean indicating whether the output must be considered equal to the gold or not 24 | */ 25 | operator fun invoke(output: DenseNDArray, outputGold: DenseNDArray): Boolean 26 | } 27 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/randomgenerators/BaseRandom.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2020-present Simone Cangialosi. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.randomgenerators 9 | 10 | import java.util.* 11 | 12 | /** 13 | * A generator of random numbers uniformly distributed in the range [0.0, 1.0]. 14 | * 15 | * @property enablePseudoRandom whether to use a pseudo-random generation with the given [seed] 16 | * @property seed seed used for the pseudo-random generation 17 | */ 18 | class BaseRandom(val enablePseudoRandom: Boolean = true, val seed: Long = 743) : RandomGenerator { 19 | 20 | companion object { 21 | 22 | /** 23 | * Private val used to serialize the class (needed by Serializable). 
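// Editor's sketch, not a repository file: the classification rule above counts a
// prediction as correct when its argmax matches the gold one-hot position; the multiclass
// rule instead requires every binary component to match. The arrays below are
// illustrative.
fun main() {
  val output = doubleArrayOf(0.1, 0.7, 0.2)
  val gold = doubleArrayOf(0.0, 1.0, 0.0)  // one-hot gold

  val argMax = { a: DoubleArray -> a.indices.maxByOrNull { i -> a[i] }!! }
  check(argMax(output) == argMax(gold))    // evaluated as a correct classification
}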
24 | */ 25 | @Suppress("unused") 26 | private const val serialVersionUID: Long = 1L 27 | } 28 | 29 | /** 30 | * A random numbers generator with a uniform distribution. 31 | */ 32 | private val rndGenerator = if (this.enablePseudoRandom) Random(this.seed) else Random() 33 | 34 | /** 35 | * @return a random value uniformly distributed in the range [0.0, 1.0] 36 | */ 37 | override fun next(): Double = this.rndGenerator.nextDouble() 38 | } 39 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/randomgenerators/FixedRangeRandom.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.randomgenerators 9 | 10 | import java.util.* 11 | 12 | /** 13 | * A generator of random numbers uniformly distributed in a closed range centered in 0.0 with a given radius. 14 | * 15 | * @property radius radius of the range 16 | * @property enablePseudoRandom whether to use a pseudo-random generation with the given [seed] 17 | * @property seed seed used for the pseudo-random generation 18 | */ 19 | class FixedRangeRandom( 20 | val radius: Double = 0.01, 21 | val enablePseudoRandom: Boolean = true, 22 | val seed: Long = 743 23 | ) : RandomGenerator { 24 | 25 | companion object { 26 | 27 | /** 28 | * Private val used to serialize the class (needed by Serializable). 29 | */ 30 | @Suppress("unused") 31 | private const val serialVersionUID: Long = 1L 32 | } 33 | 34 | /** 35 | * A random numbers generator with a uniform distribution. 36 | */ 37 | private val rndGenerator = if (this.enablePseudoRandom) Random(this.seed) else Random() 38 | 39 | /** 40 | * @return a random value uniformly distributed in the range [-[radius], [radius]] 41 | */ 42 | override fun next(): Double = (2.0 * this.rndGenerator.nextDouble() * this.radius) - this.radius 43 | } 44 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/randomgenerators/GaussianDistributedRandom.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.randomgenerators 9 | 10 | import java.util.* 11 | import kotlin.math.sqrt 12 | 13 | /** 14 | * A generator of random numbers with a Gaussian distribution. 15 | * 16 | * @property variance the variance of the distribution (e.g.
2.0 / n) 17 | * @property enablePseudoRandom whether to use a pseudo-random generation with the given [seed] 18 | * @property seed seed used for the pseudo-random generation 19 | */ 20 | class GaussianDistributedRandom( 21 | val variance: Double = 1.0, 22 | val enablePseudoRandom: Boolean = true, 23 | val seed: Long = 1 24 | ) : RandomGenerator { 25 | 26 | companion object { 27 | 28 | /** 29 | * Private val used to serialize the class (needed by Serializable). 30 | */ 31 | @Suppress("unused") 32 | private const val serialVersionUID: Long = 1L 33 | } 34 | 35 | /** 36 | * A random numbers generator with a uniform distribution. 37 | */ 38 | private val rndGenerator = if (enablePseudoRandom) Random(seed) else Random() 39 | 40 | /** 41 | * @return a random value generated following a Gaussian distribution 42 | */ 43 | override fun next(): Double = rndGenerator.nextGaussian() * sqrt(variance) 44 | } 45 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/randomgenerators/RandomGenerator.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.randomgenerators 9 | 10 | import java.io.Serializable 11 | 12 | /** 13 | * A generator of random numbers. 14 | */ 15 | interface RandomGenerator : Serializable { 16 | 17 | /** 18 | * Generate a new random number. 19 | * 20 | * @return a random number 21 | */ 22 | fun next(): Double 23 | } 24 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/regularization/L1Regularization.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.regularization 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | 12 | /** 13 | * L1 regularization method. 14 | * 15 | * @param lambda regularization parameter 16 | */ 17 | class L1Regularization(private val lambda: Double) : ParamsRegularization { 18 | 19 | /** 20 | * Apply the regularization to given parameters. 21 | * 22 | * w = w - sign(w) * lambda 23 | * 24 | * @param params the parameters to regularize 25 | */ 26 | override fun apply(params: ParamsArray) { 27 | params.values.assignSub(params.values.sign().assignProd(this.lambda)) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/regularization/L2Regularization.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 
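// Editor's sketch, not a repository file: the two rules above shrink weights differently.
// L1 subtracts a constant step sign(w) * lambda, while L2 rescales by (1 − lambda), so L1
// pushes small weights to exactly zero and L2 shrinks proportionally. Lambda and the
// weights are illustrative.
import kotlin.math.sign

fun main() {
  val lambda = 0.01
  val weights = doubleArrayOf(0.5, -0.2, 0.0)

  val afterL1 = weights.map { it - sign(it) * lambda }  // w − sign(w) * lambda
  val afterL2 = weights.map { it * (1.0 - lambda) }     // (1 − lambda) * w

  println("L1: $afterL1")  // approximately [0.49, -0.19, 0.0]
  println("L2: $afterL2")  // approximately [0.495, -0.198, 0.0]
}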
2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.regularization 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | 12 | /** 13 | * L2 regularization method. 14 | * 15 | * @param lambda regularization parameter 16 | */ 17 | class L2Regularization(private val lambda: Double) : ParamsRegularization { 18 | 19 | /** 20 | * Apply the regularization to given parameters. 21 | * 22 | * w = (1 - lambda) * w 23 | * 24 | * @param params the parameters to regularize 25 | */ 26 | override fun apply(params: ParamsArray) { 27 | params.values.assignProd(1 - this.lambda) 28 | } 29 | } 30 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/regularization/MaxNormRegularization.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.regularization 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | 12 | /** 13 | * Regularization method based on the Euclidean norm. 14 | */ 15 | object MaxNormRegularization : ParamsRegularization { 16 | 17 | /** 18 | * Apply the regularization to given parameters. 19 | * 20 | * @param params the parameters to regularize 21 | */ 22 | override fun apply(params: ParamsArray) { 23 | 24 | val norm2 = params.values.norm2() 25 | 26 | (0 until params.values.length) 27 | .filter { params.values[it] > norm2 } 28 | .forEach { params.values[it] = norm2 } 29 | } 30 | } 31 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/regularization/ParamsRegularization.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.regularization 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | 12 | /** 13 | * A parameters regularization method. 14 | */ 15 | interface ParamsRegularization { 16 | 17 | /** 18 | * Regularize parameters before the update. 19 | * 20 | * @param params the parameters to regularize 21 | */ 22 | fun apply(params: ParamsArray) 23 | } 24 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/updatemethods/UpdaterSupportStructure.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. 
All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.updatemethods 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 11 | import java.io.Serializable 12 | 13 | /** 14 | * Support structure for the Update Method. 15 | * 16 | * @param shape the shape of the structure 17 | */ 18 | abstract class UpdaterSupportStructure(val shape: Shape) : Serializable { 19 | 20 | companion object { 21 | 22 | /** 23 | * Private val used to serialize the class (needed by Serializable). 24 | */ 25 | @Suppress("unused") 26 | private const val serialVersionUID: Long = 1L 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/updatemethods/adagrad/AdaGradStructure.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.updatemethods.adagrad 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.updatemethods.UpdaterSupportStructure 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 13 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 14 | 15 | /** 16 | * Support structure for the [AdaGradMethod]. 17 | * 18 | * @param shape the shape of the related parameter 19 | */ 20 | class AdaGradStructure(shape: Shape) : UpdaterSupportStructure(shape) { 21 | 22 | /** 23 | * Support array for the second order moments. 24 | * Its values are overridden to avoid the creation of new objects. 25 | */ 26 | val secondOrderMoments: DenseNDArray = DenseNDArrayFactory.zeros(shape) 27 | } 28 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/updatemethods/adam/ADAMStructure.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.updatemethods.adam 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.updatemethods.UpdaterSupportStructure 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 13 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 14 | 15 | /** 16 | * Support structure for the [ADAMMethod]. 
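// Editor's sketch, not a repository file: these support structures hold per-parameter
// state that an update method mutates in place between calls. The loop below mimics
// AdaGrad with a plain array playing the role of the structure's second order moments;
// the learning rate, epsilon and the gradients are illustrative.
import kotlin.math.sqrt

fun main() {
  val secondOrderMoments = DoubleArray(2)  // the state an AdaGradStructure would hold
  val params = doubleArrayOf(0.5, -0.3)
  val learningRate = 0.1
  val epsilon = 1.0e-8

  for (gradients in listOf(doubleArrayOf(0.1, -0.2), doubleArrayOf(0.05, 0.1))) {
    for (i in params.indices) {
      secondOrderMoments[i] += gradients[i] * gradients[i]  // accumulated across steps
      params[i] -= learningRate * gradients[i] / (sqrt(secondOrderMoments[i]) + epsilon)
    }
  }
  println(params.toList())
}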
17 | * 18 | * @param shape the shape of the related parameter 19 | */ 20 | class ADAMStructure(shape: Shape) : UpdaterSupportStructure(shape) { 21 | 22 | /** 23 | * Support array for the first order moments. 24 | * Its values are overridden to avoid the creation of new objects. 25 | */ 26 | val firstOrderMoments: DenseNDArray = DenseNDArrayFactory.zeros(shape) 27 | 28 | /** 29 | * Support array for the second order moments. 30 | * Its values are overridden to avoid the creation of new objects. 31 | */ 32 | val secondOrderMoments: DenseNDArray = DenseNDArrayFactory.zeros(shape) 33 | } 34 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/updatemethods/learningrate/LearningRateStructure.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.updatemethods.learningrate 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.updatemethods.UpdaterSupportStructure 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 13 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 14 | 15 | /** 16 | * Support structure for the [LearningRateMethod]. 17 | * 18 | * @param shape the shape of the related parameter 19 | */ 20 | class LearningRateStructure(shape: Shape) : UpdaterSupportStructure(shape) { 21 | 22 | /** 23 | * Support array for dense errors. 24 | * Its values are overridden to avoid the creation of new objects. 25 | */ 26 | val denseErrors: DenseNDArray = DenseNDArrayFactory.zeros(shape) 27 | } 28 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/updatemethods/momentum/MomentumStructure.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.updatemethods.momentum 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.updatemethods.UpdaterSupportStructure 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 13 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 14 | 15 | /** 16 | * Support structure for the [MomentumMethod]. 17 | * 18 | * @param shape the shape of the related parameter 19 | */ 20 | open class MomentumStructure(shape: Shape) : UpdaterSupportStructure(shape) { 21 | 22 | /** 23 | * Support array for the 'velocity' parameter. 24 | * Its values are overridden to avoid the creation of new objects. 
25 | */ 26 | val v: DenseNDArray = DenseNDArrayFactory.zeros(shape) 27 | } 28 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/updatemethods/nesterovmomentum/NesterovMomentumStructure.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.updatemethods.nesterovmomentum 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.updatemethods.momentum.MomentumStructure 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 13 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 14 | 15 | /** 16 | * Support structure for the [NesterovMomentumMethod]. 17 | * 18 | * @param shape the shape of the related parameter 19 | */ 20 | class NesterovMomentumStructure(shape: Shape) : MomentumStructure(shape) { 21 | 22 | /** 23 | * Support array for the temporary 'velocity' parameter. 24 | * Its values are overridden to avoid the creation of new objects. 25 | */ 26 | val vTmp: DenseNDArray by lazy { DenseNDArrayFactory.zeros(shape) } 27 | } 28 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/functionalities/updatemethods/rmsprop/RMSPropStructure.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.functionalities.updatemethods.rmsprop 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.updatemethods.UpdaterSupportStructure 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 13 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 14 | 15 | /** 16 | * Support structure for the [RMSPropMethod]. 17 | * 18 | * @param shape the shape of the related parameter 19 | */ 20 | class RMSPropStructure(shape: Shape) : UpdaterSupportStructure(shape) { 21 | 22 | /** 23 | * Support array for the second order moments. 24 | * Its values are overridden to avoid the creation of new objects. 25 | */ 26 | val secondOrderMoments: DenseNDArray = DenseNDArrayFactory.zeros(shape) 27 | } 28 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/layers/StatesWindow.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. 
If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.layers 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray 11 | 12 | /** 13 | * A window of recurrent states. 14 | * It provides methods to get to the next and the previous state of a recurrent network. 15 | * Useful during the forward and backward operations. 16 | */ 17 | abstract class StatesWindow<InputNDArrayType : NDArray<InputNDArrayType>> { 18 | 19 | /** 20 | * @return the previous recurrent state 21 | */ 22 | internal abstract fun getPrevState(): RecurrentStackedLayers<InputNDArrayType>? 23 | 24 | /** 25 | * @return the next recurrent state 26 | */ 27 | internal abstract fun getNextState(): RecurrentStackedLayers<InputNDArrayType>? 28 | } 29 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/layers/helpers/ParamsErrorsCollector.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.layers.helpers 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | import com.kotlinnlp.simplednn.core.optimizer.ParamsErrorsList 12 | 13 | /** 14 | * Generic params errors collector. 15 | */ 16 | class ParamsErrorsCollector { 17 | 18 | /** 19 | * The structure in which to accumulate the parameters errors. 20 | */ 21 | private val paramsErrorsMap = mutableMapOf<String, ParamsArray.Errors<*>>() 22 | 23 | /** 24 | * Return the errors associated to the given [params]. 25 | * If the [params] has no errors yet, these are built using the [params] default errors type (sparse vs. dense). 26 | * 27 | * @param params the parameters 28 | * 29 | * @return the current errors of the given parameters 30 | */ 31 | fun getErrors(params: ParamsArray) = 32 | this.paramsErrorsMap.getOrPut(params.uuid, defaultValue = { params.buildDefaultErrors() }) 33 | 34 | /** 35 | * @return all the collected params errors 36 | */ 37 | fun getAll(): ParamsErrorsList = this.paramsErrorsMap.values.toList() 38 | 39 | /** 40 | * Clear the accumulated errors. 41 | */ 42 | fun clear() = this.paramsErrorsMap.clear() 43 | } 44 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/LinearParams.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.layers.models 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | import java.io.Serializable 12 | 13 | /** 14 | * The parameters for a linear transformation.
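// Editor's sketch, not a repository file: the collector's intended cycle during training.
// Errors are allocated lazily and keyed by the params' uuid, so repeated calls accumulate
// into the same object; getAll() hands everything to an optimizer and clear() resets the
// accumulator. The ParamsArray size is illustrative.
fun backwardStep(collector: ParamsErrorsCollector) {
  val params = ParamsArray(3)               // a bias-like parameters array
  val errors = collector.getErrors(params)  // same instance on every later call for `params`
  // ... accumulate the gradients of the backward pass into `errors` ...
  val collected = collector.getAll()        // to be consumed by an optimizer
  collector.clear()                         // ready for the next batch
}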
15 | * 16 | * @property inputSize input size 17 | * @property outputSize output size 18 | * @param sparseInput whether the weights connected to the input are sparse or not (default false) 19 | */ 20 | open class LinearParams( 21 | val inputSize: Int, 22 | val outputSize: Int, 23 | private val sparseInput: Boolean = false 24 | ) : Serializable { 25 | 26 | companion object { 27 | 28 | /** 29 | * Private val used to serialize the class (needed by Serializable). 30 | */ 31 | @Suppress("unused") 32 | private const val serialVersionUID: Long = 1L 33 | } 34 | 35 | /** 36 | * The biases initialized to zero. 37 | */ 38 | val biases = ParamsArray(this.outputSize) 39 | 40 | /** 41 | * The weights initialized to zero. 42 | */ 43 | val weights = ParamsArray( 44 | dim1 = this.outputSize, 45 | dim2 = this.inputSize, 46 | defaultErrorsType = if (this.sparseInput) 47 | ParamsArray.ErrorsType.Sparse 48 | else 49 | ParamsArray.ErrorsType.Dense 50 | ) 51 | } 52 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/attention/AttentionForwardHelper.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.layers.models.attention 9 | 10 | import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 13 | 14 | /** 15 | * The helper which executes the forward on a [layer]. 16 | * 17 | * @property layer the [AttentionLayer] in which the forward is executed 18 | */ 19 | internal class AttentionForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>( 20 | override val layer: AttentionLayer<InputNDArrayType> 21 | ) : ForwardHelper<InputNDArrayType>(layer) { 22 | 23 | /** 24 | * Forward the input to the output combining it with the parameters. 25 | * 26 | * Calculate the values of the output array. 27 | * 28 | * y = sum by { x_i * alpha_i } 29 | */ 30 | override fun forward() { 31 | 32 | this.layer.attentionMechanism.forward() 33 | 34 | val y: DenseNDArray = this.layer.outputArray.values 35 | 36 | y.zeros() 37 | 38 | this.layer.inputArrays.forEachIndexed { i, inputArray -> 39 | y.assignSum(inputArray.values.prod(this.layer.attentionMechanism.outputArray.values[i])) 40 | } 41 | } 42 | } 43 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/attention/attentionmechanism/AttentionMechanismForwardHelper.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/.
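// Editor's sketch, not a repository file: the forward rule above computes y as the sum of
// the input vectors, each scaled by its attention score: y = sum_i (x_i * alpha_i). The
// same computation on plain arrays, with illustrative vectors and scores:
fun main() {
  val inputs = listOf(doubleArrayOf(1.0, 0.0), doubleArrayOf(0.0, 1.0))
  val alphas = doubleArrayOf(0.7, 0.3)  // attention scores, summing to 1
  val y = DoubleArray(2)                // starts at zero, like y.zeros() above

  inputs.forEachIndexed { i, x ->
    for (j in y.indices) y[j] += x[j] * alphas[i]  // y += x_i * alpha_i
  }
  println(y.toList())  // [0.7, 0.3]
}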
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/attention/attentionmechanism/AttentionMechanismForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.attention.attentionmechanism

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the forward on a [layer].
 *
 * @property layer the [AttentionMechanismLayer] in which the forward is executed
 */
internal class AttentionMechanismForwardHelper(
  override val layer: AttentionMechanismLayer
) : ForwardHelper<DenseNDArray>(layer) {

  /**
   * Forward the input to the output combining it with the parameters.
   *
   *   am = attention matrix
   *   cv = context vector
   *
   *   y = activation(am (dot) cv)
   */
  override fun forward() {

    this.layer.outputArray.assignValues(this.layer.attentionMatrix.values.dot(this.layer.params.contextVector.values))
    this.layer.outputArray.activate()
  }
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/feedforward/norm/NormForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2020-present Simone Cangialosi. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.feedforward.norm

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import kotlin.math.sqrt

/**
 * The helper which executes the forward on the [NormLayer].
 *
 * @param layer the layer with which this helper works
 */
internal class NormForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: NormLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output combining it with the parameters.
   *
   *   y = (x - E\[x\]) / sqrt(VAR\[x\] + EPS) * g + b
   */
  override fun forward() {

    val x: InputNDArrayType = this.layer.inputArray.values
    val y: DenseNDArray = this.layer.outputArray.values
    val g: DenseNDArray = this.layer.params.g.values
    val b: DenseNDArray = this.layer.params.b.values

    val dev: InputNDArrayType = x.sub(x.avg())
    val v: Double = dev.pow(2.0).avg()
    val stdDev: Double = sqrt(v + NormLayer.EPS)
    val devStdDev: InputNDArrayType = dev.div(stdDev)

    y.assignValues(devStdDev).assignProd(g).assignSum(b)

    this.layer.dev = dev
    this.layer.v = v
    this.layer.stdDev = stdDev
    this.layer.devStdDev = devStdDev
  }
}
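
// The normalization above restated on a plain DoubleArray (illustrative sketch;
// it reuses the same NormLayer.EPS constant the helper adds for numerical stability).
private fun layerNormSketch(x: DoubleArray, g: DoubleArray, b: DoubleArray): DoubleArray {

  val mean: Double = x.average()
  val variance: Double = x.map { (it - mean) * (it - mean) }.average()
  val stdDev: Double = sqrt(variance + NormLayer.EPS)

  return DoubleArray(x.size) { i -> (x[i] - mean) / stdDev * g[i] + b[i] }
}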
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/feedforward/simple/FeedforwardRelevanceHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.feedforward.simple

import com.kotlinnlp.simplednn.core.layers.LayerParameters
import com.kotlinnlp.simplednn.core.arrays.getInputRelevance
import com.kotlinnlp.simplednn.core.layers.helpers.RelevanceHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which calculates the relevance of the input of a [layer] with respect to its output.
 *
 * @property layer the [FeedforwardLayer] in which to calculate the input relevance
 */
internal class FeedforwardRelevanceHelper(override val layer: FeedforwardLayer<DenseNDArray>) : RelevanceHelper(layer) {

  /**
   * @param contributions the contributions saved during the last forward
   *
   * @return the relevance of the input with respect to the output
   */
  override fun getInputRelevance(contributions: LayerParameters): DenseNDArray =
    this.layer.outputArray.getInputRelevance(
      x = this.layer.inputArray.values,
      cw = (contributions as FeedforwardLayerParameters).unit.weights.values)
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/feedforward/squareddistance/SquaredDistanceForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.feedforward.squareddistance

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray

/**
 * The helper which executes the forward on a [SquaredDistanceLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class SquaredDistanceForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: SquaredDistanceLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output calculating a score value d >= 0.
   * The output is `(Bh)^T (dot) (Bh)`, where `B` is the parameter matrix and `h` is the input.
   */
  override fun forward() {

    this.layer.bhOut.assignValues(this.layer.params.wB.values.dot(this.layer.inputArray.values))

    this.layer.bhOut.values.let { bhOut ->
      this.layer.outputArray.assignValues(bhOut.t.dot(bhOut))
    }
  }
}
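
// The score above is simply the squared L2 norm of B·h: a single value equal to
// sum_i (Bh)_i^2, hence always >= 0 (illustrative sketch on plain doubles).
private fun squaredNormSketch(bh: DoubleArray): Double = bh.sumOf { it * it }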
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/MergeLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge

import com.kotlinnlp.simplednn.core.functionalities.initializers.Initializer
import com.kotlinnlp.simplednn.core.layers.LayerParameters

/**
 * The parameters of a merge layer.
 * It has two or more inputs instead of one.
 *
 * @property inputsSize the size of each input
 * @property outputSize the size of the output
 * @param weightsInitializer the initializer of the weights (zeros if null)
 * @param biasesInitializer the initializer of the biases (zeros if null)
 * @property sparseInput whether the weights connected to the input are sparse or not
 */
abstract class MergeLayerParameters(
  val inputsSize: List<Int>,
  outputSize: Int,
  weightsInitializer: Initializer?,
  biasesInitializer: Initializer?,
  val sparseInput: Boolean
) : LayerParameters(
  inputSize = inputsSize[0],
  outputSize = outputSize,
  weightsInitializer = weightsInitializer,
  biasesInitializer = biasesInitializer
)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/affine/AffineForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.affine

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.core.layers.LayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the forward on an [AffineLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class AffineForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: AffineLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output combining it with the parameters.
   *
   *   y = f(w1 (dot) x1 + w2 (dot) x2 (+ ... + wn (dot) xn) + b)
   */
  override fun forward() {

    val y: AugmentedArray<DenseNDArray> = this.layer.outputArray

    y.assignValues(this.layer.params.b.values)

    this.layer.inputArrays.zip(this.layer.params.w).forEach { (x, w) ->
      y.values.assignSum(w.values.dot(x.values))
    }

    this.layer.outputArray.activate()
  }
}
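
// The pre-activation of the merge above on plain arrays (illustrative sketch):
// y = w1·x1 + ... + wn·xn + b, with every w_i given as a matrix of rows.
private fun affinePreActivationSketch(ws: List<Array<DoubleArray>>, xs: List<DoubleArray>, b: DoubleArray): DoubleArray {

  val y = b.copyOf() // start from the biases

  ws.zip(xs).forEach { (w, x) ->
    for (r in y.indices)
      for (c in x.indices)
        y[r] += w[r][c] * x[c] // y += w_i (dot) x_i
  }

  return y
}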
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/avg/AvgBackwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.avg

import com.kotlinnlp.simplednn.core.layers.helpers.BackwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the backward on an [AvgLayer].
 *
 * @property layer the layer in which the backward is executed
 */
internal class AvgBackwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: AvgLayer<InputNDArrayType>
) : BackwardHelper<InputNDArrayType>(layer) {

  /**
   * Executes the backward calculating the errors of the parameters and possibly of the input, starting from the
   * preset errors of the output array.
   *
   * @param propagateToInput whether to propagate the errors to the input array
   */
  override fun execBackward(propagateToInput: Boolean) {

    if (propagateToInput) {
      this.assignLayerGradients()
    }
  }

  /**
   * Assign the layer gradients.
   */
  private fun assignLayerGradients() {

    val gy: DenseNDArray = this.layer.outputArray.errors
    val gx: DenseNDArray = gy.div(this.layer.inputArrays.size.toDouble())

    this.layer.inputArrays.forEach { x ->
      x.assignErrors(gx)
    }
  }
}
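
// Why gy is divided by the number of inputs (illustrative check on scalars):
// with y = (x1 + ... + xn) / n, each partial derivative dy/dx_i = 1/n, so every
// input receives the same errors gy / n.
private fun avgGradientSketch(gy: Double, n: Int): List<Double> = List(n) { gy / n }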
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/avg/AvgForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.avg

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.core.layers.LayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * The helper which executes the forward on an [AvgLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class AvgForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: AvgLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output averaging the input arrays.
   */
  override fun forward() {

    this.layer.inputArrays.let { arrays ->

      val firstInput: DenseNDArray = arrays.first().values.let {
        if (it is DenseNDArray) it else DenseNDArrayFactory.fromNDArray(it)
      }

      this.layer.outputArray.assignValues(firstInput)

      (1 until arrays.size).forEach { i -> this.layer.outputArray.values.assignSum(arrays[i].values) }

      this.layer.outputArray.values.assignDiv(arrays.size.toDouble())
    }
  }
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/avg/AvgLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.avg

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.merge.MergeLayerParameters

/**
 * The parameters of the Avg layer.
 *
 * @property inputSize the size of each input
 * @property nInputs the number of input arrays
 */
class AvgLayerParameters(
  inputSize: Int,
  val nInputs: Int
) : MergeLayerParameters(
  inputsSize = List(size = nInputs, init = { inputSize }),
  outputSize = inputSize,
  weightsInitializer = null,
  biasesInitializer = null,
  sparseInput = false // actually not used because there are no parameters
) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * Check input arrays size.
   */
  init {
    require(this.inputsSize.all { it == this.inputSize })
  }

  /**
   * The list of weights parameters.
   */
  override val weightsList = emptyList<ParamsArray>()

  /**
   * The list of biases parameters.
   */
  override val biasesList = emptyList<ParamsArray>()
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/concat/ConcatForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.concat

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.concatVectorsV
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the forward on a [ConcatLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class ConcatForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: ConcatLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output concatenating the input arrays.
   * TODO: make it work with all types of input arrays.
   */
  override fun forward() {

    this.layer.outputArray.assignValues(
      concatVectorsV(this.layer.inputArrays.map { it.values as DenseNDArray })
    )
  }
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/concat/ConcatLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.concat

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.merge.MergeLayerParameters

/**
 * The parameters of the Concat layer.
 *
 * @property inputsSize the size of each input
 */
class ConcatLayerParameters(
  inputsSize: List<Int>
) : MergeLayerParameters(
  inputsSize = inputsSize,
  outputSize = inputsSize.sum(),
  weightsInitializer = null,
  biasesInitializer = null,
  sparseInput = false // actually not used because there are no parameters
) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * The list of weights parameters.
   */
  override val weightsList = emptyList<ParamsArray>()

  /**
   * The list of biases parameters.
   */
  override val biasesList = emptyList<ParamsArray>()
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/concatff/ConcatFFForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2020-present Simone Cangialosi. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.concatff

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.concatVectorsV
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the forward on a [ConcatFFLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class ConcatFFForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: ConcatFFLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output through a concatenation and a feed-forward layer.
   * TODO: make it work also with non-dense input arrays.
   */
  override fun forward() {

    this.layer.outputFeedforward.inputArray.assignValues(
      concatVectorsV(this.layer.inputArrays.map { it.values as DenseNDArray })
    )

    this.layer.outputFeedforward.forward()
  }
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/cosinesimilarity/CosineLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.cosinesimilarity

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.merge.MergeLayerParameters

/**
 * The parameters of the Cosine layer.
 *
 * @property inputSize the size of each input
 */
class CosineLayerParameters(
  inputSize: Int
) : MergeLayerParameters(
  inputsSize = List(size = 2, init = { inputSize }),
  outputSize = 1,
  weightsInitializer = null,
  biasesInitializer = null,
  sparseInput = false // actually not used because there are no parameters
) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * The list of weights parameters.
   */
  override val weightsList = emptyList<ParamsArray>()

  /**
   * The list of biases parameters.
   */
  override val biasesList = emptyList<ParamsArray>()
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/distance/DistanceForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.distance

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory
import kotlin.math.abs
import kotlin.math.exp

/**
 * The helper which executes the forward on a [DistanceLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class DistanceForwardHelper(override val layer: DistanceLayer) : ForwardHelper<DenseNDArray>(layer) {

  /**
   * Forward the input to the output calculating a score value d ∈ [0, 1]:
   *
   *   d = exp(-||x1 - x2||_1)
   */
  override fun forward() {

    val diffVector: DenseNDArray = DenseNDArrayFactory.fromNDArray(this.layer.inputArray1.values)
    var sum = 0.0
    val outputScore = DoubleArray(1)

    diffVector.assignSub(this.layer.inputArray2.values)
    diffVector.toDoubleArray().forEach { element -> sum += abs(element) }

    outputScore[0] = exp(-sum)

    this.layer.outputArray.assignValues(DenseNDArrayFactory.arrayOf(outputScore))
  }
}
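
// The score above on plain arrays (illustrative sketch): d = exp(-||x1 - x2||_1),
// so d == 1.0 for identical inputs and decays toward 0.0 as they diverge.
private fun distanceScoreSketch(x1: DoubleArray, x2: DoubleArray): Double =
  exp(-x1.indices.sumOf { i -> abs(x1[i] - x2[i]) })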
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/distance/DistanceLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.distance

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.merge.MergeLayerParameters

/**
 * The parameters of the Distance layer.
 *
 * @property inputSize the size of each input
 */
class DistanceLayerParameters(
  inputSize: Int
) : MergeLayerParameters(
  inputsSize = List(size = 2, init = { inputSize }),
  outputSize = 1,
  weightsInitializer = null,
  biasesInitializer = null,
  sparseInput = false // actually not used because there are no parameters
) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * The list of weights parameters.
   */
  override val weightsList = emptyList<ParamsArray>()

  /**
   * The list of biases parameters.
   */
  override val biasesList = emptyList<ParamsArray>()
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/AffineMerge.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFunction
import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The Affine merge layer configuration.
 *
 * @property outputSize the size of the merged output
 * @property activationFunction the output activation function
 */
class AffineMerge(
  outputSize: Int,
  activationFunction: ActivationFunction? = null
) : VariableOutputMergeConfig(
  type = LayerType.Connection.Affine,
  outputSize = outputSize,
  activationFunction = activationFunction
)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/AvgMerge.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The Avg merge layer configuration.
 */
class AvgMerge : MergeConfiguration(type = LayerType.Connection.Avg)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/BiaffineMerge.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFunction
import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The Biaffine merge layer configuration.
 *
 * @property outputSize the size of the merged output
 * @property activationFunction the output activation function
 */
class BiaffineMerge(
  outputSize: Int,
  activationFunction: ActivationFunction? = null
) : VariableOutputMergeConfig(
  type = LayerType.Connection.Biaffine,
  outputSize = outputSize,
  activationFunction = activationFunction
)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/ConcatFeedforwardMerge.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFunction
import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The ConcatFeedforward merge layer configuration.
 *
 * @property outputSize the size of the merged output
 * @property activationFunction the output activation function
 */
class ConcatFeedforwardMerge(
  outputSize: Int,
  activationFunction: ActivationFunction? = null
) : VariableOutputMergeConfig(
  type = LayerType.Connection.ConcatFeedforward,
  outputSize = outputSize,
  activationFunction = activationFunction
)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/ConcatMerge.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The Concat merge layer configuration.
 */
class ConcatMerge : MergeConfiguration(type = LayerType.Connection.Concat)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/MergeConfiguration.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The configuration of a merge layer.
 *
 * @property type the connection type
 */
abstract class MergeConfiguration(val type: LayerType.Connection) {

  /**
   * Check the connection type.
   */
  init {
    require(this.type.property == LayerType.Property.Merge)
  }
}
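
// A sketch of how the concrete configurations in this package are built (sizes
// are hypothetical); the `require` above guarantees that every connection type
// used by a configuration has the Merge property.
private val exampleMergeConfigs: List<MergeConfiguration> = listOf(
  ConcatMerge(),
  AvgMerge(),
  AffineMerge(outputSize = 10)
)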
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/ProductMerge.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The Product merge layer configuration.
 */
class ProductMerge : MergeConfiguration(type = LayerType.Connection.Product)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/SumMerge.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The Sum merge layer configuration.
 */
class SumMerge : MergeConfiguration(type = LayerType.Connection.Sum)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/mergeconfig/VariableOutputMergeConfig.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.mergeconfig

import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFunction
import com.kotlinnlp.simplednn.core.layers.LayerType

/**
 * The configuration of a merge layer with a variable output size and an optional activation function.
 *
 * @property type the connection type
 * @property outputSize the size of the merged output
 * @property activationFunction the output activation function
 */
abstract class VariableOutputMergeConfig(
  type: LayerType.Connection,
  val outputSize: Int,
  val activationFunction: ActivationFunction?
) : MergeConfiguration(type)
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/product/ProductForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.product

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * The helper which executes the forward on a [ProductLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class ProductForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: ProductLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output multiplying element-wise the input arrays.
   */
  override fun forward() {

    this.layer.inputArrays.let { arrays ->

      val firstInput: DenseNDArray = arrays.first().values.let {
        if (it is DenseNDArray) it else DenseNDArrayFactory.fromNDArray(it)
      }

      this.layer.outputArray.assignValues(firstInput)

      (1 until arrays.size).forEach { i -> this.layer.outputArray.values.assignProd(arrays[i].values) }
    }
  }
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/product/ProductLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.product

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.merge.MergeLayerParameters

/**
 * The parameters of the Product layer.
 *
 * @property inputSize the size of each input
 * @property nInputs the number of input arrays
 */
class ProductLayerParameters(
  inputSize: Int,
  val nInputs: Int
) : MergeLayerParameters(
  inputsSize = List(size = nInputs, init = { inputSize }),
  outputSize = inputSize,
  weightsInitializer = null,
  biasesInitializer = null,
  sparseInput = false // actually not used because there are no parameters
) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * Check input arrays size.
   */
  init {
    require(this.inputsSize.all { it == this.inputSize })
  }

  /**
   * The list of weights parameters.
   */
  override val weightsList = emptyList<ParamsArray>()

  /**
   * The list of biases parameters.
   */
  override val biasesList = emptyList<ParamsArray>()
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/sub/SubBackwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.sub

import com.kotlinnlp.simplednn.core.layers.helpers.BackwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the backward on a [SubLayer].
 *
 * @property layer the layer in which the backward is executed
 */
internal class SubBackwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: SubLayer<InputNDArrayType>
) : BackwardHelper<InputNDArrayType>(layer) {

  /**
   * Executes the backward calculating the errors of the parameters and possibly of the input, starting from the
   * preset errors of the output array.
   *
   * @param propagateToInput whether to propagate the errors to the input array
   */
  override fun execBackward(propagateToInput: Boolean) {

    if (propagateToInput) {
      this.assignLayerGradients()
    }
  }

  /**
   * Assign the layer gradients.
   */
  private fun assignLayerGradients() {

    val gy: DenseNDArray = this.layer.outputArray.errors

    this.layer.inputArray1.assignErrors(gy)
    this.layer.inputArray2.assignErrors(gy.prod(-1.0))
  }
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/sub/SubForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.sub

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the forward on a [SubLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class SubForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: SubLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output subtracting the 2nd input array from the 1st one.
   */
  override fun forward() {

    val sub: DenseNDArray = this.layer.inputArray1.values.sub(this.layer.inputArray2.values) as DenseNDArray
    this.layer.outputArray.assignValues(sub)
  }
}
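
// Gradient check for the subtraction above (illustrative): with y = x1 - x2,
// dy/dx1 = +1 and dy/dx2 = -1, which is why the backward helper assigns gy to
// the first input and gy * -1.0 to the second.
private fun subGradientsSketch(gy: Double): Pair<Double, Double> = Pair(gy, -gy)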
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/sub/SubLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.sub

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.merge.MergeLayerParameters

/**
 * The parameters of the Sub layer.
 *
 * @property inputSize the size of each input
 */
class SubLayerParameters(
  inputSize: Int
) : MergeLayerParameters(
  inputsSize = List(size = 2, init = { inputSize }),
  outputSize = inputSize,
  weightsInitializer = null,
  biasesInitializer = null,
  sparseInput = false // actually not used because there are no parameters
) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * Check input arrays size.
   */
  init {
    require(this.inputsSize.all { it == this.inputSize })
  }

  /**
   * The list of weights parameters.
   */
  override val weightsList = emptyList<ParamsArray>()

  /**
   * The list of biases parameters.
   */
  override val biasesList = emptyList<ParamsArray>()
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/sum/SumBackwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.sum

import com.kotlinnlp.simplednn.core.layers.helpers.BackwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which executes the backward on a [SumLayer].
 *
 * @property layer the layer in which the backward is executed
 */
internal class SumBackwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: SumLayer<InputNDArrayType>
) : BackwardHelper<InputNDArrayType>(layer) {

  /**
   * Executes the backward calculating the errors of the parameters and possibly of the input, starting from the
   * preset errors of the output array.
   *
   * @param propagateToInput whether to propagate the errors to the input array
   */
  override fun execBackward(propagateToInput: Boolean) {

    if (propagateToInput) {
      this.assignLayerGradients()
    }
  }

  /**
   * Assign the layer gradients.
   */
  private fun assignLayerGradients() {

    val gy: DenseNDArray = this.layer.outputArray.errors

    this.layer.inputArrays.forEach { x ->
      x.assignErrors(gy)
    }
  }
}
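
// Gradient check for the sum (illustrative): with y = x1 + ... + xn, each
// dy/dx_i = 1, so every input receives the output errors gy unchanged.
private fun sumGradientsSketch(gy: Double, n: Int): List<Double> = List(n) { gy }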
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/sum/SumForwardHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.sum

import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * The helper which executes the forward on a [SumLayer].
 *
 * @property layer the layer in which the forward is executed
 */
internal class SumForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>(
  override val layer: SumLayer<InputNDArrayType>
) : ForwardHelper<InputNDArrayType>(layer) {

  /**
   * Forward the input to the output adding the input arrays.
   */
  override fun forward() {

    this.layer.inputArrays.let { arrays ->

      val firstInput: DenseNDArray = arrays.first().values.let {
        if (it is DenseNDArray) it else DenseNDArrayFactory.fromNDArray(it)
      }

      this.layer.outputArray.assignValues(firstInput)

      (1 until arrays.size).forEach { i -> this.layer.outputArray.values.assignSum(arrays[i].values) }
    }
  }
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/merge/sum/SumLayerParameters.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.merge.sum

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.merge.MergeLayerParameters

/**
 * The parameters of the Sum layer.
 *
 * @property inputSize the size of each input
 * @property nInputs the number of input arrays
 */
class SumLayerParameters(
  inputSize: Int,
  val nInputs: Int
) : MergeLayerParameters(
  inputsSize = List(size = nInputs, init = { inputSize }),
  outputSize = inputSize,
  weightsInitializer = null,
  biasesInitializer = null,
  sparseInput = false // actually not used because there are no parameters
) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * Check input arrays size.
   */
  init {
    require(this.inputsSize.all { it == this.inputSize })
  }

  /**
   * The list of weights parameters.
   */
  override val weightsList = emptyList<ParamsArray>()

  /**
   * The list of biases parameters.
   */
  override val biasesList = emptyList<ParamsArray>()
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/recurrent/GatedRecurrentRelevanceHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.recurrent

import com.kotlinnlp.simplednn.core.layers.LayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which calculates the relevance of the input of a gated recurrent [layer] with respect to its output.
 *
 * @property layer the [RecurrentLayer] in which to calculate the input relevance
 */
internal abstract class GatedRecurrentRelevanceHelper(
  layer: RecurrentLayer<DenseNDArray>
) : RecurrentRelevanceHelper(layer) {

  /**
   * Propagate the relevance from the output to the gates.
   *
   * @param contributions the contributions saved during the last forward
   */
  abstract fun propagateRelevanceToGates(contributions: LayerParameters)
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/recurrent/LayersWindow.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.recurrent

import com.kotlinnlp.simplednn.core.layers.Layer

/**
 * A window of recurrent layers.
 * It provides methods to get the current stacked layer in the next and the previous state of a recurrent network.
 * Useful during the forward and backward operations.
 */
internal interface LayersWindow {

  /**
   * @return the current layer in previous state
   */
  fun getPrevState(): Layer<*>?

  /**
   * @return the current layer in next state
   */
  fun getNextState(): Layer<*>?
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/recurrent/RecurrentLinearParams.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.recurrent

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.layers.models.LinearParams

/**
 * The parameters associated with a [RecurrentLayerUnit].
 *
 * @property inputSize input size
 * @property outputSize output size
 * @param sparseInput whether the weights connected to the input are sparse or not (default false)
 */
class RecurrentLinearParams(
  inputSize: Int,
  outputSize: Int,
  sparseInput: Boolean = false
) : LinearParams(
  inputSize = inputSize,
  outputSize = outputSize,
  sparseInput = sparseInput) {

  companion object {

    /**
     * Private val used to serialize the class (needed by Serializable).
     */
    @Suppress("unused")
    private const val serialVersionUID: Long = 1L
  }

  /**
   * The recurrent weights, initialized to zero.
   */
  val recurrentWeights = ParamsArray(dim1 = this.outputSize, dim2 = this.outputSize)
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/recurrent/RecurrentRelevanceHelper.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package com.kotlinnlp.simplednn.core.layers.models.recurrent

import com.kotlinnlp.simplednn.core.layers.LayerParameters
import com.kotlinnlp.simplednn.core.layers.helpers.RelevanceHelper
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

/**
 * The helper which calculates the relevance of the input of a recurrent [layer] with respect to its output.
 *
 * @property layer the [RecurrentLayer] in which to calculate the input relevance
 */
internal abstract class RecurrentRelevanceHelper(
  override val layer: RecurrentLayer<DenseNDArray>
) : RelevanceHelper(layer) {

  /**
   * Calculate the relevance of the output in the previous state with respect to the current one and assign it to the
   * output array of the previous state.
   *
   * WARNING: the previous state must exist!
   *
   * @param contributions the contributions saved during the last forward
   */
  abstract fun setRecurrentRelevance(contributions: LayerParameters)
}
--------------------------------------------------------------------------------
/src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/recurrent/deltarnn/DeltaRNNRelevanceSupport.kt:
--------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.layers.models.recurrent.deltarnn 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.AugmentedArray 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.Shape 12 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 13 | 14 | /** 15 | * A support structure for the DeltaRNN, used to save temporary results during a forward, so that they can be used to 16 | * calculate the relevance later. 17 | * 18 | * @property outputSize the size of the output array of the layer 19 | */ 20 | internal data class DeltaRNNRelevanceSupport(val outputSize: Int) { 21 | 22 | /** 23 | * The contribution from the input to the d1 array, including half biases of the candidate. 24 | */ 25 | val d1Input = AugmentedArray(values = DenseNDArrayFactory.emptyArray(Shape(this.outputSize))) 26 | 27 | /** 28 | * The contribution from the previous state to the d1 array, including half biases of the candidate. 29 | */ 30 | val d1Rec = AugmentedArray(values = DenseNDArrayFactory.emptyArray(Shape(this.outputSize))) 31 | 32 | /** 33 | * The d2 array. 34 | */ 35 | val d2 = AugmentedArray(values = DenseNDArrayFactory.emptyArray(Shape(this.outputSize))) 36 | } 37 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/layers/models/recurrent/indrnn/IndRNNForwardHelper.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.layers.models.recurrent.indrnn 9 | 10 | import com.kotlinnlp.simplednn.core.layers.helpers.ForwardHelper 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray 12 | 13 | /** 14 | * The helper which executes the forward on a [layer]. 15 | * 16 | * @property layer the [IndRNNLayer] in which the forward is executed 17 | */ 18 | internal class IndRNNForwardHelper<InputNDArrayType : NDArray<InputNDArrayType>>( 19 | override val layer: IndRNNLayer<InputNDArrayType> 20 | ) : ForwardHelper<InputNDArrayType>(layer) { 21 | 22 | /** 23 | * Forward the input to the output combining it with the parameters. 24 | * 25 | * y = f(w (dot) x + wRec * yPrev + b) 26 | */ 27 | override fun forward() { 28 | 29 | // y = w (dot) x + b 30 | this.layer.outputArray.forward( 31 | w = this.layer.params.feedforwardUnit.weights.values, 32 | b = this.layer.params.feedforwardUnit.biases.values, 33 | x = this.layer.inputArray.values 34 | ) 35 | 36 | // y += wRec * yPrev 37 | this.layer.layersWindow.getPrevState()?.let { prevStateLayer -> 38 | 39 | val wRec = this.layer.params.recurrentWeights.values 40 | this.layer.outputArray.values.assignSum(wRec.prod(prevStateLayer.outputArray.values)) 41 | } 42 | 43 | this.layer.outputArray.activate() 44 | } 45 | } 46 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/optimizer/Alias.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0.
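The IndRNN forward above computes y = f(w (dot) x + wRec * yPrev + b), where the recurrent weights multiply the previous output element-wise (one scalar weight per unit) instead of through a full matrix. A minimal, self-contained numeric sketch of that step; all names here are illustrative and not part of the library API:

import kotlin.math.tanh

// One IndRNN step: the recurrent term is element-wise (wRec[i] * yPrev[i]),
// not a matrix-vector product as in a simple recurrent layer.
fun indRNNStep(
  w: Array<DoubleArray>,  // input weights, shape (output x input)
  wRec: DoubleArray,      // one recurrent weight per output unit
  b: DoubleArray,         // biases
  x: DoubleArray,         // current input
  yPrev: DoubleArray      // output of the previous state
): DoubleArray = DoubleArray(w.size) { i ->
  val wx = w[i].indices.sumOf { j -> w[i][j] * x[j] }  // w (dot) x
  tanh(wx + wRec[i] * yPrev[i] + b[i])                 // y = f(w (dot) x + wRec * yPrev + b)
}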
If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.optimizer 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | 12 | typealias ParamsList = List<ParamsArray> 13 | typealias ParamsErrorsList = List<ParamsArray.Errors<*>> -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/optimizer/Extensions.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.core.optimizer 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.ParamsArray 11 | 12 | /** 13 | * @param param a parameter 14 | * 15 | * @return the first params errors referring to the given [param], or `null` if no errors refer to it. 16 | */ 17 | fun ParamsErrorsList.getErrorsOf(param: ParamsArray): ParamsArray.Errors<*>? = this.find { it.refParams === param } -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/core/optimizer/ScheduledUpdater.kt: -------------------------------------------------------------------------------- 1 | package com.kotlinnlp.simplednn.core.optimizer 2 | 3 | import com.kotlinnlp.simplednn.utils.scheduling.BatchScheduling 4 | import com.kotlinnlp.simplednn.utils.scheduling.EpochScheduling 5 | import com.kotlinnlp.simplednn.utils.scheduling.ExampleScheduling 6 | 7 | /** 8 | * Defines a module which optimizes the parameters of neural elements and is scheduled by training events. 9 | */ 10 | interface ScheduledUpdater : ExampleScheduling, BatchScheduling, EpochScheduling { 11 | 12 | /** 13 | * Update the parameters of the neural elements associated with this [ScheduledUpdater]. 14 | */ 15 | fun update() 16 | 17 | /** 18 | * Method to call every new epoch. 19 | */ 20 | override fun newEpoch() 21 | 22 | /** 23 | * Method to call every new batch. 24 | */ 25 | override fun newBatch() 26 | 27 | /** 28 | * Method to call every new example. 29 | */ 30 | override fun newExample() 31 | } 32 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/attention/han/ArrayListExtension.kt: -------------------------------------------------------------------------------- 1 | package com.kotlinnlp.simplednn.deeplearning.attention.han 2 | 3 | import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray 4 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 5 | 6 | /** 7 | * @return a [HierarchySequence] from an array of [DenseNDArray]s 8 | */ 9 | fun <NDArrayType : NDArray<NDArrayType>> Array<NDArrayType>.toHierarchySequence(): HierarchySequence<NDArrayType> { 10 | return HierarchySequence(*this) 11 | } 12 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/attention/han/HANParameters.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
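A short usage sketch of the getErrorsOf extension above; `paramsErrors` is assumed to be a ParamsErrorsList collected during a backward pass:

import com.kotlinnlp.simplednn.core.arrays.ParamsArray
import com.kotlinnlp.simplednn.core.optimizer.ParamsErrorsList
import com.kotlinnlp.simplednn.core.optimizer.getErrorsOf

// Look up, by identity, the errors associated with a given ParamsArray.
fun inspectErrors(paramsErrors: ParamsErrorsList, weights: ParamsArray) {
  val errors: ParamsArray.Errors<*>? = paramsErrors.getErrorsOf(weights)
  println(if (errors != null) "Errors found for the given params." else "No errors refer to the given params.")
}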
2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.attention.han 9 | 10 | import com.kotlinnlp.simplednn.core.layers.StackedLayersParameters 11 | import com.kotlinnlp.simplednn.deeplearning.attention.attentionnetwork.AttentionNetworkParameters 12 | import com.kotlinnlp.simplednn.deeplearning.birnn.BiRNNParameters 13 | import java.io.Serializable 14 | 15 | /** 16 | * The parameters of the Hierarchical Attention Networks. 17 | * 18 | * @property biRNNs a list containing the parameters of the BiRNNs of the HAN 19 | * @property attentionNetworks a list containing the parameters of the AttentionNetworks of the HAN 20 | * @property outputStackedLayers the parameters of the output Feedforward network 21 | */ 22 | class HANParameters( 23 | val biRNNs: List<BiRNNParameters>, 24 | val attentionNetworks: List<AttentionNetworkParameters>, 25 | val outputStackedLayers: StackedLayersParameters 26 | ) : Serializable { 27 | 28 | companion object { 29 | 30 | /** 31 | * Private val used to serialize the class (needed by Serializable) 32 | */ 33 | @Suppress("unused") 34 | private const val serialVersionUID: Long = 1L 35 | } 36 | } 37 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/attention/han/HierarchyGroup.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.attention.han 9 | 10 | /** 11 | * The HierarchyGroup represents a higher level of the input hierarchy of a [HANEncoder]. 12 | * 13 | * It contains a list of other [HierarchyItem]s as sub-levels. 14 | */ 15 | class HierarchyGroup(vararg groups: HierarchyItem) : HierarchyItem, ArrayList<HierarchyItem>(groups.size) { 16 | 17 | init { 18 | groups.forEach { 19 | this.add(it) 20 | } 21 | } 22 | } 23 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/attention/han/HierarchyItem.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.attention.han 9 | 10 | /** 11 | * The HierarchyItem defines a generic item of the hierarchy which represents the input of a [HANEncoder]. 12 | * 13 | * An item could be a list of [HierarchySequence]s if it represents the lowest level of the hierarchy (the input of the 14 | * [HANEncoder]) or a list of [HierarchyGroup]s if it represents a higher level.
15 | */ 16 | interface HierarchyItem 17 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/attention/han/HierarchySequence.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.attention.han 9 | 10 | import com.kotlinnlp.simplednn.core.arrays.AugmentedArray 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray 12 | 13 | /** 14 | * The HierarchySequence represents the lowest level of the input hierarchy of a [HANEncoder]. 15 | * 16 | * It contains a list of [AugmentedArray]s as input sequence of the lowest level of a [HANEncoder]. 17 | */ 18 | class HierarchySequence<NDArrayType : NDArray<NDArrayType>>(vararg arrays: NDArrayType) 19 | : HierarchyItem, ArrayList<NDArrayType>(arrays.size) { 20 | 21 | init { 22 | arrays.forEach { 23 | this.add(it) 24 | } 25 | } 26 | } 27 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/attention/pointernetwork/PointerNetworkParameters.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.attention.pointernetwork 9 | 10 | import com.kotlinnlp.simplednn.core.layers.StackedLayersParameters 11 | import com.kotlinnlp.simplednn.core.layers.models.attention.attentionmechanism.AttentionMechanismLayerParameters 12 | import java.io.Serializable 13 | 14 | /** 15 | * The parameters of the [PointerNetworkProcessor]. 16 | * 17 | * @property mergeParams the parameters of the merge network 18 | * @property attentionParams the parameters of the attention structure 19 | */ 20 | class PointerNetworkParameters( 21 | val mergeParams: StackedLayersParameters, 22 | val attentionParams: AttentionMechanismLayerParameters 23 | ) : Serializable { 24 | 25 | companion object { 26 | 27 | /** 28 | * Private val used to serialize the class (needed by Serializable) 29 | */ 30 | @Suppress("unused") 31 | private const val serialVersionUID: Long = 1L 32 | } 33 | } 34 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/birnn/BiRNNConfig.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/.
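A sketch of how the hierarchy classes above compose the input of a [HANEncoder]: arrays are wrapped into sequences (the lowest level) and sequences into groups. The values are arbitrary; only the composition pattern matters here.

import com.kotlinnlp.simplednn.deeplearning.attention.han.HierarchyGroup
import com.kotlinnlp.simplednn.deeplearning.attention.han.toHierarchySequence
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

// Two "sentences" of token vectors, grouped into one higher-level item (e.g. a paragraph).
fun buildHierarchy(): HierarchyGroup {
  val sentence1 = arrayOf(
    DenseNDArrayFactory.arrayOf(doubleArrayOf(0.1, 0.2)),
    DenseNDArrayFactory.arrayOf(doubleArrayOf(0.3, 0.4))
  ).toHierarchySequence()
  val sentence2 = arrayOf(
    DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, 0.6))
  ).toHierarchySequence()
  return HierarchyGroup(sentence1, sentence2)
}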
6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.birnn 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFunction 11 | import com.kotlinnlp.simplednn.core.layers.LayerType 12 | import java.io.Serializable 13 | 14 | /** 15 | * The configuration of a BiRNN. 16 | * 17 | * @param connectionType the recurrent connection type of the BiRNN used to encode tokens 18 | * @param hiddenActivation the hidden activation function of the BiRNN used to encode tokens 19 | * @param numberOfLayers number of stacked BiRNNs (default 1) 20 | */ 21 | data class BiRNNConfig( 22 | val connectionType: LayerType.Connection, 23 | val hiddenActivation: ActivationFunction?, 24 | val numberOfLayers: Int = 1 25 | ) : Serializable { 26 | 27 | companion object { 28 | 29 | /** 30 | * Private val used to serialize the class (needed by Serializable). 31 | */ 32 | @Suppress("unused") 33 | private const val serialVersionUID: Long = 1L 34 | } 35 | 36 | /** 37 | * @return the string representation of this class 38 | */ 39 | override fun toString(): String = "%s - %s - %s".format( 40 | this.connectionType, 41 | if (this.hiddenActivation != null) this.hiddenActivation::class.simpleName else null, 42 | this.numberOfLayers 43 | ) 44 | } -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/birnn/BiRNNEncodersPool.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.birnn 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray 11 | import com.kotlinnlp.utils.ItemsPool 12 | 13 | /** 14 | * A pool of [BiRNNEncoder]s which allows allocating and releasing one when needed, without creating a new one each time. 15 | * It is useful to avoid creating new structures every time a new encoder is needed. 16 | * 17 | * @param network the [BiRNN] which the encoders of the pool will work with 18 | * @param rnnDropout the probability of RNNs dropout (default 0.0) 19 | * @param mergeDropout the probability of output merge dropout (default 0.0) 20 | * @param propagateToInput whether to propagate the errors to the input during the backward 21 | */ 22 | class BiRNNEncodersPool<InputNDArrayType : NDArray<InputNDArrayType>>( 23 | private val network: BiRNN, 24 | private val rnnDropout: Double = 0.0, 25 | private val mergeDropout: Double = 0.0, 26 | private val propagateToInput: Boolean 27 | ) : ItemsPool<BiRNNEncoder<InputNDArrayType>>() { 28 | 29 | /** 30 | * The factory of a new [BiRNNEncoder].
31 | * 32 | * @param id the unique id of the item to create 33 | * 34 | * @return a new [BiRNNEncoder] with the given [id] 35 | */ 36 | override fun itemFactory(id: Int) = BiRNNEncoder<InputNDArrayType>( 37 | network = this.network, 38 | rnnDropout = this.rnnDropout, 39 | mergeDropout = this.mergeDropout, 40 | propagateToInput = this.propagateToInput, 41 | id = id 42 | ) 43 | } 44 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/birnn/BiRNNParameters.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.birnn 9 | 10 | import com.kotlinnlp.simplednn.core.layers.StackedLayersParameters 11 | import java.io.Serializable 12 | 13 | /** 14 | * The BiRNNParameters contains the parameters of its sub-networks (leftToRightNetwork, rightToLeftNetwork). 15 | * 16 | * @property leftToRight network parameters of the left-to-right recurrent neural network 17 | * @property rightToLeft network parameters of the right-to-left recurrent neural network 18 | * @property merge network parameters of the merge output network 19 | */ 20 | class BiRNNParameters( 21 | val leftToRight: StackedLayersParameters, 22 | val rightToLeft: StackedLayersParameters, 23 | val merge: StackedLayersParameters 24 | ) : Serializable { 25 | 26 | companion object { 27 | 28 | /** 29 | * Private val used to serialize the class (needed by Serializable) 30 | */ 31 | @Suppress("unused") 32 | private const val serialVersionUID: Long = 1L 33 | } 34 | } 35 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/birnn/BiRNNUtils.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/.
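A construction sketch for the BiRNNEncodersPool defined above; `birnn` is an assumed, already-built BiRNN, and DenseNDArray fixes the input type of the pooled encoders:

import com.kotlinnlp.simplednn.deeplearning.birnn.BiRNN
import com.kotlinnlp.simplednn.deeplearning.birnn.BiRNNEncodersPool
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

// Build a pool of dense-input encoders; items are then allocated and released
// through the inherited ItemsPool interface instead of being constructed anew.
fun buildPool(birnn: BiRNN): BiRNNEncodersPool<DenseNDArray> =
  BiRNNEncodersPool(network = birnn, propagateToInput = true)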
6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.birnn 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray 11 | 12 | /** 13 | * BiRNNUtils contains functions which help to split and concatenate 14 | * the results of the different processors of the [BiRNNEncoder] 15 | */ 16 | object BiRNNUtils { 17 | 18 | /** 19 | * Split the [errors] in left-to-right and right-to-left errors 20 | * 21 | * @param errors the errors to split 22 | * 23 | * @return a (left-to-right, right-to-left) errors Pair 24 | */ 25 | fun splitErrors(errors: DenseNDArray) = Pair( 26 | errors.getRange(0, errors.length / 2), 27 | errors.getRange(errors.length / 2, errors.length)) 28 | 29 | /** 30 | * Sum the left-to-right and right-to-left errors 31 | * 32 | * @param leftToRightSequenceErrors 33 | * @param rightToLeftSequenceErrors 34 | * 35 | * @return the sum of the left-to-right and right-to-left errors 36 | */ 37 | fun sumBidirectionalErrors(leftToRightSequenceErrors: List<DenseNDArray>, 38 | rightToLeftSequenceErrors: List<DenseNDArray>): List<DenseNDArray> { 39 | 40 | require(leftToRightSequenceErrors.size == rightToLeftSequenceErrors.size) 41 | 42 | return List( 43 | size = leftToRightSequenceErrors.size, 44 | init = { i -> 45 | leftToRightSequenceErrors[i].sum(rightToLeftSequenceErrors[leftToRightSequenceErrors.size - i - 1]) 46 | } 47 | ) 48 | } 49 | } 50 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/birnn/deepbirnn/DeepBiRNNParameters.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.birnn.deepbirnn 9 | 10 | import com.kotlinnlp.simplednn.deeplearning.birnn.BiRNNParameters 11 | import java.io.Serializable 12 | 13 | /** 14 | * The DeepBiRNNParameters contains the parameters of all its stacked BiRNN. 15 | * 16 | * @property paramsPerBiRNN a list of [BiRNNParameters] 17 | */ 18 | class DeepBiRNNParameters(val paramsPerBiRNN: List<BiRNNParameters>) : Serializable { 19 | 20 | companion object { 21 | 22 | /** 23 | * Private val used to serialize the class (needed by Serializable). 24 | */ 25 | @Suppress("unused") 26 | private const val serialVersionUID: Long = 1L 27 | } 28 | } 29 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/multitasknetwork/MultiTaskNetworkConfig.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/.
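A small sketch of the two BiRNNUtils helpers above: splitting the concatenated output errors into the two directions, then summing the per-position errors of the two passes (sumBidirectionalErrors pairs each position with the reversed right-to-left list, as its init block shows). The arguments are assumed to come from a BiRNNEncoder backward:

import com.kotlinnlp.simplednn.deeplearning.birnn.BiRNNUtils
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray

fun combineErrors(
  concatenated: DenseNDArray,     // errors of one concatenated output
  l2rErrors: List<DenseNDArray>,  // per-position left-to-right errors
  r2lErrors: List<DenseNDArray>   // per-position right-to-left errors
) {
  val (leftToRight, rightToLeft) = BiRNNUtils.splitErrors(concatenated)
  val summed: List<DenseNDArray> = BiRNNUtils.sumBidirectionalErrors(l2rErrors, r2lErrors)
  println("split sizes: ${leftToRight.length}, ${rightToLeft.length}; summed positions: ${summed.size}")
}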
6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.multitasknetwork 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.ActivationFunction 11 | import java.io.Serializable 12 | 13 | /** 14 | * The configuration of an output network of a [MultiTaskNetwork]. 15 | * 16 | * @property outputSize the size of the output layer 17 | * @property outputActivation the activation function of the output layer 18 | */ 19 | data class MultiTaskNetworkConfig( 20 | val outputSize: Int, 21 | val outputActivation: ActivationFunction? 22 | ) : Serializable { 23 | 24 | companion object { 25 | 26 | /** 27 | * Private val used to serialize the class (needed by Serializable). 28 | */ 29 | @Suppress("unused") 30 | private const val serialVersionUID: Long = 1L 31 | } 32 | } 33 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/deeplearning/multitasknetwork/MultitaskNetworksPool.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.deeplearning.multitasknetwork 9 | 10 | import com.kotlinnlp.simplednn.simplemath.ndarray.NDArray 11 | import com.kotlinnlp.utils.ItemsPool 12 | 13 | /** 14 | * A pool of [MultiTaskNetwork]s which allows allocating and releasing one when needed, without creating a new one each time. 15 | * It is useful to avoid creating new structures every time a new network is needed. 16 | * 17 | * @property model the model of the [MultiTaskNetwork]s of the pool 18 | * @param propagateToInput whether to propagate the errors to the input during the backward 19 | */ 20 | class MultitaskNetworksPool<InputNDArrayType : NDArray<InputNDArrayType>>( 21 | val model: MultiTaskNetworkModel, 22 | private val propagateToInput: Boolean 23 | ) : ItemsPool<MultiTaskNetwork<InputNDArrayType>>() { 24 | 25 | /** 26 | * The factory of a new [MultiTaskNetwork]. 27 | * 28 | * @param id the unique id of the item to create 29 | * 30 | * @return a new [MultiTaskNetwork] with the given [id] 31 | */ 32 | override fun itemFactory(id: Int) = MultiTaskNetwork<InputNDArrayType>( 33 | model = this.model, 34 | propagateToInput = this.propagateToInput, 35 | id = id 36 | ) 37 | } 38 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/helpers/Counter.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2019-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.helpers 9 | 10 | /** 11 | * A counter of values used during training. 12 | */ 13 | internal data class Counter( 14 | var epoch: Int = 0, 15 | var batch: Int = 0, 16 | var exampleCount: Int = 0, 17 | var bestAccuracy: Double = 0.0 18 | ) { 19 | 20 | /** 21 | * Reset all the counters.
22 | */ 23 | fun reset() { 24 | this.epoch = 0 25 | this.batch = 0 26 | this.exampleCount = 0 27 | this.bestAccuracy = 0.0 28 | } 29 | 30 | /** 31 | * Method to call every new epoch. 32 | * It increments the epochs counter and sets the batches and the examples counters to zero. 33 | */ 34 | fun newEpoch() { 35 | this.epoch += 1 36 | this.batch = 0 37 | this.exampleCount = 0 38 | } 39 | 40 | /** 41 | * Method to call every new batch. 42 | * It increments the batches counter and sets the examples counter to zero. 43 | */ 44 | fun newBatch() { 45 | this.batch += 1 46 | this.exampleCount = 0 47 | } 48 | 49 | /** 50 | * Method to call every new example. 51 | * It increments the examples counter. 52 | */ 53 | fun newExample() { 54 | this.exampleCount += 1 55 | } 56 | } 57 | 58 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/helpers/Evaluator.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2019-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.helpers 9 | 10 | import com.kotlinnlp.utils.progressindicator.ProgressIndicatorBar 11 | 12 | /** 13 | * A helper which evaluates a neural model with a list of examples. 14 | * 15 | * @param examples a list of examples to validate the model with 16 | * @param verbose whether to print info about the validation progress (default = true) 17 | */ 18 | abstract class Evaluator<ExampleType : Any, StatsType : Statistics>( 19 | internal val examples: Iterable<ExampleType>, 20 | private val verbose: Boolean = true 21 | ) { 22 | 23 | /** 24 | * The evaluation statistics. 25 | */ 26 | protected abstract val stats: StatsType 27 | 28 | /** 29 | * Evaluate examples. 30 | * 31 | * @return the validation statistics 32 | */ 33 | open fun evaluate(): StatsType { 34 | 35 | val progress: ProgressIndicatorBar? = 36 | if (this.examples is Collection<*>) ProgressIndicatorBar(this.examples.size) else null 37 | 38 | this.stats.reset() 39 | 40 | this.examples.forEach { 41 | 42 | this.evaluate(it) 43 | 44 | if (this.verbose) progress?.tick() 45 | } 46 | 47 | return this.stats 48 | } 49 | 50 | /** 51 | * Evaluate the model with a single example. 52 | * 53 | * @param example the example to validate the model with 54 | */ 55 | protected abstract fun evaluate(example: ExampleType) 56 | } 57 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/helpers/Statistics.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2019-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.helpers 9 | 10 | import com.kotlinnlp.utils.stats.MetricCounter 11 | import kotlin.properties.Delegates 12 | 13 | /** 14 | * Validation statistics. 15 | */ 16 | abstract class Statistics { 17 | 18 | /** 19 | * Simple statistics with a simple metric of accuracy.
20 | */ 21 | class Simple : Statistics() { 22 | 23 | /** 24 | * A metric counter. 25 | */ 26 | val metric = MetricCounter() 27 | 28 | /** 29 | * Reset the metrics. 30 | */ 31 | override fun reset() { 32 | 33 | this.accuracy = 0.0 34 | 35 | this.metric.reset() 36 | } 37 | 38 | override fun toString(): String = "Accuracy: %.2f%%".format(100.0 * this.accuracy) 39 | } 40 | 41 | /** 42 | * The overall accuracy of the model validated, in the range [0.0, 1.0]. 43 | */ 44 | var accuracy: Double by Delegates.observable(0.0) { _, _, newValue -> require(newValue in 0.0 .. 1.0) } 45 | 46 | /** 47 | * Reset the metrics. 48 | */ 49 | abstract fun reset() 50 | } 51 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/simplemath/ndarray/NDArrayMask.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.simplemath.ndarray 9 | 10 | /** 11 | * A mask for [NDArray]s. 12 | * 13 | * @property dim1 an array of Int which contains the indices of the active values in the first dimension 14 | * @property dim2 an array of Int which contains the indices of the active values in the second dimension 15 | */ 16 | class NDArrayMask(val dim1: IntArray, val dim2: IntArray) : Iterable<Indices> { 17 | 18 | init { 19 | require(dim1.size == dim2.size) 20 | } 21 | 22 | /** 23 | * The number of active values. 24 | */ 25 | val size = dim1.size 26 | 27 | /** 28 | * The iterator over the active indices of this mask. 29 | */ 30 | private inner class NDArrayMaskIterator : Iterator<Indices> { 31 | 32 | /** 33 | * The index of the current iteration. 34 | */ 35 | private var curIndex = 0 36 | 37 | /** 38 | * @return whether there is another pair of active indices 39 | */ 40 | override fun hasNext(): Boolean = curIndex < this@NDArrayMask.dim1.size 41 | 42 | /** 43 | * @return the next pair of active indices 44 | */ 45 | override fun next(): Indices { 46 | 47 | val next = Pair(this@NDArrayMask.dim1[curIndex], this@NDArrayMask.dim2[curIndex]) 48 | 49 | this.curIndex++ 50 | 51 | return next 52 | } 53 | } 54 | 55 | /** 56 | * @return an iterator over the pairs of active indices of this mask 57 | */ 58 | override fun iterator(): Iterator<Indices> = this.NDArrayMaskIterator() 59 | } 60 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/simplemath/ndarray/Shape.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.simplemath.ndarray 9 | 10 | import java.io.Serializable 11 | 12 | /** 13 | * The shape of a bi-dimensional NDArray containing its dimensions (first and second). 14 | */ 15 | data class Shape(val dim1: Int, val dim2: Int = 1) : Serializable { 16 | 17 | companion object { 18 | 19 | /** 20 | * Private val used to serialize the class (needed by Serializable). 21 | */ 22 | @Suppress("unused") 23 | private const val serialVersionUID: Long = 1L 24 | } 25 | 26 | /** 27 | * The inverse [Shape] of this.
28 | */ 29 | val inverse: Shape get() = Shape(this.dim2, this.dim1) 30 | 31 | /** 32 | * @param other any object 33 | * 34 | * @return a Boolean indicating if this [Shape] is equal to the given [other] object 35 | */ 36 | override fun equals(other: Any?): Boolean { 37 | return (other is Shape && other.dim1 == this.dim1 && other.dim2 == this.dim2) 38 | } 39 | 40 | /** 41 | * @return the hash code representation of this [Shape] 42 | */ 43 | override fun hashCode(): Int { 44 | 45 | var hash = 7 46 | hash = 83 * hash + this.dim1 47 | hash = 83 * hash + this.dim2 48 | 49 | return hash 50 | } 51 | } 52 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/simplemath/ndarray/TypeAliases.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.simplemath.ndarray 9 | 10 | internal typealias Indices = Pair<Int, Int> 11 | internal typealias SparseEntry = Pair<Indices, Double> 12 | internal typealias VectorIndices = MutableList<Int> 13 | internal typealias VectorsMap = MutableMap<Int, VectorIndices?> 14 | internal typealias VectorsMapEntry = MutableMap.MutableEntry<Int, VectorIndices?> 15 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/utils/scheduling/BatchScheduling.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.utils.scheduling 9 | 10 | /** 11 | * BatchScheduling beats the time of each batch 12 | */ 13 | interface BatchScheduling { 14 | 15 | /** 16 | * Beat the occurrence of a new batch 17 | */ 18 | fun newBatch() 19 | } 20 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/utils/scheduling/EpochScheduling.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.utils.scheduling 9 | 10 | /** 11 | * EpochScheduling beats the time of each epoch 12 | */ 13 | interface EpochScheduling { 14 | 15 | /** 16 | * Beat the occurrence of a new epoch 17 | */ 18 | fun newEpoch() 19 | } 20 | -------------------------------------------------------------------------------- /src/main/kotlin/com/kotlinnlp/simplednn/utils/scheduling/ExampleScheduling.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors.
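A module-internal sketch tying together NDArrayMask (defined earlier) and the Indices alias above: iterating a mask yields one (dim1, dim2) pair per active value. Indices is an internal alias of Pair<Int, Int>, so the pairs destructure as plain Pairs.

import com.kotlinnlp.simplednn.simplemath.ndarray.NDArrayMask

// Three active values: (0, 2), (1, 0) and (3, 1).
internal fun printActiveIndices() {
  val mask = NDArrayMask(dim1 = intArrayOf(0, 1, 3), dim2 = intArrayOf(2, 0, 1))
  for ((i, j) in mask) println("active value at ($i, $j)")
}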
All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package com.kotlinnlp.simplednn.utils.scheduling 9 | 10 | /** 11 | * ExampleScheduling beats the time of each example 12 | */ 13 | interface ExampleScheduling { 14 | 15 | /** 16 | * Beat the occurrence of a new example 17 | */ 18 | fun newExample() 19 | } 20 | -------------------------------------------------------------------------------- /src/test/kotlin/core/attention/AttentionParametersSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.attention 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.initializers.RandomInitializer 11 | import com.kotlinnlp.simplednn.core.functionalities.randomgenerators.RandomGenerator 12 | import com.kotlinnlp.simplednn.core.layers.models.attention.attentionmechanism.AttentionMechanismLayerParameters 13 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 14 | import com.nhaarman.mockito_kotlin.mock 15 | import com.nhaarman.mockito_kotlin.whenever 16 | import org.spekframework.spek2.Spek 17 | import org.spekframework.spek2.style.specification.describe 18 | import kotlin.test.assertTrue 19 | 20 | /** 21 | * 22 | */ 23 | class AttentionParametersSpec : Spek({ 24 | 25 | describe("an AttentionParameters") { 26 | 27 | context("initialization") { 28 | 29 | val randomGenerator = mock<RandomGenerator>() 30 | var i = 0.0 31 | whenever(randomGenerator.next()).thenAnswer { i++ } 32 | 33 | val params = AttentionMechanismLayerParameters(inputSize = 2, weightsInitializer = RandomInitializer(randomGenerator)) 34 | 35 | it("should have a context vector with the expected initialized values") { 36 | assertTrue { 37 | params.contextVector.values.equals( 38 | DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 1.0)), 39 | tolerance = 1.0e-06 40 | ) 41 | } 42 | } 43 | } 44 | } 45 | }) 46 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/HardSigmoidSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/.
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.HardSigmoid 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class HardSigmoidSpec : Spek({ 21 | 22 | describe("a HardSigmoid activation function") { 23 | 24 | val activationFunction = HardSigmoid 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.5, 0.52, 0.502, 0.48, 0.498, 0.7, 1.0, 0.3, 0.0 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.0, 0.2, 0.0 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/HardTanhSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/.
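The expected values in the HardSigmoid spec above follow the piecewise-linear hard sigmoid f(x) = max(0, min(1, 0.2x + 0.5)), whose derivative is 0.2 where the output lies strictly between 0 and 1, and 0 where it saturates. A scalar sketch of that form, inferred from the expected values rather than taken from the library code:

fun hardSigmoid(x: Double): Double = maxOf(0.0, minOf(1.0, 0.2 * x + 0.5))

// Derivative expressed on the activated value, mirroring dfOptimized above.
fun hardSigmoidDf(fx: Double): Double = if (fx > 0.0 && fx < 1.0) 0.2 else 0.0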
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.HardTanh 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class HardTanhSpec: Spek({ 21 | 22 | describe("a HardTanh activation function") { 23 | 24 | val activationFunction = HardTanh 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 1.0, -1.0, -1.0 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 0.0, 0.0 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/LeakyRELUSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/.
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.LeakyRELU 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class LeakyRELUSpec: Spek({ 21 | 22 | describe("a LeakyReLU activation function") { 23 | 24 | val activationFunction = LeakyRELU() 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.0, 0.1, 0.01, -0.001, -0.0001, 1.0, 10.0, -0.01, -0.1 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 0.01, 1.0, 1.0, 0.01, 0.01, 1.0, 1.0, 0.01, 0.01 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/ReLUSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.ReLU 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class ReLUSpec : Spek({ 21 | 22 | describe("a ReLU activation function") { 23 | 24 | val activationFunction = ReLU 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.0, 0.1, 0.01, 0.0, 0.0, 1.0, 10.0, 0.0, 0.0 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 0.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/SoftshrinkSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.Softshrink 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class SoftshrinkSpec : Spek({ 21 | 22 | describe("a Softshrink activation function") { 23 | 24 | val activationFunction = Softshrink() 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.0, 0.0, 0.0, 0.0, 0.0, 0.5, 9.5, -0.5, -9.5 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/SoftsignSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.Softsign 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class SoftsignSpec : Spek({ 21 | 22 | describe("a Softsign activation function") { 23 | 24 | val activationFunction = Softsign 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.0, 0.09090909, 0.00990099, -0.09090909, -0.00990099, 0.5, 0.90909090, -0.5, -0.9090909 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 1.0, 0.82644628, 0.98029605, 0.82644628, 0.98029605, 0.25, 0.00826446, 0.25, 0.00826446 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/TanhSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.Tanh 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class TanhSpec : Spek({ 21 | 22 | describe("a Tanh activation function") { 23 | 24 | val activationFunction = Tanh 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.0, 0.099668, 0.00999967, -0.099668, -0.00999967, 0.76159416, 1.0, -0.76159416, -1.0 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 1.0, 0.99006629, 0.99990001, 0.99006629, 0.999900007, 0.41997434, 8.2e-09, 0.41997434, 8.2e-09 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/activations/ThresholdSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 
6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.activations 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.activations.Threshold 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | 20 | class ThresholdSpec : Spek({ 21 | 22 | describe("a Threshold activation function") { 23 | 24 | val activationFunction = Threshold() 25 | val array = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.1, 0.01, -0.1, -0.01, 1.0, 10.0, -1.0, -10.0)) 26 | val activatedArray = activationFunction.f(array) 27 | 28 | context("f") { 29 | 30 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 31 | 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 10.0, 0.0, 0.0 32 | )) 33 | 34 | it("should return the expected values") { 35 | assertTrue { expectedArray.equals(activatedArray, tolerance = 1.0e-08) } 36 | } 37 | } 38 | 39 | context("dfOptimized") { 40 | 41 | val dfArray = activationFunction.dfOptimized(activatedArray) 42 | val expectedArray = DenseNDArrayFactory.arrayOf(doubleArrayOf( 43 | 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 0.0 44 | )) 45 | 46 | it("should return the expected values") { 47 | assertTrue { expectedArray.equals(dfArray, tolerance = 1.0e-08) } 48 | } 49 | } 50 | } 51 | }) 52 | -------------------------------------------------------------------------------- /src/test/kotlin/core/functionalities/losses/MulticlassMSECalculatorSpec.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 6 | * ------------------------------------------------------------------*/ 7 | 8 | package core.functionalities.losses 9 | 10 | import com.kotlinnlp.simplednn.core.functionalities.losses.MulticlassMSECalculator 11 | import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory 12 | import org.spekframework.spek2.Spek 13 | import org.spekframework.spek2.style.specification.describe 14 | import kotlin.test.assertTrue 15 | 16 | /** 17 | * 18 | */ 19 | class MulticlassMSECalculatorSpec : Spek({ 20 | 21 | describe("a MulticlassMSECalculator") { 22 | 23 | val outputValues = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.7, 0.2, 0.1)) 24 | val goldValues = DenseNDArrayFactory.arrayOf(doubleArrayOf(1.0, 0.0, 0.0, 0.0)) 25 | 26 | context("calculateErrors") { 27 | val errors = MulticlassMSECalculator.calculateErrors(outputValues, goldValues) 28 | 29 | it("should calculate the expected errors") { 30 | assertTrue(DenseNDArrayFactory.arrayOf(doubleArrayOf(-1.0, 0.7, 0.2, 0.1)).equals(errors)) 31 | } 32 | } 33 | } 34 | }) 35 | -------------------------------------------------------------------------------- /src/test/kotlin/core/layers/feedforward/norm/NormLayerStructureUtils.kt: -------------------------------------------------------------------------------- 1 | /* Copyright 2020-present Simone Cangialosi. All Rights Reserved. 2 | * 3 | * This Source Code Form is subject to the terms of the Mozilla Public 4 | * License, v. 2.0. If a copy of the MPL was not distributed with this 5 | * file, you can obtain one at http://mozilla.org/MPL/2.0/. 
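The Norm-layer fixtures below pair a gain g and a bias b with a 4-dimensional input. As a reference, a self-contained numeric sketch of standard layer normalization, y = g * (x - mean) / (stdDev + eps) + b, which is assumed to be the form those tests exercise:

import kotlin.math.sqrt

// Normalize x to zero mean and unit deviation, then rescale with g and shift with b.
fun layerNorm(x: DoubleArray, g: DoubleArray, b: DoubleArray, eps: Double = 1.0e-12): DoubleArray {
  val mean = x.average()
  val stdDev = sqrt(x.map { (it - mean) * (it - mean) }.average())
  return DoubleArray(x.size) { i -> g[i] * (x[i] - mean) / (stdDev + eps) + b[i] }
}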
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/feedforward/norm/NormLayerStructureUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2020-present Simone Cangialosi. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.feedforward.norm

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.LayerType
import com.kotlinnlp.simplednn.core.layers.models.feedforward.norm.NormLayer
import com.kotlinnlp.simplednn.core.layers.models.feedforward.norm.NormLayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Norm layer tests.
 */
internal object NormLayerStructureUtils {

  /**
   * @return a Norm layer with a fixed dense input and the parameters of [buildParams]
   */
  fun buildLayer(): NormLayer<DenseNDArray> = NormLayer(
    inputArray = AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.4, 0.8, -0.7, -0.5))),
    outputArray = AugmentedArray.zeros(size = 4),
    inputType = LayerType.Input.Dense,
    params = buildParams())

  /**
   * @return Norm layer parameters with fixed gain and bias values
   */
  fun buildParams() = NormLayerParameters(inputSize = 4).apply {
    g.values.assignValues(DenseNDArrayFactory.arrayOf(doubleArrayOf(0.4, 0.0, -0.3, 0.8)))
    b.values.assignValues(DenseNDArrayFactory.arrayOf(doubleArrayOf(0.9, 0.2, -0.9, 0.2)))
  }

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-1.0, -0.2, 0.4, 0.6))
}
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/merge/avg/AvgLayerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.merge.avg

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.LayerType
import com.kotlinnlp.simplednn.core.layers.models.merge.avg.AvgLayerParameters
import com.kotlinnlp.simplednn.core.layers.models.merge.avg.AvgLayer
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Avg merge layer tests.
 */
internal object AvgLayerUtils {

  /**
   * @return an Avg layer that merges four fixed dense inputs
   */
  fun buildLayer(): AvgLayer<DenseNDArray> = AvgLayer(
    inputArrays = listOf(
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.9, 0.9, 0.6))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.5, -0.5))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.7, -0.7, 0.8))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, -0.4, -0.8)))
    ),
    inputType = LayerType.Input.Dense,
    outputArray = AugmentedArray(size = 3),
    params = AvgLayerParameters(inputSize = 3, nInputs = 4)
  )

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-1.0, -0.2, 0.4))
}
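
For reference, the Avg merge layer's forward is the elementwise mean of its inputs, and in the backward each input receives the output errors divided by the number of inputs. A stand-alone sketch of that arithmetic on the fixture values above (it mirrors the math, it does not call the library):

fun main() {
  val inputs = listOf(
    doubleArrayOf(-0.9, 0.9, 0.6),
    doubleArrayOf(0.0, 0.5, -0.5),
    doubleArrayOf(-0.7, -0.7, 0.8),
    doubleArrayOf(0.5, -0.4, -0.8))
  // forward: elementwise mean of the four inputs -> (-0.275, 0.075, 0.025)
  val out = DoubleArray(3) { i -> inputs.sumOf { it[i] } / inputs.size }
  // backward: every input gets the output errors divided by nInputs
  val gOut = doubleArrayOf(-1.0, -0.2, 0.4)
  val gIn = DoubleArray(3) { i -> gOut[i] / inputs.size }
  println(out.joinToString() + " | " + gIn.joinToString())
}
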
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/merge/concat/ConcatLayerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.merge.concat

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.LayerType
import com.kotlinnlp.simplednn.core.layers.models.merge.concat.ConcatLayerParameters
import com.kotlinnlp.simplednn.core.layers.models.merge.concat.ConcatLayer
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Concat merge layer tests.
 */
internal object ConcatLayerUtils {

  /**
   * @return a Concat layer that concatenates three fixed dense inputs of sizes 4, 2 and 3
   */
  fun buildLayer(): ConcatLayer<DenseNDArray> = ConcatLayer(
    inputArrays = listOf(
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.9, 0.9, 0.6, 0.1))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.5))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.7, -0.7, 0.8)))
    ),
    inputType = LayerType.Input.Dense,
    outputArray = AugmentedArray(size = 9),
    params = ConcatLayerParameters(inputsSize = listOf(4, 2, 3))
  )

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-1.0, -0.2, 0.4, -0.2, 0.0, -0.7, 0.2, -0.1, -0.7))
}
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/merge/cosinesimilarity/CosineLayerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.merge.cosinesimilarity

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.models.merge.cosinesimilarity.CosineLayer
import com.kotlinnlp.simplednn.core.layers.models.merge.cosinesimilarity.CosineLayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Cosine similarity merge layer tests.
 */
internal object CosineLayerUtils {

  /**
   * @return a Cosine layer that merges two fixed dense inputs
   */
  fun buildLayer(): CosineLayer = CosineLayer(
    inputArray1 = AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.7, -0.7, 0.8, 0.6))),
    inputArray2 = AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, -0.4, 0.8, -0.7))),
    params = CosineLayerParameters(inputSize = 4))

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.95))
}
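
The Cosine layer outputs a single value, cos(a, b) = (a . b) / (|a| |b|). A stand-alone check of that formula on the two fixture vectors above (plain Kotlin, mirroring the math rather than calling the library):

import kotlin.math.sqrt

fun main() {
  val a = doubleArrayOf(-0.7, -0.7, 0.8, 0.6)
  val b = doubleArrayOf(0.5, -0.4, 0.8, -0.7)
  val dot = a.indices.sumOf { a[it] * b[it] }
  val cos = dot / (sqrt(a.sumOf { it * it }) * sqrt(b.sumOf { it * it }))
  println(cos)  // the cosine similarity of the two input arrays
}
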
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/merge/distance/DistanceLayerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.merge.distance

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.models.merge.distance.DistanceLayer
import com.kotlinnlp.simplednn.core.layers.models.merge.distance.DistanceLayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Distance merge layer tests.
 */
internal object DistanceLayerUtils {

  /**
   * @return a Distance layer that merges two fixed dense inputs
   */
  fun buildLayer(): DistanceLayer = DistanceLayer(
    inputArray1 = AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.7, -0.7, 0.8, 0.6))),
    inputArray2 = AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, -0.4, 0.8, -0.7))),
    params = DistanceLayerParameters(inputSize = 4))

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.8))
}
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/merge/product/ProductLayerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.merge.product

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.LayerType
import com.kotlinnlp.simplednn.core.layers.models.merge.product.ProductLayerParameters
import com.kotlinnlp.simplednn.core.layers.models.merge.product.ProductLayer
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Product merge layer tests.
 */
internal object ProductLayerUtils {

  /**
   * @return a Product layer that merges five fixed dense inputs
   */
  fun buildLayer(): ProductLayer<DenseNDArray> = ProductLayer(
    inputArrays = listOf(
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.9, 0.9, 0.6))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.5, -0.5))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.7, -0.7, 0.8))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, -0.4, -0.8))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, -0.9, -0.5)))
    ),
    inputType = LayerType.Input.Dense,
    outputArray = AugmentedArray(size = 3),
    params = ProductLayerParameters(inputSize = 3, nInputs = 5)
  )

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.7, -0.5, -0.8))
}
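
In the Product merge layer the forward is the elementwise product of all the inputs, so in the backward each input receives the output errors multiplied by the product of all the other inputs. A stand-alone sketch of that rule on the fixture values above (plain arithmetic, no library calls):

fun main() {
  val inputs = listOf(
    doubleArrayOf(-0.9, 0.9, 0.6),
    doubleArrayOf(0.0, 0.5, -0.5),
    doubleArrayOf(-0.7, -0.7, 0.8),
    doubleArrayOf(0.5, -0.4, -0.8),
    doubleArrayOf(0.5, -0.9, -0.5))
  val gOut = doubleArrayOf(0.7, -0.5, -0.8)
  inputs.indices.forEach { k ->
    // errors of the k-th input: gOut times the elementwise product of the other inputs
    val gIn = DoubleArray(3) { i ->
      gOut[i] * inputs.indices.filter { it != k }.fold(1.0) { p, j -> p * inputs[j][i] }
    }
    println(gIn.joinToString())
  }
}
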
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/merge/sub/SubLayerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.merge.sub

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.LayerType
import com.kotlinnlp.simplednn.core.layers.models.merge.sub.SubLayer
import com.kotlinnlp.simplednn.core.layers.models.merge.sub.SubLayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Sub merge layer tests.
 */
internal object SubLayerUtils {

  /**
   * @return a Sub layer that merges two fixed dense inputs
   */
  fun buildLayer(): SubLayer<DenseNDArray> = SubLayer(
    inputArray1 = AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.4, 0.4, -0.3, 0.2))),
    inputArray2 = AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.8, 0.2, -0.6, 0.9))),
    inputType = LayerType.Input.Dense,
    outputArray = AugmentedArray(size = 4),
    params = SubLayerParameters(inputSize = 4))

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-1.0, -0.2, 0.4, 0.0))
}
-------------------------------------------------------------------------------- /src/test/kotlin/core/layers/merge/sum/SumLayerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.layers.merge.sum

import com.kotlinnlp.simplednn.core.arrays.AugmentedArray
import com.kotlinnlp.simplednn.core.layers.LayerType
import com.kotlinnlp.simplednn.core.layers.models.merge.sum.SumLayerParameters
import com.kotlinnlp.simplednn.core.layers.models.merge.sum.SumLayer
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the Sum merge layer tests.
 */
internal object SumLayerUtils {

  /**
   * @return a Sum layer that merges four fixed dense inputs
   */
  fun buildLayer(): SumLayer<DenseNDArray> = SumLayer(
    inputArrays = listOf(
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.9, 0.9, 0.6))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.0, 0.5, -0.5))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.7, -0.7, 0.8))),
      AugmentedArray(values = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.5, -0.4, -0.8)))
    ),
    inputType = LayerType.Input.Dense,
    outputArray = AugmentedArray(size = 3),
    params = SumLayerParameters(inputSize = 3, nInputs = 4)
  )

  /**
   * @return the errors of the output, used to test the backward
   */
  fun getOutputErrors() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-1.0, -0.2, 0.4))
}
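
For these two merge layers the backward is immediate: the Sum layer copies the output errors unchanged to every input, while the Sub layer passes them unchanged to the first input and negated to the second. A stand-alone sketch on the Sub fixture above (mirroring the math, not the library API):

fun main() {
  val gOut = doubleArrayOf(-1.0, -0.2, 0.4, 0.0)
  val gIn1 = gOut.copyOf()                         // d(x1 - x2)/dx1 = +1
  val gIn2 = DoubleArray(gOut.size) { -gOut[it] }  // d(x1 - x2)/dx2 = -1
  println(gIn1.joinToString() + " | " + gIn2.joinToString())
}
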
-------------------------------------------------------------------------------- /src/test/kotlin/core/neuralnetwork/utils/FeedforwardNetworkStructureUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.neuralnetwork.utils

import com.kotlinnlp.simplednn.core.layers.LayerInterface
import com.kotlinnlp.simplednn.core.layers.models.feedforward.simple.FeedforwardLayerParameters
import com.kotlinnlp.simplednn.core.layers.StackedLayersParameters
import core.layers.feedforward.simple.FeedforwardLayerStructureUtils

/**
 * Utilities to build the parameters of a feedforward network for the tests.
 */
object FeedforwardNetworkStructureUtils {

  /**
   * @param layersConfiguration the interfaces that configure the layers
   * @return stacked layers parameters with preset weights and biases
   */
  fun buildParams(layersConfiguration: List<LayerInterface>): StackedLayersParameters {

    val params = StackedLayersParameters(layersConfiguration)
    val inputParams = (params.paramsPerLayer[0] as FeedforwardLayerParameters)
    val outputParams = (params.paramsPerLayer[1] as FeedforwardLayerParameters)

    inputParams.unit.weights.values.assignValues(FeedforwardLayerStructureUtils.getParams45().unit.weights.values)
    inputParams.unit.biases.values.assignValues(FeedforwardLayerStructureUtils.getParams45().unit.biases.values)
    outputParams.unit.weights.values.assignValues(FeedforwardLayerStructureUtils.getParams53().unit.weights.values)
    outputParams.unit.biases.values.assignValues(FeedforwardLayerStructureUtils.getParams53().unit.biases.values)

    return params
  }
}
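
A hypothetical call site for buildParams, shown only as a sketch: it assumes, without verifying against the library, that LayerInterface accepts size, activationFunction and connectionType arguments; the 4 -> 5 -> 3 sizes match the getParams45/getParams53 fixtures copied above.

import com.kotlinnlp.simplednn.core.functionalities.activations.Softmax
import com.kotlinnlp.simplednn.core.functionalities.activations.Tanh
import com.kotlinnlp.simplednn.core.layers.LayerInterface
import com.kotlinnlp.simplednn.core.layers.LayerType
import core.neuralnetwork.utils.FeedforwardNetworkStructureUtils

fun main() {
  // a 4 -> 5 -> 3 feedforward network (sizes are assumptions based on the fixture names)
  val params = FeedforwardNetworkStructureUtils.buildParams(listOf(
    LayerInterface(size = 4),
    LayerInterface(size = 5, activationFunction = Tanh, connectionType = LayerType.Connection.Feedforward),
    LayerInterface(size = 3, activationFunction = Softmax(), connectionType = LayerType.Connection.Feedforward)))
  println(params.paramsPerLayer.size)
}
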
-------------------------------------------------------------------------------- /src/test/kotlin/core/optimizer/ParamsErrorsAccumulatorUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.optimizer

import com.kotlinnlp.simplednn.core.layers.models.feedforward.simple.FeedforwardLayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the params errors accumulator tests.
 */
object ParamsErrorsAccumulatorUtils {

  /**
   * @return feedforward layer parameters (input size 4, output size 2) with all values set to zero
   */
  fun buildEmptyParams() = FeedforwardLayerParameters(inputSize = 4, outputSize = 2).also {

    it.unit.weights.values.assignValues(DenseNDArrayFactory.arrayOf(listOf(
      doubleArrayOf(0.0, 0.0, 0.0, 0.0),
      doubleArrayOf(0.0, 0.0, 0.0, 0.0)
    )))

    it.unit.biases.values.assignValues(DenseNDArrayFactory.arrayOf(
      doubleArrayOf(0.0, 0.0)
    ))
  }

  /**
   * @return a first set of weights errors
   */
  fun buildWeightsErrorsValues1() = DenseNDArrayFactory.arrayOf(listOf(
    doubleArrayOf(0.3, 0.4, 0.2, -0.2),
    doubleArrayOf(0.2, -0.1, 0.1, 0.6)
  ))

  /**
   * @return a first set of biases errors
   */
  fun buildBiasesErrorsValues1() = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.3, -0.4))

  /**
   * @return a second set of weights errors
   */
  fun buildWeightsErrorsValues2() = DenseNDArrayFactory.arrayOf(listOf(
    doubleArrayOf(0.7, -0.8, 0.1, -0.6),
    doubleArrayOf(0.8, 0.6, -0.9, -0.2)
  ))

  /**
   * @return a second set of biases errors
   */
  fun buildBiasesErrorsValues2() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.9, 0.1))
}
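
A typical check with these fixtures is that accumulating both error sets and then averaging yields their elementwise mean. A stand-alone sketch of that expected mean for the two weights-errors matrices (plain arithmetic; the real accumulator operates on the library's params errors):

fun main() {
  val w1 = arrayOf(doubleArrayOf(0.3, 0.4, 0.2, -0.2), doubleArrayOf(0.2, -0.1, 0.1, 0.6))
  val w2 = arrayOf(doubleArrayOf(0.7, -0.8, 0.1, -0.6), doubleArrayOf(0.8, 0.6, -0.9, -0.2))
  // elementwise mean of the two accumulated error matrices
  val avg = Array(2) { r -> DoubleArray(4) { c -> (w1[r][c] + w2[r][c]) / 2 } }
  avg.forEach { println(it.joinToString()) }
}
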
-------------------------------------------------------------------------------- /src/test/kotlin/core/optimizer/ParamsOptimizerUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package core.optimizer

import com.kotlinnlp.simplednn.core.layers.models.feedforward.simple.FeedforwardLayerParameters
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArrayFactory

/**
 * Utilities to build the fixtures of the params optimizer tests.
 */
object ParamsOptimizerUtils {

  /**
   * @return feedforward layer parameters (input size 4, output size 2) with preset weights and biases
   */
  fun buildParams() = FeedforwardLayerParameters(inputSize = 4, outputSize = 2).also {

    it.unit.weights.values.assignValues(DenseNDArrayFactory.arrayOf(listOf(
      doubleArrayOf(0.3, 0.4, 0.2, -0.2),
      doubleArrayOf(0.2, -0.1, 0.1, 0.6)
    )))

    it.unit.biases.values.assignValues(DenseNDArrayFactory.arrayOf(
      doubleArrayOf(0.3, -0.4)
    ))
  }

  /**
   * @return a first set of weights errors
   */
  fun buildWeightsErrorsValues1() = DenseNDArrayFactory.arrayOf(listOf(
    doubleArrayOf(0.3, 0.4, 0.2, -0.2),
    doubleArrayOf(0.2, -0.1, 0.1, 0.6)
  ))

  /**
   * @return a first set of biases errors
   */
  fun buildBiasesErrorsValues1() = DenseNDArrayFactory.arrayOf(doubleArrayOf(0.3, -0.4))

  /**
   * @return a second set of weights errors
   */
  fun buildWeightsErrorsValues2() = DenseNDArrayFactory.arrayOf(listOf(
    doubleArrayOf(0.7, -0.8, 0.1, -0.6),
    doubleArrayOf(0.8, 0.6, -0.9, -0.2)
  ))

  /**
   * @return a second set of biases errors
   */
  fun buildBiasesErrorsValues2() = DenseNDArrayFactory.arrayOf(doubleArrayOf(-0.9, 0.1))
}
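
With these fixtures a plain learning-rate update is easy to verify by hand: each parameter moves against its gradient, w <- w - alpha * g. A stand-alone sketch with a hypothetical learning rate of 0.1 (not a value taken from the tests), applying the second weights-errors set to the preset weights:

fun main() {
  val alpha = 0.1  // hypothetical learning rate, for illustration only
  val w = arrayOf(doubleArrayOf(0.3, 0.4, 0.2, -0.2), doubleArrayOf(0.2, -0.1, 0.1, 0.6))
  val g = arrayOf(doubleArrayOf(0.7, -0.8, 0.1, -0.6), doubleArrayOf(0.8, 0.6, -0.9, -0.2))
  // w' = w - alpha * g, elementwise
  val updated = Array(2) { r -> DoubleArray(4) { c -> w[r][c] - alpha * g[r][c] } }
  updated.forEach { println(it.joinToString()) }
}
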
-------------------------------------------------------------------------------- /src/test/kotlin/deeplearning/attention/AttentionNetworkUtils.kt: --------------------------------------------------------------------------------
/* Copyright 2016-present The KotlinNLP Authors. All Rights Reserved.
 *
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, you can obtain one at http://mozilla.org/MPL/2.0/.
 * ------------------------------------------------------------------*/

package deeplearning.attention

import com.kotlinnlp.simplednn.core.layers.LayerType
import com.kotlinnlp.simplednn.core.layers.models.feedforward.simple.FeedforwardLayerParameters
import com.kotlinnlp.simplednn.deeplearning.attention.attentionnetwork.AttentionNetworkParameters
import com.kotlinnlp.simplednn.deeplearning.attention.attentionnetwork.AttentionNetwork
import com.kotlinnlp.simplednn.simplemath.ndarray.dense.DenseNDArray
import core.attention.AttentionLayerUtils

/**
 * Utilities to build the fixtures of the AttentionNetwork tests.
 */
internal object AttentionNetworkUtils {

  /**
   * @return an AttentionNetwork with dense input and the parameters of [buildAttentionNetworkParams]
   */
  fun buildNetwork() = AttentionNetwork<DenseNDArray>(
    inputType = LayerType.Input.Dense,
    model = buildAttentionNetworkParams(),
    propagateToInput = true)

  /**
   * @return attention network parameters initialized with the attention layer fixtures
   */
  private fun buildAttentionNetworkParams() = AttentionNetworkParameters(inputSize = 4, attentionSize = 2).apply {

    val transformParams = AttentionLayerUtils.buildTransformLayerParams()
    val attentionParams = AttentionLayerUtils.buildAttentionParams()

    transform.getLayerParams<FeedforwardLayerParameters>(0).unit.weights.values.assignValues(
      transformParams.unit.weights.values)

    transform.getLayerParams<FeedforwardLayerParameters>(0).unit.biases.values.assignValues(
      transformParams.unit.biases.values)

    attention.contextVector.values.assignValues(attentionParams.contextVector.values)
  }
}
-------------------------------------------------------------------------------- /src/test/resources/mockito-extensions/org.mockito.plugins.MockMaker: --------------------------------------------------------------------------------
mock-maker-inline
--------------------------------------------------------------------------------
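
As a closing note on the attention fixtures above: an attention network of this kind scores each input through a transform layer and a context vector, normalizes the scores with a softmax, and returns the importance-weighted sum of the inputs. A stand-alone numeric sketch of that computation with made-up values (none of these numbers come from the test fixtures, and the transform step is omitted for brevity):

import kotlin.math.exp

fun main() {
  val inputs = listOf(doubleArrayOf(0.1, 0.2), doubleArrayOf(0.4, -0.3), doubleArrayOf(-0.2, 0.5))
  val context = doubleArrayOf(0.3, -0.1)
  // raw scores: dot product of each (here, untransformed) input with the context vector
  val raw = inputs.map { x -> x.indices.sumOf { x[it] * context[it] } }
  // softmax normalization of the scores
  val exps = raw.map { exp(it) }
  val scores = exps.map { it / exps.sum() }
  // output: importance-weighted sum of the inputs
  val out = DoubleArray(2) { i -> inputs.indices.sumOf { k -> scores[k] * inputs[k][i] } }
  println(scores.joinToString() + " | " + out.joinToString())
}
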