public void LogisticCorrectRun()
{
    // Single-output layer: weights (2, 3, 5), zero bias, logistic activation.
    Layer layer = new Layer(
        new WeightsMatrix(
            Matrix<double>.Build.DenseOfArray(new double[,] { { 2, 3, 5 } })
        ),
        new BiasesVector(1),
        NeuralFunction.__Logistic,
        NeuralFunction.__LogisticDerivative
    );

    VectorBatch input = new VectorBatch(
        Matrix<double>.Build.DenseOfArray(new double[,] { { 7, 11, 13 } })
    );

    // Expected pre-activation: 2*7 + 3*11 + 5*13 = 112, so the output is Logistic(112).
    VectorBatch outputCheck = new VectorBatch(
        Matrix<double>.Build.DenseOfArray(new double[,] { { NeuralFunction.__Logistic(112) } })
    );

    VectorBatch result = layer.Run(input);
    Assert.AreEqual(outputCheck, result);
}
public void LogisticInputGradient()
{
    // Single-input, single-output layer: weight 1, zero bias, logistic activation.
    Layer layer = new Layer(
        new WeightsMatrix(
            Matrix<double>.Build.DenseOfArray(new double[,] { { 1 } })
        ),
        new BiasesVector(1),
        NeuralFunction.__Logistic,
        NeuralFunction.__LogisticDerivative
    );

    // Run the layer on a zero input so the pre-activation is 0.
    DataVector zeroVector = new DataVector(1);
    VectorBatch result = layer.Run(zeroVector);

    // Back-propagate an output gradient of 1.
    DataVector oneVector = new DataVector(
        Vector<double>.Build.DenseOfArray(new double[] { 1 })
    );
    VectorBatch inputGradient = layer.BackPropagate(oneVector);

    // With a unit weight, the input gradient equals the activation derivative at 0.
    DataVector inputGradientCheck = new DataVector(
        Vector<double>.Build.DenseOfArray(
            new double[] { NeuralFunction.__LogisticDerivative(0, NeuralFunction.__Logistic(0)) })
    );
    Assert.AreEqual(inputGradientCheck, inputGradient);
}
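// A minimal sketch (not part of the original tests) of the arithmetic LogisticCorrectRun
// assumes: Layer.Run is expected to compute activation(W * x + b). It uses plain
// MathNet.Numerics; the local Logistic helper is illustrative and is not the
// NeuralFunction API of the library under test.
private static double ExpectedLogisticOutput()
{
    double Logistic(double z) => 1.0 / (1.0 + Math.Exp(-z));

    Matrix<double> weights = Matrix<double>.Build.DenseOfArray(new double[,] { { 2, 3, 5 } });
    Vector<double> input = Vector<double>.Build.DenseOfArray(new double[] { 7, 11, 13 });
    Vector<double> bias = Vector<double>.Build.Dense(1); // zero bias, as in the test

    // Pre-activation: 2*7 + 3*11 + 5*13 + 0 = 112.
    double preActivation = (weights * input + bias)[0];

    return Logistic(preActivation); // the value LogisticCorrectRun checks against
}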