// Verifies that back-propagating a unit gradient through a single-neuron
// logistic layer (weight 1, zero bias) yields the logistic derivative at 0.
public void LogisticInputGradient()
{
    var layer = new Layer(
        new WeightsMatrix(
            Matrix<double>.Build.DenseOfArray(new double[,] { { 1 } })),
        new BiasesVector(1),
        NeuralFunction.__Logistic,
        NeuralFunction.__LogisticDerivative);

    // Run the layer on a zero input first so its internal state is primed
    // for the subsequent back-propagation pass.
    var zeroInput = new DataVector(1);
    layer.Run(zeroInput);

    var unitGradient = new DataVector(
        Vector<double>.Build.DenseOfArray(new double[] { 1 }));
    VectorBatch actualGradient = layer.BackPropagate(unitGradient);

    var expectedGradient = new DataVector(
        Vector<double>.Build.DenseOfArray(new double[]
        {
            NeuralFunction.__LogisticDerivative(0, NeuralFunction.__Logistic(0))
        }));

    Assert.AreEqual(expectedGradient, actualGradient);
}
// Verifies the forward pass of a single logistic neuron: the weighted sum
// of the input is squashed through the logistic function.
public void LogisticCorrectRun()
{
    var layer = new Layer(
        new WeightsMatrix(
            Matrix<double>.Build.DenseOfArray(new double[,] { { 2, 3, 5 } })),
        new BiasesVector(1),
        NeuralFunction.__Logistic,
        NeuralFunction.__LogisticDerivative);

    var input = new VectorBatch(
        Matrix<double>.Build.DenseOfArray(new double[,] { { 7, 11, 13 } }));

    // 2*7 + 3*11 + 5*13 = 112, then passed through the logistic function.
    var expectedOutput = new VectorBatch(
        Matrix<double>.Build.DenseOfArray(
            new double[,] { { NeuralFunction.__Logistic(112) } }));

    VectorBatch actualOutput = layer.Run(input);

    Assert.AreEqual(expectedOutput, actualOutput);
}
// Verifies that a component added via AddFixed appears in the forward
// enumeration but is excluded from the trainable-components enumeration.
public void CanAddFixed()
{
    var fixedComponent = new NeuralFunction(1);
    var chain = new NetComponentChain();

    chain.AddFixed(fixedComponent);

    var allComponents = new List<NetComponent>(chain.ForwardEnumeration);
    var trainableComponents = new List<NetComponent>(chain.ForwardTrainableComponentsEnumeration);

    Assert.AreEqual(1, chain.NumberOfComponents);
    Assert.IsTrue(allComponents.Contains(fixedComponent));
    // Fixed components must never be offered up for training.
    Assert.IsFalse(trainableComponents.Contains(fixedComponent));
}
/// <summary>
/// Assigns the neural (activation) function and the weighting function used by this component.
/// </summary>
/// <param name="neuralFunction">The neural function to store in <c>this.neuralFunction</c>.</param>
/// <param name="weigthingFunction">The weighting function to store in <c>this.weigthingFunction</c>.
/// NOTE(review): "weigthing" looks like a typo for "weighting" in both the type and the parameter
/// name; renaming would break callers (named arguments, overrides), so it is left as-is —
/// consider a coordinated rename across the project.</param>
// NOTE(review): C# convention is PascalCase ("SetFunctions"); the lowercase name is kept to
// preserve the public/virtual interface for existing callers and overriders.
public virtual void setFunctions(NeuralFunction neuralFunction, WeigthingFunction weigthingFunction) { this.neuralFunction = neuralFunction; this.weigthingFunction = weigthingFunction; }