/// <summary>
/// Creates a new perceptron with the given mini-batch size, learning rate and loss function.
/// The layer list starts empty; layers are added afterwards (e.g. via <c>Layer</c>).
/// </summary>
/// <param name="batching">Number of samples per mini-batch; must be at least 1.</param>
/// <param name="learningRate">Gradient-descent step size; must be strictly positive.</param>
/// <param name="errorFunction">Loss function used during training; must not be null.</param>
/// <exception cref="ArgumentException">Thrown when <paramref name="batching"/> is less than 1 or <paramref name="learningRate"/> is not positive.</exception>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="errorFunction"/> is null.</exception>
public Perceptron(int batching, double learningRate, ErrorFunction.ErrorFunction errorFunction)
{
    if (batching < 1)
    {
        throw new ArgumentException("Parameter batching must be a positive integer.", nameof(batching));
    }
    if (learningRate <= 0)
    {
        throw new ArgumentException("Parameter learningRate must be a positive real number.", nameof(learningRate));
    }
    if (errorFunction == null)
    {
        // Fail fast here instead of surfacing a NullReferenceException later during training.
        // ArgumentNullException derives from ArgumentException, so existing catch blocks still work.
        throw new ArgumentNullException(nameof(errorFunction));
    }

    this.Batching = batching;
    this.LearningRate = learningRate;
    this.Layers = new List<Layer>();
    this.ErrorFunction = errorFunction;
}
/// <summary>
/// Accumulates gradient contributions for this layer from one training sample:
/// fills <c>CostDerivatives</c> from the loss derivative, then adds the resulting
/// deltas into <c>BiasVectorChangeRecord</c> and <c>WeightMatrixChangeRecord</c>.
/// Presumably the records are applied/reset elsewhere once a batch completes — confirm against the caller.
/// </summary>
/// <param name="error">Unused here; kept for signature compatibility with callers.</param>
/// <param name="labels">Expected output values, one per output neuron (indexed like <c>Activations</c>).</param>
/// <param name="lossFunction">Loss whose derivative d(cost)/d(activation) drives the gradient.</param>
internal void BackPropagate(double error, double[] labels, ErrorFunction.ErrorFunction lossFunction)
{
    // dC/da_j for each neuron, straight from the loss derivative.
    for (int j = 0; j < CostDerivatives.Length; j++)
    {
        CostDerivatives[j] = lossFunction.GetDerivativeValue(labels[j], Activations[j]); //2.0 * (this.Layers[l].Activations[j] - labels[j]);
    }

    for (int j = 0; j < Activations.Length; j++)
    {
        // delta_j = f'(z_j) * dC/da_j. Hoisted out of the inner loop: it does not
        // depend on k, and the original recomputed the activation derivative for
        // every single weight of neuron j.
        double delta = ActivationFunction.GetDerivativeValue(WeightedSum[j]) * CostDerivatives[j];

        BiasVectorChangeRecord[j] += delta;
        for (int k = 0; k < WeightMatrix.GetLength(1); k++)
        {
            // dC/dw_jk = a_k(previous layer) * delta_j
            WeightMatrixChangeRecord[j, k] += Previous.Activations[k] * delta;
        }
    }
}
/// <summary>
/// Convenience factory: builds a fully-connected perceptron (input layer sized to the
/// features, <paramref name="hiddenLayers"/> hidden layers of equal width, output layer
/// sized to the labels — all sharing one activation function) and trains it.
/// </summary>
/// <param name="dataSet">Training data providing <c>FeatureSize</c> and <c>LabelSize</c>.</param>
/// <param name="batching">Mini-batch size forwarded to the constructor.</param>
/// <param name="epochs">Number of passes over the data set.</param>
/// <param name="learningRate">Gradient-descent step size forwarded to the constructor.</param>
/// <param name="hiddenLayers">How many hidden layers to insert.</param>
/// <param name="hiddenLayersSize">Neuron count of every hidden layer.</param>
/// <param name="activationFunction">Activation used by every layer, including the output layer.</param>
/// <param name="errorFunction">Loss function used during training.</param>
/// <returns>The trained perceptron.</returns>
public static Perceptron Train(DataSet.DataSet dataSet, int batching, int epochs, double learningRate, int hiddenLayers, int hiddenLayersSize, ActivationFunction.ActivationFunction activationFunction, ErrorFunction.ErrorFunction errorFunction)
{
    var network = new Perceptron(batching, learningRate, errorFunction);

    // Input layer matches the feature vector.
    network.Layer(dataSet.FeatureSize, activationFunction);

    // Hidden layers, all of the same width.
    for (int added = 0; added < hiddenLayers; added++)
    {
        network.Layer(hiddenLayersSize, activationFunction);
    }

    // Output layer matches the label vector.
    network.Layer(dataSet.LabelSize, activationFunction);

    network.Train2(dataSet, epochs);
    return network;
}