/// <summary>
/// Appends a new layer of <paramref name="size"/> neurons to this network and
/// returns <c>this</c> so calls can be chained (builder style).
/// </summary>
/// <param name="size">Number of neurons in the new layer.</param>
/// <param name="activationFunction">Activation applied by every neuron in the layer.</param>
/// <returns>This perceptron, for fluent chaining.</returns>
public Perceptron Layer(int size, ActivationFunction.ActivationFunction activationFunction)
{
    Layer layer = new Layer(activationFunction, size);

    if (Layers.Count == 0)
    {
        // First layer: nothing to link against yet.
        this.Layers.Add(layer);
        return this;
    }

    // Link the new layer into the doubly-linked chain after the current tail,
    // then build its weight matrix (which depends on the Previous link).
    Layer tail = Layers[Layers.Count - 1];
    this.Layers.Add(layer);
    tail.Next = layer;
    layer.Previous = tail;
    layer.BuildWeightMatrix();

    return this;
}
/// <summary>
/// Creates a layer of <paramref name="size"/> neurons, initializing each bias
/// to a random value in (-1, 1) and allocating the per-neuron working arrays
/// (change record, activations, weighted sums, cost derivatives).
/// </summary>
/// <param name="activationFunction">Activation applied by every neuron in this layer.</param>
/// <param name="size">Number of neurons in the layer.</param>
public Layer(ActivationFunction.ActivationFunction activationFunction, int size)
{
    this.ActivationFunction = activationFunction;
    this.Size = size;
    BiasVector = new double[size];
    // NOTE(review): a fresh Random per constructor call can yield identical
    // sequences when many layers are created in quick succession on older
    // frameworks; consider a shared/static Random (or Random.Shared on .NET 6+).
    Random rand = new Random();
    for (int i = 0; i < BiasVector.Length; i++)
    {
        // BUG FIX: the sign coin-flip was previously drawn ONCE before the
        // loop, so every bias in the vector shared the same sign. Draw it
        // per element so initialization is roughly uniform over (-1, 1).
        if (rand.NextDouble() <= 0.5)
        {
            BiasVector[i] = -rand.NextDouble();
        }
        else
        {
            BiasVector[i] = rand.NextDouble();
        }
    }
    BiasVectorChangeRecord = new double[size];
    Activations = new double[size];
    WeightedSum = new double[size];
    CostDerivatives = new double[size];
}
/// <summary>
/// Convenience factory: builds a perceptron with an input layer sized to the
/// data set's features, <paramref name="hiddenLayers"/> hidden layers of
/// <paramref name="hiddenLayersSize"/> neurons, and an output layer sized to
/// the label count (all sharing one activation function), then trains it.
/// </summary>
/// <param name="dataSet">Training data; supplies FeatureSize and LabelSize.</param>
/// <param name="batching">Mini-batch size forwarded to the perceptron.</param>
/// <param name="epochs">Number of training epochs.</param>
/// <param name="learningRate">Gradient-descent step size.</param>
/// <param name="hiddenLayers">How many hidden layers to insert.</param>
/// <param name="hiddenLayersSize">Neuron count per hidden layer.</param>
/// <param name="activationFunction">Activation used by every layer, including output.</param>
/// <param name="errorFunction">Error function used during training.</param>
/// <returns>The trained perceptron.</returns>
public static Perceptron Train(DataSet.DataSet dataSet, int batching, int epochs, double learningRate, int hiddenLayers, int hiddenLayersSize, ActivationFunction.ActivationFunction activationFunction, ErrorFunction.ErrorFunction errorFunction)
{
    Perceptron perceptron = new Perceptron(batching, learningRate, errorFunction);

    // Input layer mirrors the feature vector.
    perceptron.Layer(dataSet.FeatureSize, activationFunction);

    for (int hidden = 0; hidden < hiddenLayers; hidden++)
    {
        perceptron.Layer(hiddenLayersSize, activationFunction);
    }

    // Output layer mirrors the label vector.
    perceptron.Layer(dataSet.LabelSize, activationFunction);

    perceptron.Train2(dataSet, epochs);
    return perceptron;
}