/// <summary>
/// Runs one forward pass through the network for <paramref name="input"/> and then
/// backpropagates from <paramref name="target"/>, accumulating gradients in each layer.
/// </summary>
/// <param name="neuralNetwork">Network whose layers are mirrored by this training set.</param>
/// <param name="input">Activation values fed to the first layer.</param>
/// <param name="target">Expected output used as the starting error signal for backpropagation.</param>
public NeuralTrainSet(NeuralNetwork neuralNetwork, float[] input, float[] target)
{
    NeuralNetwork = neuralNetwork;

    int layerCount = neuralNetwork.Layers.Length;
    Layers = new NeuralTrainLayer[layerCount];

    // Forward pass: the first layer consumes the raw input, every later layer
    // consumes the previous layer's output.
    float[] activation = input;
    for (int i = 0; i < layerCount; i++)
    {
        Layers[i] = new NeuralTrainLayer(neuralNetwork.Layers[i], activation);
        activation = Layers[i].Output;
    }

    output = Layers.Last().Output;
    this.target = target;

    // Backward pass: walk the layers from last to first, threading the error
    // signal through each one. The flag marks the output layer (first visited).
    float[] errorSignal = target;
    for (int i = layerCount - 1; i >= 0; i--)
    {
        errorSignal = Layers[i].CalculateBackpropagateValues(errorSignal, i == layerCount - 1);
    }
}
/// <summary>
/// Applies the gradients accumulated in <paramref name="neuralTrainLayer"/> to this
/// layer's weights and biases, averaged over the number of training samples.
/// </summary>
/// <param name="neuralTrainLayer">Source of the accumulated DCDB/DCDW gradients.</param>
/// <param name="learningRate">Step size used to scale the averaged gradients.</param>
internal void Apply(NeuralTrainLayer neuralTrainLayer, float learningRate)
{
    // Nothing accumulated: applying would divide by zero, so bail out early.
    if (neuralTrainLayer.TrainingCount == 0)
    {
        return;
    }

    // Fixed misspelled local ("multiplyer"). Averages the summed gradients
    // over the sample count and scales by the learning rate.
    float multiplier = learningRate / neuralTrainLayer.TrainingCount;
    float[] bShift = neuralTrainLayer.DCDB.Multiply(multiplier);
    float[] wShift = neuralTrainLayer.DCDW.Multiply(multiplier);

    // Gradient descent: move against the gradient direction.
    Bias = Bias.Minus(bShift);
    Weight = Weight.Minus(wShift);
}
/// <summary>
/// Blends two training sets for the same network by merging their layers pairwise,
/// summing their accumulated gradients and training counts.
/// </summary>
/// <param name="a">First training set to blend.</param>
/// <param name="b">Second training set to blend.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="ArgumentException">The sets belong to different networks.</exception>
public NeuralTrainSet(NeuralTrainSet a, NeuralTrainSet b)
{
    if (a is null)
    {
        throw new ArgumentNullException(nameof(a));
    }
    if (b is null)
    {
        throw new ArgumentNullException(nameof(b));
    }
    // ArgumentException is more specific than the bare Exception previously thrown,
    // and remains backward-compatible with callers catching Exception.
    if (a.NeuralNetwork != b.NeuralNetwork)
    {
        throw new ArgumentException("Cannot blend sets that belong to different networks.");
    }

    NeuralNetwork = a.NeuralNetwork;
    int layerCount = a.Layers.Length;
    Layers = new NeuralTrainLayer[layerCount];
    for (int i = 0; i < layerCount; i++)
    {
        Layers[i] = new NeuralTrainLayer(a.Layers[i], b.Layers[i]);
    }
}
/// <summary>
/// Merges two accumulated gradient layers: element-wise sums of the bias (DCDB) and
/// weight (DCDW) gradients, plus the combined number of contributing samples.
/// </summary>
/// <param name="a">First gradient layer to merge.</param>
/// <param name="b">Second gradient layer to merge.</param>
public NeuralTrainLayer(NeuralTrainLayer a, NeuralTrainLayer b)
{
    TrainingCount = a.TrainingCount + b.TrainingCount;
    DCDB = a.DCDB.Plus(b.DCDB);
    DCDW = a.DCDW.Plus(b.DCDW);
}