public void PropagateBackward(NNBackpropagationData backpropagationData) {
    NNTrainingData trainingData = backpropagationData.TrainingData;

    // Validate training data dimensions
    if (trainingData.InputDataWidth != InputLayer.Width || trainingData.InputDataHeight != InputLayer.Height || trainingData.InputDataDepth != InputLayer.Depth) {
        throw new ArgumentException("Input data dimensions do not match the input layer.");
    }

    if (trainingData.TargetDataWidth != OutputLayer.Width || trainingData.TargetDataHeight != OutputLayer.Height || trainingData.TargetDataDepth != OutputLayer.Depth) {
        throw new ArgumentException("Target data dimensions do not match the output layer.");
    }

    for (int trainingDataIndex = 0; trainingDataIndex < trainingData.DataSize; trainingDataIndex++) {
        backpropagationData.BatchTrainingStartingCallback?.Invoke(trainingDataIndex, trainingData.DataSize);

        // Initialize backpropagation run
        NNDetailedBackpropagationData detailedBackpropagationData = new NNDetailedBackpropagationData(backpropagationData, trainingDataIndex);

        // Feed forward through the network and gather necessary data
        NNFeedData feedForwardInputData = trainingData.GetInputData(trainingDataIndex);
        for (int i = 0; i < this.layers.Length; i++) {
            Layer layer = this.layers[i];

            NNDetailedFeedData feedData = layer.FeedForward(feedForwardInputData);
            detailedBackpropagationData.FeedForwardData[layer] = feedData;

            feedForwardInputData = feedData.OutputData;
        }

        // Propagate error backwards through the network
        for (int i = this.layers.Length - 1; i >= 0; i--) {
            Layer layer = this.layers[i];
            layer.PropagateBackward(detailedBackpropagationData);
        }

        // Update weights for each layer
        foreach (KeyValuePair<Layer, float[]> updatedWeight in detailedBackpropagationData.UpdatedWeights) {
            Layer layer = updatedWeight.Key;
            float[] weights = updatedWeight.Value;

            for (int i = 0; i < weights.Length; i++) {
                layer.SetWeight(i, weights[i]);
            }
        }

        backpropagationData.BatchTrainingFinishedCallback?.Invoke(trainingDataIndex, trainingData.DataSize);
    }
}
internal static void MaxPoolingTest() {
    InputLayer inputLayer = new InputLayer(20, 20, 2);
    MaxPoolingLayer mPLayer = new MaxPoolingLayer(2, 2, inputLayer);

    Debug.WriteLine(mPLayer.Width + " " + mPLayer.Height + " " + mPLayer.Depth);

    float[,,] inputValues = new float[20, 20, 2];
    int i = 0;
    for (int z = 0; z < 2; z++) {
        Debug.WriteLine($"{z} --------------------");
        for (int y = 0; y < 20; y++) {
            StringBuilder sb = new StringBuilder();
            for (int x = 0; x < 20; x++, i++) {
                if (x != 0) {
                    sb.Append(", ");
                }
                inputValues[x, y, z] = i;
                sb.Append(i);
            }
            Debug.WriteLine(sb.ToString());
        }
    }

    NNFeedData inputData = new NNFeedData(inputValues);
    NNDetailedFeedData outputData = inputLayer.FeedForward(inputData);
    outputData = mPLayer.FeedForward(outputData.OutputData);

    for (int z = 0; z < mPLayer.Depth; z++) {
        Debug.WriteLine($"{z} --------------------");
        for (int y = 0; y < mPLayer.Height; y++) {
            StringBuilder sb = new StringBuilder();
            for (int x = 0; x < mPLayer.Width; x++) {
                if (x != 0) {
                    sb.Append(", ");
                }
                sb.Append($"{outputData.OutputData[outputData.OutputData.ToNeuronIndex(x, y, z)]}");
            }
            Debug.WriteLine(sb.ToString());
        }
    }
}
public NNFeedData FeedForward(NNFeedData input) {
    NNFeedData data = input;

    // Pass the data through each layer in order; the output of one layer feeds the next
    for (int i = 0; i < this.layers.Length; i++) {
        Layer layer = this.layers[i];

        NNDetailedFeedData feedData = layer.FeedForward(data);
        data = feedData.OutputData;
    }

    return data;
}
public void TestFeedForward() {
    // Input: a 4x4x2 volume filled with the values 1..32
    float[] data = new float[4 * 4 * 2];
    for (int i = 0; i < data.Length; i++) {
        data[i] = i + 1;
    }

    LayerStub prevLayer = new LayerStub(4, 4, 2);
    MaxPoolingLayer layer = new MaxPoolingLayer(2, 2, prevLayer);

    NNFeedData feedData = new NNFeedData(4, 4, 2, data);
    NNDetailedFeedData result = layer.FeedForward(feedData);

    // A 2x2 filter with stride 2 keeps the maximum of each quadrant in the first depth slice
    Assert.AreEqual(6, result.OutputData[result.OutputData.ToNeuronIndex(0, 0, 0)]);
    Assert.AreEqual(8, result.OutputData[result.OutputData.ToNeuronIndex(1, 0, 0)]);
    Assert.AreEqual(14, result.OutputData[result.OutputData.ToNeuronIndex(0, 1, 0)]);
    Assert.AreEqual(16, result.OutputData[result.OutputData.ToNeuronIndex(1, 1, 0)]);
}
internal override NNDetailedFeedData FeedForward(NNFeedData input) {
    float[] output = new float[Neurons];
    int[] maxIndices = new int[Neurons];

    // Slide the pooling window across every depth slice of the previous layer
    for (int zIdx = 0; zIdx < PreviousLayer.Depth; zIdx++) {
        for (int yiIdx = 0, yoIdx = 0; yoIdx < Height; yiIdx += this.Stride, yoIdx++) {
            for (int xiIdx = 0, xoIdx = 0; xoIdx < Width; xiIdx += this.Stride, xoIdx++) {
                // Find the maximum value (and its flat index) within the current filter window
                float max = float.MinValue;
                int maxIdx = -1;
                for (int fyIdx = 0; fyIdx < this.FilterSize; fyIdx++) {
                    for (int fxIdx = 0; fxIdx < this.FilterSize; fxIdx++) {
                        int idx = PreviousLayer.ConvertToNeuronIndex(xiIdx + fxIdx, yiIdx + fyIdx, zIdx);

                        if (input[idx] > max) {
                            max = input[idx];
                            maxIdx = idx;
                        }
                    }
                }

                int i = ConvertToNeuronIndex(xoIdx, yoIdx, zIdx);
                output[i] = max;
                maxIndices[i] = maxIdx;
            }
        }
    }

    NNDetailedFeedData feedData = new NNDetailedFeedData(this, output, output);

    // Remember which input neuron produced each maximum so backpropagation can route the error to it
    feedData.CustomData[nameof(maxIndices)] = maxIndices;

    return feedData;
}
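For reference, a minimal standalone sketch of the same max-pooling bookkeeping (pooled values plus the flat index of each selected maximum), written without the NNFeedData/Layer types; the MaxPoolSketch name and the row-major index layout are illustrative assumptions, not part of the library above.

internal static class MaxPoolSketch {
    // Pools a flat (width x height x depth) buffer with a square filter and stride,
    // returning the pooled values and the flat input index of each selected maximum.
    internal static (float[] Output, int[] MaxIndices) MaxPool(
            float[] input, int width, int height, int depth, int filterSize, int stride) {
        int outWidth = (width - filterSize) / stride + 1;
        int outHeight = (height - filterSize) / stride + 1;
        float[] output = new float[outWidth * outHeight * depth];
        int[] maxIndices = new int[output.Length];

        for (int z = 0; z < depth; z++) {
            for (int yo = 0; yo < outHeight; yo++) {
                for (int xo = 0; xo < outWidth; xo++) {
                    float max = float.MinValue;
                    int maxIdx = -1;

                    // Scan the filter window and remember where the maximum came from
                    for (int fy = 0; fy < filterSize; fy++) {
                        for (int fx = 0; fx < filterSize; fx++) {
                            // Assumed row-major layout: index = (z * height + y) * width + x
                            int idx = (z * height + (yo * stride + fy)) * width + (xo * stride + fx);
                            if (input[idx] > max) {
                                max = input[idx];
                                maxIdx = idx;
                            }
                        }
                    }

                    int outIdx = (z * outHeight + yo) * outWidth + xo;
                    output[outIdx] = max;
                    maxIndices[outIdx] = maxIdx;
                }
            }
        }

        return (output, maxIndices);
    }
}

With width = height = 4, depth = 1, filterSize = stride = 2 and the values 1..16, this returns 6, 8, 14, 16 for the first slice, the same quadrant maxima the unit test above asserts.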