Code example #1
0
        /// <summary>
        /// Trains the network on every entry of the supplied training set via backpropagation.
        /// For each entry: feeds the input forward through all layers (recording per-layer data),
        /// propagates the error backwards, then applies the computed weight updates to each layer.
        /// Start/finish callbacks are invoked around each entry, if set.
        /// </summary>
        /// <param name="backpropagationData">Training configuration: data set, learning rate, cost derivative and optional batch callbacks.</param>
        /// <exception cref="ArgumentException">
        /// Thrown when the training data's input or target dimensions do not match the network's
        /// input or output layer dimensions.
        /// </exception>
        public void PropagateBackward(NNBackpropagationData backpropagationData)
        {
            NNTrainingData trainingData = backpropagationData.TrainingData;

            // Validate training data dimensions
            if (trainingData.InputDataWidth != InputLayer.Width || trainingData.InputDataHeight != InputLayer.Height || trainingData.InputDataDepth != InputLayer.Depth)
            {
                // FIX: previously thrown with no message, which made the two dimension
                // failures indistinguishable to the caller.
                throw new ArgumentException("Training input dimensions do not match the network's input layer dimensions.", nameof(backpropagationData));
            }

            if (trainingData.TargetDataWidth != OutputLayer.Width || trainingData.TargetDataHeight != OutputLayer.Height || trainingData.TargetDataDepth != OutputLayer.Depth)
            {
                throw new ArgumentException("Training target dimensions do not match the network's output layer dimensions.", nameof(backpropagationData));
            }

            for (int trainingDataIndex = 0; trainingDataIndex < trainingData.DataSize; trainingDataIndex++)
            {
                backpropagationData.BatchTrainingStartingCallback?.Invoke(trainingDataIndex, trainingData.DataSize);

                // Initialize backpropagation run
                NNDetailedBackpropagationData detailedBackpropagationData = new NNDetailedBackpropagationData(backpropagationData, trainingDataIndex);

                // Feed forward through the network and gather necessary data
                NNFeedData feedForwardInputData = trainingData.GetInputData(trainingDataIndex);
                for (int i = 0; i < this.layers.Length; i++)
                {
                    Layer layer = this.layers[i];

                    NNDetailedFeedData feedData = layer.FeedForward(feedForwardInputData);
                    detailedBackpropagationData.FeedForwardData[layer] = feedData;

                    // Each layer's output becomes the next layer's input.
                    feedForwardInputData = feedData.OutputData;
                }

                // Propagate error backwards through the network, output layer first
                for (int i = this.layers.Length - 1; i >= 0; i--)
                {
                    this.layers[i].PropagateBackward(detailedBackpropagationData);
                }

                // Apply the weight updates computed during the backward pass
                foreach (KeyValuePair<Layer, float[]> updatedWeight in detailedBackpropagationData.UpdatedWeights)
                {
                    Layer   layer   = updatedWeight.Key;
                    float[] weights = updatedWeight.Value;

                    for (int i = 0; i < weights.Length; i++)
                    {
                        layer.SetWeight(i, weights[i]);
                    }
                }

                backpropagationData.BatchTrainingFinishedCallback?.Invoke(trainingDataIndex, trainingData.DataSize);
            }
        }
Code example #2
0
        /// <summary>
        /// Trains the network on the MNIST training set for <paramref name="runs"/> full passes,
        /// reporting per-batch timing statistics and saving the network after every batch.
        /// </summary>
        /// <param name="nn">The network to train; modified in place.</param>
        /// <param name="learningRate">Backpropagation learning rate.</param>
        /// <param name="runs">Number of full passes over the training set.</param>
        private static void Train(NeuralNetwork nn, float learningRate, int runs)
        {
            Console.WriteLine("Preparing Training...");
            NNFeedData[] trainingInputData  = new NNFeedData[training.Count];
            NNFeedData[] trainingTargetData = new NNFeedData[training.Count];
            for (int i = 0; i < training.Count; i++)
            {
                MnistEntry entry = training[i];

                // MNIST images are 28x28 single-channel; labels are one-hot encoded over 10 outputs.
                trainingInputData[i]  = new NNFeedData(28, 28, 1, ConvertArray(entry.Image));
                trainingTargetData[i] = new NNFeedData(10, 1, 1, LabelToArray(entry.Label));
            }
            NNTrainingData trainingData = new NNPreloadedTrainingData(trainingInputData, trainingTargetData);

            NNBackpropagationData backpropagationData = new NNBackpropagationData(trainingData, learningRate, (o, t) => o - t);

            double   totalTime      = 0;
            int      processedCount = 0;               // batches completed across ALL runs
            DateTime start          = DateTime.UtcNow; // start time of the batch currently running

            // The callbacks are identical for every run, so hook them up once
            // instead of re-assigning them on each loop iteration.
            backpropagationData.BatchTrainingStartingCallback = (trainingDataIndex, trainingSets) => {
                start = DateTime.UtcNow;
            };
            backpropagationData.BatchTrainingFinishedCallback = (trainingDataIndex, trainingSets) => {
                totalTime += (DateTime.UtcNow - start).TotalMilliseconds;
                processedCount++;

                // FIX: average over every batch processed so far. The old code divided by
                // (trainingDataIndex + 1), which inflated the average from the second run
                // onwards because totalTime accumulates across runs.
                double avgTime = totalTime / processedCount;
                // FIX: the batch at trainingDataIndex has just finished, so it is no
                // longer remaining (old code over-counted remaining work by one batch).
                double   remainingTime = avgTime * (trainingSets - trainingDataIndex - 1);
                TimeSpan avgTSpan      = TimeSpan.FromMilliseconds(avgTime);
                TimeSpan remTSpan      = TimeSpan.FromMilliseconds(remainingTime);
                TimeSpan totTSpan      = TimeSpan.FromMilliseconds(totalTime);
                string   avgTS         = $"{avgTSpan:ss\\.ffff}";
                string   remTS         = $"{remTSpan:hh\\:mm\\:ss}";
                string   totTS         = $"{totTSpan:hh\\:mm\\:ss}";

                // Checkpoint the network after every batch.
                Save(nn, $"primary_{trainingDataIndex}");

                Console.WriteLine($"Finished Training {trainingDataIndex}/{trainingSets} Passed:{totTS} Remaining:{remTS} Avg:{avgTS}");
            };

            Console.WriteLine("Starting Training...");
            for (int trainingRuns = 0; trainingRuns < runs; trainingRuns++)
            {
                nn.PropagateBackward(backpropagationData);
            }
        }
Code example #3
0
File: Test_Sin.cs  Project: gartoks/SimpleNN
        /// <summary>
        /// Trains the network to approximate sin(x) remapped into [0, 1], using one sample
        /// per degree over a full revolution (0..359 degrees, converted to radians).
        /// </summary>
        private static void Train(NeuralNetwork nn, float learningRate, int runs)
        {
            const int sampleCount = 360;

            NNFeedData[] inputs  = new NNFeedData[sampleCount];
            NNFeedData[] targets = new NNFeedData[sampleCount];
            for (int degree = 0; degree < sampleCount; degree++)
            {
                float radians = degree * (float)Math.PI / 180f;

                inputs[degree]  = new NNFeedData(1, height: 1, depth: 1, data: new float[] { radians });
                targets[degree] = new NNFeedData(1, height: 1, depth: 1, data: new float[] { (float)Math.Sin(radians) / 2f + 0.5f });
            }
            NNTrainingData trainingData = new NNPreloadedTrainingData(inputs, targets);

            NNBackpropagationData backpropagationData = new NNBackpropagationData(trainingData, learningRate, (o, t) => o - t);

            for (int run = 0; run < runs; run++)
            {
                nn.PropagateBackward(backpropagationData);
            }
        }
Code example #4
0
File: Test_XOR.cs  Project: gartoks/SimpleNN
        /// <summary>
        /// Trains the network on the XOR truth table: the four (a, b) bit pairs as inputs,
        /// a XOR b as the single target output.
        /// </summary>
        /// <param name="nn">The network to train; modified in place.</param>
        /// <param name="learningRate">Backpropagation learning rate.</param>
        /// <param name="runs">Number of full passes over the four training cases.</param>
        private static void TrainXOR(NeuralNetwork nn, float learningRate, int runs)
        {
            // FIX: the original computed Math.Pow(2, 2) in floating point, and re-evaluated
            // it in the loop condition on every iteration. It is just the constant 2^2 = 4.
            const int combinations = 1 << 2;

            NNFeedData[] trainingInputData  = new NNFeedData[combinations];
            NNFeedData[] trainingTargetData = new NNFeedData[combinations];
            for (int i = 0; i < combinations; i++)
            {
                // Decode i into the two input bits.
                int a = i % 2;
                int b = i / 2;
                int r = a ^ b;

                trainingInputData[i]  = new NNFeedData(2, 1, 1, a, b);
                trainingTargetData[i] = new NNFeedData(1, 1, 1, r);
            }
            NNTrainingData trainingData = new NNPreloadedTrainingData(trainingInputData, trainingTargetData);

            NNBackpropagationData backpropagationData = new NNBackpropagationData(trainingData, learningRate, (o, t) => o - t);

            for (int trainingRuns = 0; trainingRuns < runs; trainingRuns++)
            {
                nn.PropagateBackward(backpropagationData);
            }
        }
Code example #5
0
File: Test_Convolution.cs  Project: gartoks/SimpleNN
        /// <summary>
        /// Smoke test for a minimal convolutional network: a 3x3 single-channel input, one
        /// 2x2 convolution filter with known weights (0.1, 0.2, 0.3, 0.4), and a fully
        /// connected output layer with all weights set to 1. Prints the network output
        /// before and after a single backpropagation step towards the fixed target -3.2.
        /// </summary>
        internal static void ConvolutionTest()
        {
            InputLayer iL = new InputLayer(3, 3, 1);

            // One 2x2 filter, stride 1, no padding, linear (identity) activation.
            ConvolutionalLayer cL = new ConvolutionalLayer(1, 2, 1, 0, iL, ActivationFunctions.None());
            //Debug.WriteLine("Filter ---------------");
            // Running weight index, shared across the filter-weight loop below.
            int i = 0;

            // Assign the 2x2 filter weights row by row: 0.1, 0.2, 0.3, 0.4.
            for (int y = 0; y < 2; y++)
            {
                StringBuilder sb = new StringBuilder();
                for (int x = 0; x < 2; x++, i++)
                {
                    if (x != 0)
                    {
                        sb.Append(", ");
                    }

                    float w = 0.1f * (i + 1);

                    cL.SetWeight(i, w);
                    sb.Append(w);
                }
                //Debug.WriteLine(sb.ToString());
            }

            // Single-output fully connected layer; every weight is set to 1.
            FullyConnectedLayer fcL = new FullyConnectedLayer(1, cL, ActivationFunctions.None());
            //Debug.WriteLine("Weights ---------------");
            StringBuilder SB = new StringBuilder();

            // NOTE(review): i is still incremented here but never read again —
            // presumably leftover from an earlier version; fcL is indexed by j only.
            for (int j = 0; j < fcL.Weights; j++, i++)
            {
                if (j != 0)
                {
                    SB.Append(", ");
                }

                float w = 1f;

                fcL.SetWeight(j, w);
                SB.Append(w);
            }
            //Debug.WriteLine(SB.ToString());


            NeuralNetwork nn = new NeuralNetwork(iL, cL, fcL);


            // Fill the 3x3 input with the values 1..9, row by row (i is reused as the counter).
            float[,,] inputValues = new float[3, 3, 1];
            i = 0;
            for (int z = 0; z < 1; z++)
            {
                //Debug.WriteLine($"{z} --------------------");
                for (int y = 0; y < 3; y++)
                {
                    StringBuilder sb = new StringBuilder();
                    for (int x = 0; x < 3; x++, i++)
                    {
                        //if (x != 0)
                        //    sb.Append(",\t");

                        inputValues[x, y, z] = i + 1;
                        //sb.Append(inputValues[x, y, z]);
                    }
                    //Debug.WriteLine(sb.ToString());
                }
            }
            NNFeedData inputData = new NNFeedData(inputValues);

            // Output before training.
            Debug.WriteLine(nn.FeedForward(inputData)[0]);

            // Single training example with a fixed target, one backpropagation step.
            NNFeedData     targetData   = new NNFeedData(1, 1, 1, -3.2f);
            NNTrainingData trainingData = new NNPreloadedTrainingData(new[] { inputData }, new [] { targetData });

            NNBackpropagationData backpropagationData = new NNBackpropagationData(trainingData, 0.2f, (o, t) => o - t);

            //for (int j = 0; j < 1000; j++) {
            nn.PropagateBackward(backpropagationData);
            //}

            // Output after training — should have moved towards the target.
            Debug.WriteLine(nn.FeedForward(inputData)[0]);

            //NNDetailedFeedData outputData = iL.FeedForward(inputData);
            //outputData = cL.FeedForward(outputData.OutputData);

            //Debug.WriteLine("Convolution Out");
            //for (int z = 0; z < cL.Depth; z++) {
            //    Debug.WriteLine($"{z} --------------------");
            //    for (int y = 0; y < cL.Height; y++) {
            //        StringBuilder sb = new StringBuilder();
            //        for (int x = 0; x < cL.Width; x++) {
            //            if (x != 0)
            //                sb.Append(", ");

            //            sb.Append($"{outputData.OutputData[outputData.OutputData.ToNeuronIndex(x, y, z)]}");
            //        }
            //        Debug.WriteLine(sb.ToString());
            //    }
            //}

            //outputData = fcL.FeedForward(outputData.OutputData);
            //Debug.WriteLine("Fully Connected Out");
            //Debug.WriteLine(outputData.OutputData[0]);
        }