コード例 #1
0
        /// <summary>
        /// Fully connected feed forward: for each neuron, sum the bias and the
        /// weighted inputs, then apply the activation function in a second pass.
        /// </summary>
        /// <param name="input">Output data of the previous layer.</param>
        /// <returns>Detailed feed data holding both activated and raw outputs.</returns>
        internal override NNDetailedFeedData FeedForward(NNFeedData input)
        {
            // Pre-activation (net input) value for every neuron of this layer.
            float[] netInputs = new float[Neurons];

            for (int neuron = 0; neuron < Neurons; neuron++)
            {
                // Start from the neuron's bias, then accumulate weighted inputs.
                float net = this.biases[neuron];

                for (int src = 0; src < input.Size; src++)
                {
                    net += input[src] * this.weights[ToWeightIndex(src, neuron)];
                }

                netInputs[neuron] = net;
            }

            // Second pass: the whole raw vector is handed to the activation
            // function alongside each value (for activations that use it).
            float[] activated = new float[Neurons];
            for (int neuron = 0; neuron < Neurons; neuron++)
            {
                activated[neuron] = Activation.Function(netInputs[neuron], netInputs);
            }

            return new NNDetailedFeedData(this, activated, netInputs);
        }
コード例 #2
0
        /// <summary>
        /// Evaluates the network on the MNIST testing set and prints per-entry
        /// results plus the final score.
        /// </summary>
        /// <param name="nn">Network to evaluate.</param>
        /// <param name="tests">Number of entries to test; -1 means the whole set.</param>
        private static void Test(NeuralNetwork nn, int tests = -1)
        {
            // -1 means "use the full testing set"; otherwise clamp to its size.
            tests = tests == -1 ? testing.Count : tests;
            tests = Math.Min(testing.Count, tests);

            int score = 0;

            for (int i = 0; i < tests; i++)
            {
                Console.WriteLine($"Starting Test {i}/{tests}");

                MnistEntry entry = testing[i];

                // BUG FIX: the input data was created as 3x5 although MNIST
                // images are 28x28x1 — matching the dimensions used in Train().
                NNFeedData inputData  = new NNFeedData(28, 28, 1, ConvertArray(entry.Image));
                NNFeedData outputData = nn.FeedForward(inputData);

                // The guess is the label/value pair extracted from the raw output.
                (int label, float value)guess = ArrayToLabel(outputData.CopyData());
                if (guess.label == entry.Label)
                {
                    score++;
                }

                Console.WriteLine($"{entry.Label} | {guess.label} ({guess.value:F2})");
            }

            Console.WriteLine($"{score}/{tests}");
        }
コード例 #3
0
        /// <summary>
        /// Runs one backpropagation pass over every entry in the training data:
        /// feed forward through all layers, propagate the error backwards, then
        /// apply the collected weight updates to each layer.
        /// </summary>
        /// <param name="backpropagationData">Training data, learning parameters and optional progress callbacks.</param>
        /// <exception cref="ArgumentException">Thrown when the training input or target dimensions do not match the network's input/output layers.</exception>
        public void PropagateBackward(NNBackpropagationData backpropagationData)
        {
            NNTrainingData trainingData = backpropagationData.TrainingData;

            // Validate training data dimensions against the network boundaries.
            if (trainingData.InputDataWidth != InputLayer.Width || trainingData.InputDataHeight != InputLayer.Height || trainingData.InputDataDepth != InputLayer.Depth)
            {
                // FIX: previously thrown without any message; state which check failed.
                throw new ArgumentException("Training input dimensions do not match the network's input layer.", nameof(backpropagationData));
            }

            if (trainingData.TargetDataWidth != OutputLayer.Width || trainingData.TargetDataHeight != OutputLayer.Height || trainingData.TargetDataDepth != OutputLayer.Depth)
            {
                throw new ArgumentException("Training target dimensions do not match the network's output layer.", nameof(backpropagationData));
            }

            for (int trainingDataIndex = 0; trainingDataIndex < trainingData.DataSize; trainingDataIndex++)
            {
                backpropagationData.BatchTrainingStartingCallback?.Invoke(trainingDataIndex, trainingData.DataSize);

                // Per-entry scratch data: feed-forward results and weight updates.
                NNDetailedBackpropagationData detailedBackpropagationData = new NNDetailedBackpropagationData(backpropagationData, trainingDataIndex);

                // Feed forward through the network, recording each layer's data
                // for use by the backward pass.
                NNFeedData feedForwardInputData = trainingData.GetInputData(trainingDataIndex);
                for (int i = 0; i < this.layers.Length; i++)
                {
                    Layer layer = this.layers[i];

                    NNDetailedFeedData feedData = layer.FeedForward(feedForwardInputData);
                    detailedBackpropagationData.FeedForwardData[layer] = feedData;

                    feedForwardInputData = feedData.OutputData;
                }

                // Propagate the error backwards, output layer first.
                for (int i = this.layers.Length - 1; i >= 0; i--)
                {
                    this.layers[i].PropagateBackward(detailedBackpropagationData);
                }

                // Weight updates are applied only after the backward pass has
                // finished for all layers.
                foreach (KeyValuePair<Layer, float[]> updatedWeight in detailedBackpropagationData.UpdatedWeights)
                {
                    Layer   layer   = updatedWeight.Key;
                    float[] weights = updatedWeight.Value;

                    for (int i = 0; i < weights.Length; i++)
                    {
                        layer.SetWeight(i, weights[i]);
                    }
                }

                backpropagationData.BatchTrainingFinishedCallback?.Invoke(trainingDataIndex, trainingData.DataSize);
            }
        }
コード例 #4
0
        /// <summary>
        /// Input-layer feed forward: validates the incoming data's dimensions
        /// and passes a copy of it straight through unchanged.
        /// </summary>
        /// <param name="input">Data fed into the network.</param>
        /// <returns>Detailed feed data; raw and activated outputs are identical here.</returns>
        /// <exception cref="ArgumentOutOfRangeException">Thrown when the input dimensions do not match this layer.</exception>
        internal override NNDetailedFeedData FeedForward(NNFeedData input)
        {
            bool dimensionsMatch = input.Width == Width && input.Height == Height && input.Depth == Depth;
            if (!dimensionsMatch)
            {
                throw new ArgumentOutOfRangeException(nameof(input));
            }

            // No computation in the input layer — just forward a copy.
            float[] passthrough = input.CopyData();
            return new NNDetailedFeedData(this, passthrough, passthrough);
        }
コード例 #5
0
ファイル: Test_Convolution.cs プロジェクト: gartoks/SimpleNN
        /// <summary>
        /// Manual smoke test: feeds a sequentially-numbered 20x20x2 volume
        /// through a 2x2 max-pooling layer and prints input and output slices.
        /// </summary>
        internal static void MaxPoolingTest()
        {
            InputLayer inputLayer = new InputLayer(20, 20, 2);
            MaxPoolingLayer poolingLayer = new MaxPoolingLayer(2, 2, inputLayer);

            Debug.WriteLine(poolingLayer.Width + " " + poolingLayer.Height + " " + poolingLayer.Depth);

            // Fill the input with a running counter and print each depth slice.
            float[,,] inputValues = new float[20, 20, 2];
            int counter = 0;

            for (int z = 0; z < 2; z++)
            {
                Debug.WriteLine($"{z} --------------------");
                for (int y = 0; y < 20; y++)
                {
                    StringBuilder row = new StringBuilder();
                    for (int x = 0; x < 20; x++, counter++)
                    {
                        if (x != 0)
                        {
                            row.Append(", ");
                        }

                        inputValues[x, y, z] = counter;
                        row.Append(counter);
                    }
                    Debug.WriteLine(row.ToString());
                }
            }

            NNFeedData inputData = new NNFeedData(inputValues);

            // Pass through the input layer, then through the pooling layer.
            NNDetailedFeedData result = inputLayer.FeedForward(inputData);
            result = poolingLayer.FeedForward(result.OutputData);

            // Print the pooled output slice by slice.
            for (int z = 0; z < poolingLayer.Depth; z++)
            {
                Debug.WriteLine($"{z} --------------------");
                for (int y = 0; y < poolingLayer.Height; y++)
                {
                    StringBuilder row = new StringBuilder();
                    for (int x = 0; x < poolingLayer.Width; x++)
                    {
                        if (x != 0)
                        {
                            row.Append(", ");
                        }

                        row.Append($"{result.OutputData[result.OutputData.ToNeuronIndex(x, y, z)]}");
                    }
                    Debug.WriteLine(row.ToString());
                }
            }
        }
コード例 #6
0
        /// <summary>
        /// Reads one input value for the convolution at output position
        /// (cx, cy), filter offset (fx, fy) and depth d, honoring stride and
        /// zero padding. Positions inside the padded border return 0.
        /// </summary>
        /// <param name="input">Previous layer's output data.</param>
        /// <param name="cx">Output x position of the convolution window.</param>
        /// <param name="cy">Output y position of the convolution window.</param>
        /// <param name="fx">Filter x offset inside the window.</param>
        /// <param name="fy">Filter y offset inside the window.</param>
        /// <param name="d">Depth slice to read from.</param>
        /// <returns>The input value, or 0 for zero-padded positions.</returns>
        private float GetInputValue(NNFeedData input, int cx, int cy, int fx, int fy, int d)
        {
            // Translate the window position into padded input coordinates.
            int x = cx * Stride + fx - ZeroPadding;
            int y = cy * Stride + fy - ZeroPadding;

            if (x < 0 || x >= input.Width || y < 0 || y >= input.Height)
            {
                return(0);
            }

            // BUG FIX: previously indexed with the UNPADDED coordinates
            // (cx * Stride + fx, cy * Stride + fy), ignoring ZeroPadding even
            // though the bounds check above used the padded x/y.
            return(input[input.ToNeuronIndex(x, y, d)]);
        }
コード例 #7
0
        /// <summary>
        /// Feeds the input through every layer in order; each layer's output
        /// becomes the next layer's input.
        /// </summary>
        /// <param name="input">Data fed into the first layer.</param>
        /// <returns>The final layer's output data.</returns>
        public NNFeedData FeedForward(NNFeedData input)
        {
            NNFeedData current = input;

            foreach (Layer layer in this.layers)
            {
                current = layer.FeedForward(current).OutputData;
            }

            return current;
        }
コード例 #8
0
        /// <summary>
        /// Trains the network on the MNIST training set for the given number
        /// of runs, logging per-set timing and saving snapshots along the way.
        /// </summary>
        /// <param name="nn">Network to train.</param>
        /// <param name="learningRate">Learning rate for backpropagation.</param>
        /// <param name="runs">Number of full passes over the training data.</param>
        private static void Train(NeuralNetwork nn, float learningRate, int runs)
        {
            Console.WriteLine("Preparing Training...");

            // Convert every MNIST entry into input data and target data.
            NNFeedData[] inputs  = new NNFeedData[training.Count];
            NNFeedData[] targets = new NNFeedData[training.Count];
            for (int i = 0; i < training.Count; i++)
            {
                MnistEntry entry = training[i];

                inputs[i]  = new NNFeedData(28, 28, 1, ConvertArray(entry.Image));
                targets[i] = new NNFeedData(10, 1, 1, LabelToArray(entry.Label));
            }

            NNTrainingData trainingData = new NNPreloadedTrainingData(inputs, targets);

            // Error gradient: output minus target.
            NNBackpropagationData backprop = new NNBackpropagationData(trainingData, learningRate, (o, t) => o - t);

            double totalTime = 0;

            Console.WriteLine("Starting Training...");
            for (int run = 0; run < runs; run++)
            {
                DateTime batchStart = DateTime.UtcNow;

                backprop.BatchTrainingStartingCallback = (index, sets) => {
                    batchStart = DateTime.UtcNow;
                };
                backprop.BatchTrainingFinishedCallback = (index, sets) => {
                    totalTime += (DateTime.UtcNow - batchStart).TotalMilliseconds;

                    // Derive average, remaining and total time for the log line.
                    double avgMs = totalTime / (index + 1);
                    double remMs = avgMs * (sets - index);
                    string avgText = $"{TimeSpan.FromMilliseconds(avgMs):ss\\.ffff}";
                    string remText = $"{TimeSpan.FromMilliseconds(remMs):hh\\:mm\\:ss}";
                    string totText = $"{TimeSpan.FromMilliseconds(totalTime):hh\\:mm\\:ss}";

                    // Snapshot the network after every training set.
                    Save(nn, $"primary_{index}");

                    Console.WriteLine($"Finished Training {index}/{sets} Passed:{totText} Remaining:{remText} Avg:{avgText}");
                };

                nn.PropagateBackward(backprop);
            }
        }
コード例 #9
0
        /// <summary>
        /// Unit test: a 4x4x2 input filled with 1..32 pooled by a 2x2 max
        /// pooling layer must yield the maxima 6, 8, 14, 16 in slice 0.
        /// </summary>
        public void TestFeedForward()
        {
            // Arrange: sequential values 1..32 over a 4x4x2 volume.
            float[] values = new float[4 * 4 * 2];
            for (int n = 0; n < values.Length; n++)
            {
                values[n] = n + 1;
            }

            LayerStub       source  = new LayerStub(4, 4, 2);
            MaxPoolingLayer pooling = new MaxPoolingLayer(2, 2, source);
            NNFeedData      input   = new NNFeedData(4, 4, 2, values);

            // Act
            NNDetailedFeedData result = pooling.FeedForward(input);

            // Assert: each 2x2 window's maximum in the first depth slice.
            Assert.AreEqual(6, result.OutputData[result.OutputData.ToNeuronIndex(0, 0, 0)]);
            Assert.AreEqual(8, result.OutputData[result.OutputData.ToNeuronIndex(1, 0, 0)]);
            Assert.AreEqual(14, result.OutputData[result.OutputData.ToNeuronIndex(0, 1, 0)]);
            Assert.AreEqual(16, result.OutputData[result.OutputData.ToNeuronIndex(1, 1, 0)]);
        }
コード例 #10
0
ファイル: Test_XOR.cs プロジェクト: gartoks/SimpleNN
        /// <summary>
        /// Evaluates the network on all four XOR truth-table rows, printing
        /// each result and the average absolute error.
        /// </summary>
        /// <param name="nn">Network trained on XOR.</param>
        private static void CalculateXOR(NeuralNetwork nn)
        {
            float score = 0;

            for (int i = 0; i < Math.Pow(2, 2); i++)
            {
                // Decode the two input bits from the loop counter.
                int a = i % 2;
                int b = i / 2;

                int r = a ^ b;

                NNFeedData outputData = nn.FeedForward(new NNFeedData(2, new float[] { a, b }));

                // Accumulate the absolute error against the expected bit.
                score += (float)Math.Abs(outputData[0] - r);

                Console.WriteLine($"{a} ^ {b} => {r} | {outputData[0]}");
            }

            // Average error over the four cases.
            Console.WriteLine(score / 4);
        }
コード例 #11
0
ファイル: Test_Sin.cs プロジェクト: gartoks/SimpleNN
        /// <summary>
        /// Trains the network to approximate sin(x), one sample per whole
        /// degree, with targets rescaled from [-1, 1] into [0, 1].
        /// </summary>
        /// <param name="nn">Network to train.</param>
        /// <param name="learningRate">Learning rate for backpropagation.</param>
        /// <param name="runs">Number of full passes over the training data.</param>
        private static void Train(NeuralNetwork nn, float learningRate, int runs)
        {
            NNFeedData[] inputs  = new NNFeedData[360];
            NNFeedData[] targets = new NNFeedData[360];

            for (int deg = 0; deg < 360; deg++)
            {
                // Convert degrees to radians for the input value.
                float x = deg * (float)Math.PI / 180f;

                inputs[deg]  = new NNFeedData(1, height: 1, depth: 1, data: new float[] { x });
                targets[deg] = new NNFeedData(1, height: 1, depth: 1, data: new float[] { (float)Math.Sin(x) / 2f + 0.5f });
            }

            NNTrainingData trainingData = new NNPreloadedTrainingData(inputs, targets);

            // Error gradient: output minus target.
            NNBackpropagationData backprop = new NNBackpropagationData(trainingData, learningRate, (o, t) => o - t);

            for (int run = 0; run < runs; run++)
            {
                nn.PropagateBackward(backprop);
            }
        }
コード例 #12
0
        /// <summary>
        /// Max-pooling feed forward: slides a FilterSize x FilterSize window
        /// over each depth slice with the configured stride and keeps the
        /// maximum value of every window. The index of each winning input
        /// neuron is exposed via CustomData["maxIndices"].
        /// </summary>
        /// <param name="input">Output data of the previous layer.</param>
        /// <returns>Detailed feed data; raw and activated outputs coincide.</returns>
        internal override NNDetailedFeedData FeedForward(NNFeedData input)
        {
            float[] output     = new float[Neurons];
            int[]   maxIndices = new int[Neurons];

            for (int depth = 0; depth < PreviousLayer.Depth; depth++)
            {
                // inY/inX track the window origin in the input; outY/outX the
                // corresponding output position.
                for (int inY = 0, outY = 0; outY < Height; inY += this.Stride, outY++)
                {
                    for (int inX = 0, outX = 0; outX < Width; inX += this.Stride, outX++)
                    {
                        float best    = float.MinValue;
                        int   bestIdx = -1;

                        // Scan the window for its maximum value and its index.
                        for (int fy = 0; fy < this.FilterSize; fy++)
                        {
                            for (int fx = 0; fx < this.FilterSize; fx++)
                            {
                                int srcIdx = PreviousLayer.ConvertToNeuronIndex(inX + fx, inY + fy, depth);

                                if (input[srcIdx] > best)
                                {
                                    best    = input[srcIdx];
                                    bestIdx = srcIdx;
                                }
                            }
                        }

                        int dstIdx = ConvertToNeuronIndex(outX, outY, depth);
                        output[dstIdx]     = best;
                        maxIndices[dstIdx] = bestIdx;
                    }
                }
            }

            // No activation function for pooling — raw equals activated output.
            NNDetailedFeedData feedData = new NNDetailedFeedData(this, output, output);

            // Record which input neuron won each window for later consumers
            // (presumably the backward pass — confirm against PropagateBackward).
            feedData.CustomData[nameof(maxIndices)] = maxIndices;

            return feedData;
        }
コード例 #13
0
ファイル: Test_Sin.cs プロジェクト: gartoks/SimpleNN
        /// <summary>
        /// Samples the trained sin(x) network at 10 random points (fixed seed)
        /// and prints each prediction plus the accumulated average error.
        /// </summary>
        /// <param name="nn">Network trained on sin(x).</param>
        private static void Calculate(NeuralNetwork nn)
        {
            float error = 0;

            // Fixed seed so the evaluation points are reproducible.
            Random rng = new Random(42);

            for (int sample = 0; sample < 10; sample++)
            {
                // Random angle in [0, 2*pi).
                float x = (float)(2.0 * Math.PI * rng.NextDouble());

                NNFeedData outputData = nn.FeedForward(new NNFeedData(1, new float[] { x }));

                // Targets were trained rescaled into [0, 1]: sin(x)/2 + 0.5.
                float t = (float)Math.Sin(x) / 2f + 0.5f;
                error += (float)Math.Abs(outputData[0] - t);

                Console.WriteLine($"sin({x/Math.PI*180f:F1}) => {Math.Sin(x):F2}\t| {(outputData[0] * 2f - 1f):F2}");
            }

            Console.WriteLine(error / 10f);
        }
コード例 #14
0
ファイル: Test_XOR.cs プロジェクト: gartoks/SimpleNN
        /// <summary>
        /// Trains the network on the four rows of the XOR truth table for the
        /// given number of runs.
        /// </summary>
        /// <param name="nn">Network to train.</param>
        /// <param name="learningRate">Learning rate for backpropagation.</param>
        /// <param name="runs">Number of full passes over the training data.</param>
        private static void TrainXOR(NeuralNetwork nn, float learningRate, int runs)
        {
            NNFeedData[] inputs  = new NNFeedData[(int)Math.Pow(2, 2)];
            NNFeedData[] targets = new NNFeedData[(int)Math.Pow(2, 2)];

            for (int i = 0; i < Math.Pow(2, 2); i++)
            {
                // Decode the two input bits from the loop counter.
                int a = i % 2;
                int b = i / 2;

                inputs[i]  = new NNFeedData(2, 1, 1, a, b);
                targets[i] = new NNFeedData(1, 1, 1, a ^ b);
            }

            NNTrainingData trainingData = new NNPreloadedTrainingData(inputs, targets);

            // Error gradient: output minus target.
            NNBackpropagationData backprop = new NNBackpropagationData(trainingData, learningRate, (o, t) => o - t);

            for (int run = 0; run < runs; run++)
            {
                nn.PropagateBackward(backprop);
            }
        }
コード例 #15
0
        /// <summary>
        /// Convolutional feed forward: for every filter and output position,
        /// sum the filter bias plus the weighted input window, then apply the
        /// activation function in a second pass.
        /// </summary>
        /// <param name="input">Output data of the previous layer.</param>
        /// <returns>Detailed feed data holding both activated and raw outputs.</returns>
        internal override NNDetailedFeedData FeedForward(NNFeedData input)
        {
            float[] rawOutput = new float[Neurons];

            for (int fIdx = 0; fIdx < FilterCount; fIdx++)
            {
                for (int yIdx = 0; yIdx < Height; yIdx++)
                {
                    for (int xIdx = 0; xIdx < Width; xIdx++)
                    {
                        // Start from the filter's bias.
                        float sum = this.biases[fIdx];

                        // Accumulate the weighted window over the full input depth.
                        for (int fzIdx = 0; fzIdx < PreviousLayer.Depth; fzIdx++)
                        {
                            for (int fyIdx = 0; fyIdx < FilterSize; fyIdx++)
                            {
                                for (int fxIdx = 0; fxIdx < FilterSize; fxIdx++)
                                {
                                    float weight = this.weights[ToWeightIndex(fxIdx, fyIdx, fzIdx, fIdx)];

                                    sum += GetInputValue(input, xIdx, yIdx, fxIdx, fyIdx, fzIdx) * weight;
                                }
                            }
                        }

                        // BUG FIX: the bias was added twice — once when
                        // initializing 'sum' and again here. Add it only once,
                        // matching the fully connected layer's behavior.
                        rawOutput[ConvertToNeuronIndex(xIdx, yIdx, fIdx)] = sum;
                    }
                }
            }

            // Second pass: the whole raw vector is handed to the activation
            // function alongside each value (for activations that use it).
            float[] output = new float[Neurons];
            for (int oIdx = 0; oIdx < Neurons; oIdx++)
            {
                output[oIdx] = Activation.Function(rawOutput[oIdx], rawOutput);
            }

            return(new NNDetailedFeedData(this, output, rawOutput));
        }
コード例 #16
0
 /// <summary>
 /// Feeds the given input through this layer; implemented by each concrete
 /// layer type. Returns the layer's detailed feed-forward data.
 /// </summary>
 /// <param name="input">Output data of the previous layer (or network input).</param>
 /// <returns>Detailed feed data produced by this layer.</returns>
 internal abstract NNDetailedFeedData FeedForward(NNFeedData input);
コード例 #17
0
 /// <summary>
 /// Stub implementation: performs no computation and always returns null.
 /// NOTE(review): any caller that dereferences the result will throw —
 /// presumably this layer type is never fed forward directly; confirm.
 /// </summary>
 internal override NNDetailedFeedData FeedForward(NNFeedData input)
 {
     return(null);
 }
コード例 #18
0
ファイル: Test_Convolution.cs プロジェクト: gartoks/SimpleNN
        /// <summary>
        /// Manual smoke test: builds a tiny 3x3x1 -> convolution -> fully
        /// connected network with fixed weights, feeds a known input, runs one
        /// backpropagation step against target -3.2, and prints the output
        /// before and after training.
        /// </summary>
        internal static void ConvolutionTest()
        {
            InputLayer iL = new InputLayer(3, 3, 1);

            // One 2x2 filter, stride 1, no zero padding, no activation.
            ConvolutionalLayer cL = new ConvolutionalLayer(1, 2, 1, 0, iL, ActivationFunctions.None());
            //Debug.WriteLine("Filter ---------------");
            int i = 0;

            // Set the 2x2 filter weights to 0.1, 0.2, 0.3, 0.4.
            for (int y = 0; y < 2; y++)
            {
                StringBuilder sb = new StringBuilder();
                for (int x = 0; x < 2; x++, i++)
                {
                    if (x != 0)
                    {
                        sb.Append(", ");
                    }

                    float w = 0.1f * (i + 1);

                    cL.SetWeight(i, w);
                    sb.Append(w);
                }
                //Debug.WriteLine(sb.ToString());
            }

            // Fully connected output layer; all weights set to 1.
            FullyConnectedLayer fcL = new FullyConnectedLayer(1, cL, ActivationFunctions.None());
            //Debug.WriteLine("Weights ---------------");
            StringBuilder SB = new StringBuilder();

            // NOTE(review): 'i' keeps incrementing here but is never read after
            // this loop — appears to be a leftover from the loop above; confirm.
            for (int j = 0; j < fcL.Weights; j++, i++)
            {
                if (j != 0)
                {
                    SB.Append(", ");
                }

                float w = 1f;

                fcL.SetWeight(j, w);
                SB.Append(w);
            }
            //Debug.WriteLine(SB.ToString());


            NeuralNetwork nn = new NeuralNetwork(iL, cL, fcL);


            // Fill the 3x3 input with the values 1..9.
            float[,,] inputValues = new float[3, 3, 1];
            i = 0;
            for (int z = 0; z < 1; z++)
            {
                //Debug.WriteLine($"{z} --------------------");
                for (int y = 0; y < 3; y++)
                {
                    StringBuilder sb = new StringBuilder();
                    for (int x = 0; x < 3; x++, i++)
                    {
                        //if (x != 0)
                        //    sb.Append(",\t");

                        inputValues[x, y, z] = i + 1;
                        //sb.Append(inputValues[x, y, z]);
                    }
                    //Debug.WriteLine(sb.ToString());
                }
            }
            NNFeedData inputData = new NNFeedData(inputValues);

            // Output before training.
            Debug.WriteLine(nn.FeedForward(inputData)[0]);

            // Single training pair with target -3.2, learning rate 0.2,
            // error gradient: output minus target.
            NNFeedData     targetData   = new NNFeedData(1, 1, 1, -3.2f);
            NNTrainingData trainingData = new NNPreloadedTrainingData(new[] { inputData }, new [] { targetData });

            NNBackpropagationData backpropagationData = new NNBackpropagationData(trainingData, 0.2f, (o, t) => o - t);

            // One backpropagation step.
            //for (int j = 0; j < 1000; j++) {
            nn.PropagateBackward(backpropagationData);
            //}

            // Output after training.
            Debug.WriteLine(nn.FeedForward(inputData)[0]);

            //NNDetailedFeedData outputData = iL.FeedForward(inputData);
            //outputData = cL.FeedForward(outputData.OutputData);

            //Debug.WriteLine("Convolution Out");
            //for (int z = 0; z < cL.Depth; z++) {
            //    Debug.WriteLine($"{z} --------------------");
            //    for (int y = 0; y < cL.Height; y++) {
            //        StringBuilder sb = new StringBuilder();
            //        for (int x = 0; x < cL.Width; x++) {
            //            if (x != 0)
            //                sb.Append(", ");

            //            sb.Append($"{outputData.OutputData[outputData.OutputData.ToNeuronIndex(x, y, z)]}");
            //        }
            //        Debug.WriteLine(sb.ToString());
            //    }
            //}

            //outputData = fcL.FeedForward(outputData.OutputData);
            //Debug.WriteLine("Fully Connected Out");
            //Debug.WriteLine(outputData.OutputData[0]);
        }