Example #1
        public void SoftmaxPropagateTest()
        {
            int     nbInput = 10;
            NNArray input   = Utils.OneHot(nbInput, 5);
            NNArray output  = Utils.OneHot(nbInput, 3);

            IActivation activation;

            activation = new Softmax();

            var network = new Network(
                new IdentityLayer(nbInput),
                new DenseLayerNoBias(nbInput, nbInput, activation, new SquaredDistance()));

            network.Initialize();

            DateTime start;
            DateTime end;

            int    epoc = 0, maxEpoc = 10000;
            double error = double.MaxValue;

            start = DateTime.Now;
            while (++epoc < maxEpoc && error > 0.05)
            {
                error = network.Train(input, output, 0.01);
            }
            end = DateTime.Now;

            var duration = (end - start).TotalMilliseconds / 1000;

            Console.WriteLine($"Duration for activation {activation.Name}: {duration} \t epoc: {epoc}\terror: {error}");

            Assert.IsTrue(epoc < maxEpoc);
        }
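Example #1 trains a single no-bias dense layer to map one one-hot vector onto another through a Softmax activation. For reference, here is a minimal plain-array sketch of the two building blocks used above, one-hot encoding and a numerically stable softmax; Utils.OneHot and the library's Softmax are assumed to behave like this, but the sketch is an illustration, not the library code (requires System and System.Linq).

        // Sketch only: plain-array equivalents of Utils.OneHot and a Softmax forward pass.
        private static double[] OneHotSketch(int length, int hotIndex)
        {
            var v = new double[length]; // all zeros
            v[hotIndex] = 1.0;          // single 1 at the requested index
            return v;
        }

        private static double[] SoftmaxSketch(double[] z)
        {
            double max = z.Max();                              // shift for numerical stability
            var exp = z.Select(x => Math.Exp(x - max)).ToArray();
            double sum = exp.Sum();
            return exp.Select(e => e / sum).ToArray();         // components sum to 1
        }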
Example #2
        private static void TrainNetwork(IActivation activation, NNArray input, NNArray output)
        {
            var nbInput = input.Length;
            var network = new Network(
                new IdentityLayer(nbInput),                                     // input layer
                new DenseLayerNoBias(3, 2, activation, new SquaredDistance())); // output layer (3 -> 2)

            // Hidden layer (nbInput -> 3), inserted between the input and output layers.
            network.AddLayer(new DenseLayerNoBias(nbInput, 3, activation, new SquaredDistance()));

            network.Initialize();

            DateTime start;
            DateTime end;

            int    epoc = 0, maxEpoc = 10000;
            double error = double.MaxValue;

            start = DateTime.Now;
            while (++epoc < maxEpoc && error > 0.05)
            {
                error = network.Train(input, output, 0.01);
            }
            end = DateTime.Now;

            var duration = (end - start).TotalMilliseconds / 1000;

            Console.WriteLine($"Duration for activation {activation.Name}: {duration} \t epoc: {epoc}\terror: {error}");

            Assert.IsTrue(epoc < maxEpoc);
        }
Example #3
        public double Train(double[] input, NNArray expectedOutput, double learningRate)
        {
            this.Evaluate(input);

            double[] outputError = new double[this.OutputLayer.NbOutput];

            var error = this.OutputLayer.LossFunction.Evaluate(this.OutputLayer.Output, expectedOutput, outputError);

            double[] weightedError = HiddenLayers.Any() ? new double[this.OutputLayer.NbInput] : null;

            this.OutputLayer.BackPropagate(outputError, learningRate, weightedError);
            for (int n = this.HiddenLayers.Count - 1; n >= 0; n--)
            {
                var layer = this.HiddenLayers[n];

                // Apply the activation derivative to the weighted error from the layer above to get this layer's output error
                outputError = new double[layer.NbOutput];
                var derivative = layer.Activation.Derivative(layer.NonActivatedOutput);
                for (int i = 0; i < outputError.Length; i++)
                {
                    outputError[i] = weightedError[i] * derivative[i];
                }

                weightedError = n > 0 ? new double[layer.NbInput] : null;
                this.HiddenLayers[n].BackPropagate(outputError, learningRate, weightedError);
            }

            return error;
        }
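Train evaluates the network, gets the loss gradient at the output layer, then walks the hidden layers backwards, turning the weighted error coming from the layer above into that layer's output error via the activation derivative. The sketch below shows, on plain arrays, what a single no-bias dense layer's BackPropagate step is presumed to do; the Weights[output, input] layout and the sign convention are assumptions, not the library's actual code.

        // Sketch (plain arrays) of one dense-layer backward step, assuming weights[o, i]
        // maps input i to output o and outputError already includes the activation
        // derivative (as in the hidden-layer loop above).
        private static void BackPropagateSketch(
            double[,] weights, double[] input, double[] outputError,
            double learningRate, double[] weightedError /* may be null for the first layer */)
        {
            int nbOutput = weights.GetLength(0);
            int nbInput  = weights.GetLength(1);

            // Error pushed back to the previous layer: transpose(W) * outputError.
            if (weightedError != null)
            {
                for (int i = 0; i < nbInput; i++)
                {
                    double sum = 0;
                    for (int o = 0; o < nbOutput; o++)
                        sum += weights[o, i] * outputError[o];
                    weightedError[i] = sum;
                }
            }

            // Gradient step on the weights: dE/dW[o, i] = outputError[o] * input[i].
            for (int o = 0; o < nbOutput; o++)
                for (int i = 0; i < nbInput; i++)
                    weights[o, i] -= learningRate * outputError[o] * input[i];
        }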
Example #4
        public void BackPropagateNoBias_2NeuronTest()
        {
            double expectedWeight = 3;
            double actualWeight   = 2;

            double inputData = 2;

            double expectedOutput = inputData * expectedWeight;
            double actualOutput   = inputData * actualWeight;

            var layer = new DenseLayerNoBias(1, 1, new IdentityActivation(), new Distance());

            layer.Initialize();
            layer.Weights[0, 0] = actualWeight;
            var input = new double[] { inputData };

            layer.Evaluate(input);

            Assert.AreEqual(actualOutput, layer.Output[0]);

            var inputError = new NNArray(1);

            layer.BackPropagate(input, new double[] { expectedOutput }, 1, inputError);

            double expectedErrorInput = (expectedOutput - actualOutput) * actualWeight; // One might expect (expectedOutput - actualOutput) / actualWeight here, but the documentation uses the weighted form (see the chain-rule note below)

            Assert.AreEqual(expectedErrorInput, inputError[0]);
        }
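The chain rule resolves the doubt in the comment above: the neuron computes o = w * x, so do/dx = w and the error propagated back to the input is the output error multiplied by the weight, not divided by it. A numeric check with the test's values (the (expected - actual) sign being the convention the Distance loss appears to use here):

            double w = 2, x = 2;                       // actualWeight, inputData
            double o = w * x;                          // actualOutput   = 4
            double target = 3 * x;                     // expectedOutput = 6
            double outputError     = target - o;       // 2
            double propagatedError = outputError * w;  // dE/dx = dE/do * do/dx = 2 * 2 = 4, matching inputError[0] above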
Example #5
        public void EvaluateTest()
        {
            // Smoke test: a 6 -> 3 dense layer should evaluate a random input without throwing.
            var layer = new DenseLayer(6, 3, new IdentityActivation(), new Distance());

            layer.Initialize();

            var input = NNArray.Random(6);

            layer.Evaluate(input);
        }
Example #6
        public void MNISTPropagateTest()
        {
            MnistReader.RootPath = Path.GetFullPath(Path.Combine(Environment.CurrentDirectory, @"..\..\..\MNIST"));

            var images = MnistReader.ReadTestData().ToList();

            Assert.IsTrue(images.Count > 0);

            var     image  = images.ElementAt(0);
            NNArray input  = image.Values;
            NNArray output = Utils.OneHot(10, image.Label);

            var nbInput = input.Length;

            IActivation activation;

            activation = new Softmax();

            var network = new Network(
                new NormalizedLayer(nbInput, 1),
                new DenseLayerNoBias(nbInput, 10, activation, new CrossEntropyOneHot()));

            // network.AddLayer(new DenseLayerNoBias(nbInput, 28, activation, new SquaredDistance()));

            network.Initialize();

            DateTime start;
            DateTime end;

            int    epoc = 0, maxEpoc = 10000;
            double error = double.MaxValue;

            start = DateTime.Now;
            while (++epoc < maxEpoc && error > 0.01)
            {
                error = network.Train(input, output, 0.01);
            }
            end = DateTime.Now;

            var duration = (end - start).TotalMilliseconds / 1000;

            Console.WriteLine($"Duration for activation {activation.Name}: {duration} \t epoc: {epoc}\terror: {error}");

            Assert.AreEqual(image.Label, network.OutputLayer.Output.ArgMax());
            Assert.IsTrue(epoc < maxEpoc);

            foreach (var img in images.Where(i => i.Label == image.Label))
            {
                network.Evaluate(img.Values);
                Console.WriteLine($"{network.OutputLayer.Output.ArgMax()}");
            }
        }
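The final loop above only prints the predicted class for the other test images that share the trained image's label. A small sketch (using only the members already exercised in this test) that counts correct predictions instead of printing them:

            int correct = 0, total = 0;
            foreach (var img in images.Where(i => i.Label == image.Label))
            {
                network.Evaluate(img.Values);
                if (network.OutputLayer.Output.ArgMax() == img.Label)
                {
                    correct++;
                }
                total++;
            }
            Console.WriteLine($"Same-label accuracy after single-image training: {correct}/{total}");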
Example #7
        public void MatrixCalc()
        {
            //  W*I + B : test calculating Weights * Input + Biases
            int nbInput = 6;
            int nbOutput = 3;
            NNArray I = NNArray.Random(nbInput);
            NNMatrix W = NNMatrix.Random(nbInput, nbOutput);
            NNArray B = NNArray.Random(nbOutput);

            var tmp = W * I;
            var O = tmp + B;

            Assert.AreEqual(O.Length, nbOutput);
        }
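What W * I + B computes, written out on plain arrays so the dimensions in the assertion are explicit: each of the nbOutput components is a dot product of one weight row with the input, plus its bias. The [output, input] storage layout below is an assumption; NNMatrix may store its weights the other way around.

        // Sketch of a dense forward pass on plain arrays: O[o] = sum_i W[o, i] * I[i] + B[o].
        private static double[] DenseForwardSketch(double[,] w, double[] input, double[] bias)
        {
            int nbOutput = w.GetLength(0);
            int nbInput  = w.GetLength(1);
            var output   = new double[nbOutput];
            for (int o = 0; o < nbOutput; o++)
            {
                double sum = bias[o];
                for (int i = 0; i < nbInput; i++)
                    sum += w[o, i] * input[i];
                output[o] = sum;
            }
            return output;
        }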
Example #8
        public void BackPropagateTest()
        {
            int nbInput = 6;
            var input   = NNArray.Random(nbInput);
            var output  = new double[] { 0, 1 };

            IActivation activation;

            activation = new IdentityActivation();
            TrainNetwork(activation, input, output);
            activation = new Sigmoid();
            TrainNetwork(activation, input, output);
            activation = new Tanh();
            TrainNetwork(activation, input, output);
            activation = new Relu();
            TrainNetwork(activation, input, output);
        }
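For reference, the four activations cycled through above, with their derivatives as they are usually defined; this is a sketch of what IdentityActivation, Sigmoid, Tanh and Relu are presumed to compute, not the library's implementation.

        // Sketch of the activations exercised in BackPropagateTest and their derivatives.
        private static double Identity(double x)  => x;
        private static double IdentityD(double x) => 1;

        private static double Sigmoid(double x)   => 1.0 / (1.0 + Math.Exp(-x));
        private static double SigmoidD(double x)  { var s = Sigmoid(x); return s * (1 - s); }

        private static double TanhAct(double x)   => Math.Tanh(x);
        private static double TanhD(double x)     { var t = Math.Tanh(x); return 1 - t * t; }

        private static double Relu(double x)      => Math.Max(0, x);
        private static double ReluD(double x)     => x > 0 ? 1 : 0;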
Example #9
        public void MatrixPerformance()
        {
            //  W*I + B : test calculating Weights * Input + Biases
            int nbInput = 300;
            int nbOutput = 200;

            NNArray I = NNArray.Random(nbInput);
            NNMatrix W = NNMatrix.Random(nbInput, nbOutput);
            NNArray B = NNArray.Random(nbOutput);

            int count = 200;
            DateTime start;
            DateTime end;
            double duration;

            // Not optimized
            start = DateTime.Now;
            for (int i = 0; i < count; i++)
            {
                NNArray res = W * I + B;
            }
            end = DateTime.Now;
            duration = (end - start).TotalMilliseconds / 1000;
            Console.WriteLine("Duration Non Optimized: "+duration);

            // Optimized
            NNArray O = NNArray.Random(nbOutput);
            start = DateTime.Now;
            for (int i = 0; i < count; i++)
            {
                W.Multiply(I, O);
                O.Add(B, O);
            }
            end = DateTime.Now;
            duration = (end - start).TotalMilliseconds / 1000;
            Console.WriteLine("Duration Optimized: " + duration);
        }
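The gap between the two timings comes from allocations: W * I + B creates a fresh array for the product and another for the sum on every iteration, while Multiply(I, O) and Add(B, O) write into the pre-allocated O. A plain-array sketch of the allocation-free pattern (the [output, input] layout is an assumption; the point is the reuse of the result buffer, not the exact convention):

        // Sketch: write the matrix-vector product into a caller-owned buffer
        // instead of returning a fresh array each call.
        private static void MultiplyInto(double[,] w, double[] input, double[] result)
        {
            int nbOutput = w.GetLength(0);
            int nbInput  = w.GetLength(1);
            for (int o = 0; o < nbOutput; o++)
            {
                double sum = 0;
                for (int i = 0; i < nbInput; i++)
                    sum += w[o, i] * input[i];
                result[o] = sum; // overwrite in place, no new allocation
            }
        }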