Esempio n. 1
0
    // Trains a tiny two-Linear-layer network on a fixed 4x2 batch using a
    // hand-assembled squared-error loss (no loss-layer object).
    static void ThirdNN()
    {
        var rng = new Random();

        // Four 2-feature samples, one scalar target per sample.
        var inputs = new Tensor((Matrix) new double[, ] {
            { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
        }, true);
        var labels = new Tensor((Matrix) new double[, ] {
            { 0 }, { 1 }, { 0 }, { 1 }
        }, true);

        var model = new Sequential();
        model.Layers.Add(new Linear(2, 3, rng));
        model.Layers.Add(new Linear(3, 1, rng));

        var optimizer = new StochasticGradientDescent(model.Parameters, 0.1f);

        for (var epoch = 0; epoch < 10; epoch++)
        {
            var prediction = model.Forward(inputs);

            // Squared error, summed over the batch (vertical axis).
            var error = prediction.Sub(labels);
            var loss  = error.Mul(error).Sum(AxisZero.vertical);

            // Seed backprop with a ones tensor shaped like the loss.
            loss.Backward(new Tensor(Matrix.Ones(loss.Data.X, loss.Data.Y)));
            optimizer.Step();

            Console.WriteLine($"Epoch: {epoch} Loss: {loss}");
        }
    }
        /// <summary>
        /// Trains with a non-zero regularization lambda (last constructor
        /// argument, 0.01) and verifies the fitted network reproduces the
        /// known per-sample outputs to 10 decimal places.
        /// </summary>
        public void ShouldTrainUsingRegularizationParam()
        {
            var sgd = new StochasticGradientDescent(new CrossEntropy(), _layers, 3000, 4, 5D, 0.01D);

            sgd.Train(_trainingData);

            var network = new NeuralNetwork(_layers, _layers.First().PrimaryNeuronsCount);

            // Expected output for each of the four training samples, in order.
            var expected = new[] { 0.0285179059D, 0.9714820792D, 0.9714820797D, 0.0285163624D };

            for (var sample = 0; sample < expected.Length; sample++)
            {
                var result = network.Run(_trainingData[sample].Inputs);

                result.Should().HaveCount(1);
                Math.Round(result[0], 10).Should().Be(expected[sample]);
            }
        }
Esempio n. 3
0
    // Same setup as the sibling demos, but the loss is computed by a
    // MeanSquaredError module instead of being assembled by hand.
    static void FourthNN()
    {
        var rng = new Random();

        var inputs = new Tensor((Matrix) new double[, ] {
            { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
        }, true);
        var labels = new Tensor((Matrix) new double[, ] {
            { 0 }, { 1 }, { 0 }, { 1 }
        }, true);

        var model = new Sequential();
        model.Layers.Add(new Linear(2, 3, rng));
        model.Layers.Add(new Linear(3, 1, rng));

        var optimizer = new StochasticGradientDescent(model.Parameters, 0.1f);
        var criterion = new MeanSquaredError();

        for (var epoch = 0; epoch < 10; epoch++)
        {
            var prediction = model.Forward(inputs);
            var loss       = criterion.Forward(prediction, labels);

            // Seed backprop with a ones tensor shaped like the loss.
            loss.Backward(new Tensor(Matrix.Ones(loss.Data.X, loss.Data.Y)));
            optimizer.Step();

            Console.WriteLine($"Epoch: {epoch} Loss: {loss}");
        }
    }
Esempio n. 4
0
        /// <summary>
        /// Builds a randomly initialized 784-30-10 sigmoid network, reports its
        /// untrained accuracy, then runs <c>_epochs</c> training epochs of SGD,
        /// printing per-epoch training time and validation accuracy and tracking
        /// the best accuracy seen.
        /// </summary>
        public void Run()
        {
            Console.WriteLine("Building random neural network");
            var layers = new ILayer[] { new FullyConnectedLayer(new Sigmoid(), 30, 784), new FullyConnectedLayer(new Sigmoid(), 10, 30) };

            Randomiser.Randomise(layers);

            Console.WriteLine("Evaluating untrained neural network");
            var untrainedAccuracy = Statistics.GetAccuracyByMax(_validationData, new NeuralNetwork(layers, 784));

            Console.WriteLine($"Untrained network accuracy: {untrainedAccuracy.ToString("N2")}%");

            // Cross-entropy cost, 1 epoch per Train call, mini-batch size 20,
            // learning rate 1.0, no regularization.
            var stochasticGradientDescent = new StochasticGradientDescent(new CrossEntropy(), layers, 1, 20, 1D, 0D);

            var maxAccuracy = 0D;

            for (int i = 0; i < _epochs; i++)
            {
                Console.WriteLine($"Epoch {i + 1} started");
                var trainingLength   = Statistics.GetTrainingLength(stochasticGradientDescent, _trainingData);
                var trainingAccuracy = Statistics.GetAccuracyByMax(_validationData, new NeuralNetwork(layers, 784));
                Console.WriteLine($"Results after epoch {i + 1}:");
                // Fixed typo in output message: "miliseconds" -> "milliseconds".
                Console.WriteLine($"Training length in milliseconds: {trainingLength}, Accuracy: {trainingAccuracy.ToString("N2")}%");

                if (maxAccuracy < trainingAccuracy)
                {
                    maxAccuracy = trainingAccuracy;
                }
            }

            Console.WriteLine($"End of training. Best accuracy {maxAccuracy.ToString("N2")}%");
        }
        /// <summary>
        /// Trains with the quadratic cost function (no regularization) and
        /// verifies the fitted network reproduces the known per-sample outputs
        /// to 10 decimal places.
        /// </summary>
        public void ShouldTrainUsingQuadraticCostFunction()
        {
            var sgd = new StochasticGradientDescent(new Quadratic(), _layers, 3000, 4, 5D, 0);

            sgd.Train(_trainingData);

            var network = new NeuralNetwork(_layers, _layers.First().PrimaryNeuronsCount);

            // Expected output for each of the four training samples, in order.
            var expected = new[] { 0.0245579310D, 0.9661695582D, 0.9852113647D, 0.0320611480D };

            for (var sample = 0; sample < expected.Length; sample++)
            {
                var result = network.Run(_trainingData[sample].Inputs);

                result.Should().HaveCount(1);
                Math.Round(result[0], 10).Should().Be(expected[sample]);
            }
        }
Esempio n. 6
0
        /// <summary>
        /// Initializes a new Optimizer using the default values.
        /// </summary>
        /// <param name="theta">Theta to optimize.</param>
        /// <param name="maxIterations">Maximum number of iterations.</param>
        /// <param name="learningRate">Learning Rate (alpha) (Optional).</param>
        /// <param name="momentum">Momentum parameter for use in accelerated methods (Optional).</param>
        /// <param name="optimizationMethod">Type of optimization method to use (Optional).</param>
        /// <param name="optimizer">An external typed optimization method to use (Optional).</param>
        public Optimizer(Vector theta, int maxIterations, double learningRate = 1.0, double momentum = 0.9,
                         OptimizationMethods optimizationMethod = OptimizationMethods.StochasticGradientDescent, OptimizationMethod optimizer = null)
        {
            this.Completed = false;
            // Unless the caller explicitly selected an external method, the
            // built-in choice below OVERWRITES whatever was passed in
            // `optimizer`; an unrecognized enum value leaves it untouched.
            if (optimizationMethod != OptimizationMethods.External)
            {
                switch (optimizationMethod)
                {
                case OptimizationMethods.FastGradientDescent: optimizer = new FastGradientDescent()
                {
                        Momentum = momentum
                }; break;

                case OptimizationMethods.StochasticGradientDescent: optimizer = new StochasticGradientDescent(); break;

                case OptimizationMethods.NAGDescent: optimizer = new NAGDescent()
                {
                        Momentum = momentum
                }; break;
                }
            }

            // NOTE(review): "OpimizationMethod" is misspelled in the declaring
            // type; kept as-is because renaming would break external callers.
            this.OpimizationMethod = optimizer;

            // Shared bookkeeping: cost starts at MaxValue so any first
            // evaluation improves it; gradient starts as a zero vector
            // matching theta's length.
            this.Properties = new OptimizerProperties()
            {
                Iteration     = 0,
                MaxIterations = maxIterations,
                Cost          = double.MaxValue,
                Gradient      = Vector.Zeros(theta.Length),
                Theta         = theta,
                LearningRate  = learningRate,
                Momentum      = momentum
            };
        }
        /// <summary>
        /// Trains with the cross-entropy cost function (no regularization) and
        /// verifies the fitted network reproduces the known per-sample outputs
        /// to 10 decimal places.
        /// </summary>
        public void ShouldTrainUsingCrossEntropyCostFunction()
        {
            var sgd = new StochasticGradientDescent(new CrossEntropy(), _layers, 3000, 4, 5D, 0);

            sgd.Train(_trainingData);

            var network = new NeuralNetwork(_layers, _layers.First().PrimaryNeuronsCount);

            // Expected output for each of the four training samples, in order.
            var expected = new[] { 0.0005468953D, 0.9993728892D, 0.9994636693D, 0.0008765251D };

            for (var sample = 0; sample < expected.Length; sample++)
            {
                var result = network.Run(_trainingData[sample].Inputs);

                result.Should().HaveCount(1);
                Math.Round(result[0], 10).Should().Be(expected[sample]);
            }
        }
Esempio n. 8
0
    // Lowest-level variant of the demo: the two weight tensors are managed
    // directly and the forward pass is written out as matmul + add.
    static void SecondNN()
    {
        var rng = new Random();

        var inputs = new Tensor((Matrix) new double[, ] {
            { 0, 0 }, { 0, 1 }, { 1, 0 }, { 1, 1 }
        }, true);
        var labels = new Tensor((Matrix) new double[, ] {
            { 0 }, { 1 }, { 0 }, { 1 }
        }, true);

        // weights[0] is 2x3, weights[1] is 3x1; both are trainable.
        var weights = new List <Tensor> ();
        weights.Add(new Tensor(Matrix.Random(2, 3, rng), true));
        weights.Add(new Tensor(Matrix.Random(3, 1, rng), true));

        var optimizer = new StochasticGradientDescent(weights, 0.1f);

        for (var epoch = 0; epoch < 10; epoch++)
        {
            // pred = X * W0 + W1
            var prediction = inputs.MatMul(weights[0]).Add(weights[1]);

            // Squared error, summed over the batch (vertical axis).
            var error = prediction.Sub(labels);
            var loss  = error.Mul(error).Sum(AxisZero.vertical);

            loss.Backward(new Tensor(Matrix.Ones(loss.Data.X, loss.Data.Y)));
            optimizer.Step();

            Console.WriteLine($"Epoch: {epoch} Loss: {loss}");
        }
    }
Esempio n. 9
0
        /// <summary>
        /// Fits the SVM field from raw samples using an SGD teacher with all
        /// settings left at their defaults.
        /// </summary>
        private void Learn(double[][] input, double[] output)
        {
            // Alternative teachers considered: LinearDualCoordinateDescent,
            // ProbabilisticDualCoordinateDescent.
            // Tuning knobs left at defaults: Loss (e.g. Loss.L1),
            // Complexity (e.g. 1000), Tolerance (e.g. .1).
            var teacher = new StochasticGradientDescent();

            _svm = teacher.Learn(input, output);
        }
Esempio n. 10
0
    // Start is called before the first frame update
    // Unity coroutine: fits y = 12x + 15 + noise with a single Linear(1, 1)
    // layer, yielding once per iteration so training is spread across frames.
    IEnumerator Start()
    {
        var rng = new System.Random(2);

        // 1000 samples: inputs are 0..999 in a single column.
        var xs = (Matrix) new double[1000, 1];
        Matrix.MatrixLoop((i, j) =>
        {
            xs[i, 0] = i;
        }, xs.X, xs.Y);

        // Targets follow a noisy line through the same indices.
        var ys = (Matrix) new double[1000, 1];
        Matrix.MatrixLoop((i, j) =>
        {
            ys[i, 0] = i * 12 + 15 + rng.Next(10);
        }, xs.X, xs.Y);

        var X = new Tensor(xs, true);
        var Y = new Tensor(ys, true);

        var model = new Sequential();
        model.Layers.Add(new Linear(1, 1, rng));

        var optimizer = new StochasticGradientDescent(model.Parameters, 0.001);
        var criterion = new MeanSquaredError();

        for (var epoch = 0; epoch < 10000; epoch++)
        {
            // One yield per step keeps the frame loop responsive.
            yield return null;

            var prediction = model.Forward(X);
            print(prediction.Data.Size);

            var loss = criterion.Forward(prediction, Y);
            loss.Backward();
            optimizer.Step();

            print($"Epoch: {epoch} Loss: {loss.Data[0, 0]}");
            print(Y);
            print(prediction);
        }

        // Final forward pass over the raw inputs (non-trainable tensor).
        print(model.Forward(new Tensor(xs)));
    }
Esempio n. 11
0
        /// <summary>
        /// Runs 200 training epochs of SGD over the data set, each epoch
        /// advancing 1000 internal iterations, logging the error after each.
        /// </summary>
        private static void Train(BasicNetwork network, BasicMLDataSet trainingSet)
        {
            var trainer = new StochasticGradientDescent(network, trainingSet);

            for (var epoch = 1; epoch <= 200; epoch++)
            {
                trainer.Iteration(1000);
                Console.WriteLine($"Epoch: {epoch} Error: {trainer.Error}");
            }

            trainer.FinishTraining();
        }
Esempio n. 12
0
    /// <summary>
    /// Builds the 4-100-10-1 MLP (ReLU between linear layers), its SGD
    /// optimizer and MSE criterion, and grabs the cart-pole component.
    /// </summary>
    private void Start()
    {
        // Seeded RNG so weight initialization is reproducible between runs.
        r = new System.Random(seed);

        seq = new Sequential();
        seq.Layers.Add(new Linear(4, 100, r));
        seq.Layers.Add(new ReLuLayer());
        seq.Layers.Add(new Linear(100, 10, r));
        seq.Layers.Add(new ReLuLayer());
        seq.Layers.Add(new Linear(10, 1, r));

        sgd = new StochasticGradientDescent(seq.Parameters, learningRate);
        mse = new MeanSquaredError();

        cartPole = GetComponent <DirectMovementCartPole>();
    }
Esempio n. 13
0
        /// <summary>
        /// XOR is not linearly separable, so each sample is lifted with an
        /// explicit degree-2 polynomial feature map first; a linear SVM trained
        /// by SGD must then classify the transformed samples with zero error.
        /// </summary>
        public void LearnTest()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            // XOR labels in {-1, +1}.
            int[] xor = { -1, 1, 1, -1 };

            // Apply the kernel's feature map up-front rather than using the
            // kernel implicitly inside the machine.
            var kernel = new Polynomial(2, 0.0);

            double[][] augmented = new double[inputs.Length][];
            for (int row = 0; row < inputs.Length; row++)
            {
                augmented[row] = kernel.Transform(inputs[row]);
            }

            // Linear SVM teacher driven by stochastic gradient descent.
            var teacher = new StochasticGradientDescent()
            {
                LearningRate = 1e-3
            };

            var svm = teacher.Learn(augmented, xor);

            // The machine must separate the transformed XOR perfectly.
            bool[] predicted = svm.Decide(augmented);
            double error     = new ZeroOneLoss(xor).Loss(predicted);

            Assert.AreEqual(0, error);

            // Decision values must agree in sign with the labels, sample by sample.
            int[] output = augmented.Apply(p => Math.Sign(svm.Compute(p)));
            for (int row = 0; row < output.Length; row++)
            {
                Assert.AreEqual(System.Math.Sign(xor[row]), System.Math.Sign(output[row]));
            }
        }
Esempio n. 14
0
        /// <summary>
        /// Checks the network's analytic input gradient against a centered
        /// finite-difference estimate at a random 2-element input volume.
        /// The relative error must stay below 1e-2 for every component.
        /// </summary>
        public void ComputeGradient()
        {
            Random random = new Random();
            StochasticGradientDescent<char> trainer = new StochasticGradientDescent<char>(this.net)
            {
                LearningRate = 0.0001,
                Momentum     = 0.0,
                BatchSize    = 1,
                L2Decay      = 0.0
            };

            // here we only test the gradient at data, but if this is
            // right then that's comforting, because it is a function
            // of all gradients above, for all layers.

            // FIX: the original initializers assigned double expressions to
            // float storage, which does not compile in C# (no implicit
            // double -> float conversion); explicit casts / 'f' suffix added.
            Volume volume = new Volume(new float[] { (float)(random.NextDouble() * 2 - 1), (float)(random.NextDouble() * 2 - 1) });
            int    gti    = (int)Math.Floor(random.NextDouble() * 3);                     // ground truth index

            trainer.Learn(Enumerable.Repeat(Tuple.Create(volume, this.classes[gti]), 1)); // computes gradients at all layers, and at x

            Volume gradient = this.net.Layers[0].InputGradient;

            float delta = 0.000001f;

            for (int i = 0; i < volume.Length; i++)
            {
                float gradAnalytic = gradient[i];

                // Perturb component i by +delta and -delta to estimate the
                // partial derivative numerically, then restore it.
                float xold = volume[i];
                volume[i] += delta;
                float c0 = this.net.CostLoss(this.net.Compute(volume, false), this.classes[gti]);
                volume[i] -= 2 * delta;
                float c1 = this.net.CostLoss(this.net.Compute(volume, false), this.classes[gti]);
                volume[i] = xold; // reset

                float gradNumeric = (c0 - c1) / (2 * delta);
                float relError    = Math.Abs(gradAnalytic - gradNumeric) / Math.Abs(gradAnalytic + gradNumeric);

                Console.WriteLine("step: {0}, numeric: {1}, analytic: {2}, => relError: {3}", i, gradNumeric, gradAnalytic, relError);
                Assert.IsTrue(relError < 1e-2);
            }
        }
Esempio n. 15
0
        /// <summary>
        /// Trains a 4-8-1 network (ReLU hidden layer, sigmoid output) on the
        /// loaded data with SGD for 5000 iterations, then logs actual vs.
        /// ideal outputs for every training pair.
        /// </summary>
        void ItWorks()
        {
            // Removed unused locals from the original: a second BasicNetwork
            // that was never referenced, and weight/bias snapshots taken after
            // training but never read.
            var data    = LoadData();
            var label   = LoadLabel();
            var network = new BasicNetwork();

            network.AddLayer(new BasicLayer(null, true, 4));
            network.AddLayer(new BasicLayer(new ActivationReLU(), true, 8));
            network.AddLayer(new BasicLayer(new ActivationSigmoid(), false, 1));
            network.Structure.FinalizeStructure();
            network.Reset();

            var trainingSet = new BasicMLDataSet(data, label);
            var propagation = new StochasticGradientDescent(network, trainingSet);

            for (var epoch = 1; epoch <= 5000; epoch++)
            {
                propagation.Iteration();
                Console.WriteLine($"Epoch: {epoch} Error: {propagation.Error}");
            }

            propagation.FinishTraining();

            foreach (var pair in trainingSet)
            {
                var output = network.Compute(pair.Input);
                Debug.Log(pair.Input[0] + "," + pair.Input[1]
                          + ", actual=" + output[0] + ",ideal=" + pair.Ideal[0]);
            }
        }
Esempio n. 16
0
        /// <summary>
        /// Demonstrates training a 2-3-1 sigmoid network on XOR with
        /// cross-entropy SGD, printing an evaluation before and after training.
        /// </summary>
        public void Run()
        {
            var layer = new ILayer[] { new FullyConnectedLayer(new Sigmoid(), 3, 2), new FullyConnectedLayer(new Sigmoid(), 1, 3) };

            // Fixed seed keeps the demo deterministic between runs.
            Randomiser.Randomise(layer, new Random(5));

            // Fixed typo in output messages: "Evaluationg" -> "Evaluating".
            Console.WriteLine("Evaluating untrained neural network");
            DisplayEvaluation(layer);

            // 3000 epochs, mini-batch 4 (full batch here), learning rate 5,
            // no regularization.
            var stochasticGradientDescent = new StochasticGradientDescent(new CrossEntropy(), layer, 3000, 4, 5D, 0D);
            var trainingData = new List <TrainingElement>
            {
                new TrainingElement
                {
                    Inputs          = new double[] { 0D, 0D },
                    ExpectedOutputs = new double[] { 0D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 1D, 0D },
                    ExpectedOutputs = new double[] { 1D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 0D, 1D },
                    ExpectedOutputs = new double[] { 1D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 1D, 1D },
                    ExpectedOutputs = new double[] { 0D }
                }
            };

            stochasticGradientDescent.Train(trainingData);

            Console.WriteLine("Evaluating trained neural network");
            DisplayEvaluation(layer);
        }
Esempio n. 17
0
 /// <summary>
 /// Wires this component to the given SGD trainer and sizes the per-weight
 /// cache array to match the trainer's flat weight vector.
 /// </summary>
 public void Init(StochasticGradientDescent theTraining)
 {
     _training = theTraining;
     _cache    = new double[_training.Flat.Weights.Length];
 }
Esempio n. 18
0
 /// <summary>
 /// Creates a perceptron whose training is delegated to stochastic gradient
 /// descent over the supplied starting weights.
 /// </summary>
 /// <param name="initialWeights">Initial weight vector handed to the trainer.</param>
 /// <param name="learningRate">Step size for each SGD update.</param>
 /// <param name="maxIters">Upper bound on training iterations.</param>
 public SimplePerceptron(Vector <double> initialWeights, double learningRate = 0.01, int maxIters = 100)
 {
     _sgd = new StochasticGradientDescent(this, initialWeights, learningRate, maxIters);
 }
Esempio n. 19
0
        static void Main(string[] args)
        {
            // Path of the CSV file containing the data set. Other sets used
            // during development:
            //   @"C:\Users\kevin\Desktop\squaredtest.csv"  (NutrioxDataset)
            //   @"C:\Users\Bruker\Desktop\-5to5-200Rows.csv"
            string dataFilePath = @"C:\Users\Bruker\Desktop\NutrioxDataset.csv";

            // Load the data set and apply min-max normalization (no encoding).
            DataSet.DataSet dataSet = new DataSet.DataSet(dataFilePath, true);
            dataSet.PreProcessDataSet(NormalizationType.MinMax, 2, EncodingType.None, null);

            // Model hyperparameters: 2 inputs, two hidden layers of 5, 1 output.
            LayerStructure layerStructure = new LayerStructure()
            {
                numberOfInputNodes = 2, HiddenLayerList = new List <int> {
                    5, 5
                }, numberOfOutputNodes = 1
            };

            // Optimization strategy (used only when training a fresh network
            // via the commented-out constructor below).
            var regularizationStrategyFactory = new RegularizationStrategyFactory();
            StochasticGradientDescent SGD     = new StochasticGradientDescent(new SigmoidFunction(), new IdentityFunction(), new MeanSquaredError(), RegularizationType.None, regularizationStrategyFactory);

            // Training hyperparameters (also only for the fresh-network path).
            TrainingParameters trainingParams = new TrainingParameters()
            {
                epochs = 500, learningRate = 0.01, momentum = 0.01, RegularizationLambda = 0.00
            };

            // Either build a fresh network:
            //   ArtificialNeuralNetwork ann = new ArtificialNeuralNetwork(layerStructure, trainingParams, dataSet, SGD, new GaussianDistribution());
            // ...or, as here, load a previously fitted one from XML.
            XML xml = new XML();

            ArtificialNeuralNetwork ann = xml.LoadNetwork(@"C:\Users\Bruker\Desktop\BestNet.xml", dataSet) as ArtificialNeuralNetwork;

            // 70/30 train/test split.
            ann.SplitDataSetIntoTrainAndTestSets(0.7);

            // Training is disabled because the network was loaded pre-trained:
            //   ann.TrainNetwork();

            var crossValidationStrategyFactory = new CrossValidationStrategyFactory();
            NetworkEvaluator evaluator         = new NetworkEvaluator(ann);
            CrossValidator   crossValidator    = new CrossValidator(ann, evaluator, crossValidationStrategyFactory);

            // Cross-validation of the fitted model (disabled):
            //   crossValidator.KFold(10, 0.007);

            // Evaluate the fitted model on the held-out test set.
            evaluator.EvaluateNetwork(0.007);

            // --Optional utilities kept for reference--
            //   xml.SaveNetwork(dataFilePath, ann);     // serialize the model
            //   ann.SaveListOfErrors();                 // extract model info
            //   ann.GetApproximatedFunction(ann.SavePath + "/Function.txt");

            Console.ReadLine();
        }