        public LogicalXORBenchmarks()
        {
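            // XOR truth table: every pair of binary inputs mapped to its exclusive-or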
            _trainingData = new List<TrainingElement>
            {
                new TrainingElement
                {
                    Inputs          = new double[] { 0D, 0D },
                    ExpectedOutputs = new double[] { 0D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 1D, 0D },
                    ExpectedOutputs = new double[] { 1D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 0D, 1D },
                    ExpectedOutputs = new double[] { 1D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 1D, 1D },
                    ExpectedOutputs = new double[] { 0D }
                }
            };

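            // two fully connected sigmoid layers: 2 inputs -> 3 hidden neurons -> 1 output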
            var layers = new ILayer[] { new FullyConnectedLayer(new Sigmoid(), 3, 2), new FullyConnectedLayer(new Sigmoid(), 1, 3) };

            Randomiser.Randomise(layers, new Random(1));

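            // gradient-descent trainer with a cross-entropy cost; the remaining numeric arguments configure the training run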
            _trainer = new StochasticGradientDescent(new CrossEntropy(), layers, 1000, 4, 5D, 0D);
        }
 public void Initialize()
 {
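     // rebuild the 2-3-1 XOR network and its training set with a fixed seed so each benchmark run starts from the same weights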
     _layers = new ILayer[] { new FullyConnectedLayer(new Sigmoid(), 3, 2), new FullyConnectedLayer(new Sigmoid(), 1, 3) };
     Randomiser.Randomise(_layers, new Random(5));
     _trainingData = new List<TrainingElement>
     {
         new TrainingElement
         {
             Inputs          = new double[] { 0D, 0D },
             ExpectedOutputs = new double[] { 0D }
         },
         new TrainingElement
         {
             Inputs          = new double[] { 1D, 0D },
             ExpectedOutputs = new double[] { 1D }
         },
         new TrainingElement
         {
             Inputs          = new double[] { 0D, 1D },
             ExpectedOutputs = new double[] { 1D }
         },
         new TrainingElement
         {
              Inputs          = new double[] { 1D, 1D },
             ExpectedOutputs = new double[] { 0D }
         }
     };
 }
Example #3
        public void Run()
        {
            Console.WriteLine("Building random neural network");
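            // 784 input pixels -> 30 hidden sigmoid neurons -> 10 outputs, one per digit class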
            var layers = new ILayer[] { new FullyConnectedLayer(new Sigmoid(), 30, 784), new FullyConnectedLayer(new Sigmoid(), 10, 30) };

            Randomiser.Randomise(layers);

            Console.WriteLine("Evaluating untrained neural network");
            var untrainedAccuracy = Statistics.GetAccuracyByMax(_validationData, new NeuralNetwork(layers, 784));

            Console.WriteLine($"Untrained network accuracy: {untrainedAccuracy.ToString("N2")}%");

            var stochasticGradientDescent = new StochasticGradientDescent(new CrossEntropy(), layers, 1, 20, 1D, 0D);

            var maxAccuracy = 0D;

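            // each iteration runs one timed training pass and then re-measures accuracy on the validation set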
            for (int i = 0; i < _epochs; i++)
            {
                Console.WriteLine($"Epoch {i + 1} started");
                var trainingLength   = Statistics.GetTrainingLength(stochasticGradientDescent, _trainingData);
                var trainingAccuracy = Statistics.GetAccuracyByMax(_validationData, new NeuralNetwork(layers, 784));
                Console.WriteLine($"Results after epoch {i + 1}:");
                Console.WriteLine($"Training length in milliseconds: {trainingLength}, Accuracy: {trainingAccuracy.ToString("N2")}%");

                if (maxAccuracy < trainingAccuracy)
                {
                    maxAccuracy = trainingAccuracy;
                }
            }

            Console.WriteLine($"End of training. Best accuracy {maxAccuracy.ToString("N2")}%");
        }
        public DigitsRecognitionBenchmarks()
        {
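            // load the digit training examples from the bundled JSON resource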
            _trainingData = TrainingDataLoader.Load("NeuralNetworks.ConsoleSamples.Resources.digits-image-validation-set.json");

            var layers = new ILayer[] { new FullyConnectedLayer(new Sigmoid(), 30, 784), new FullyConnectedLayer(new Sigmoid(), 10, 30) };

            Randomiser.Randomise(layers, new Random(1));
            _trainer = new StochasticGradientDescent(new Quadratic(), layers, 3, 10, 1D, 0);
        }
        private void ExecuteRandomise()
        {
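            // the view returns how many random records to create; -1 means nothing should be generated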
            var number = RandomiseView.ShowRandomise();

            if (number == -1)
            {
                return;
            }
            var randomiser = new Randomiser(_accidentDAO, _carDAO, _personDAO);

            randomiser.Randomise(number);
        }
Example #6
        private void ExecuteRandomise()
        {
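            // same pattern as above, here filling the user, chat and message DAOs with random data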
            var number = RandomiseView.ShowRandomise();

            if (number == -1)
            {
                return;
            }
            var randomiser = new Randomiser(_userDao, _chatDao,
                                            _userChatDao, _messageDao);

            randomiser.Randomise(number);
        }
Example #7
        public void Run()
        {
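            // XOR demo: build and randomise the 2-3-1 network, then compare its output before and after training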
            var layers = new ILayer[] { new FullyConnectedLayer(new Sigmoid(), 3, 2), new FullyConnectedLayer(new Sigmoid(), 1, 3) };

            Randomiser.Randomise(layers, new Random(5));

            Console.WriteLine("Evaluating untrained neural network");
            DisplayEvaluation(layers);

            var stochasticGradientDescent = new StochasticGradientDescent(new CrossEntropy(), layers, 3000, 4, 5D, 0D);
            var trainingData = new List<TrainingElement>
            {
                new TrainingElement
                {
                    Inputs          = new double[] { 0D, 0D },
                    ExpectedOutputs = new double[] { 0D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 1D, 0D },
                    ExpectedOutputs = new double[] { 1D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 0D, 1D },
                    ExpectedOutputs = new double[] { 1D }
                },
                new TrainingElement
                {
                    Inputs          = new double[] { 1D, 1D },
                    ExpectedOutputs = new double[] { 0D }
                }
            };

            stochasticGradientDescent.Train(trainingData);

            Console.WriteLine("Evaluating trained neural network");
            DisplayEvaluation(layers);
        }