예제 #1
0
        /// <summary>
        /// Trains a recurrent network described by <paramref name="configuration"/> and logs
        /// per-epoch scores (training / validation / test) to &lt;LogsDirectory&gt;/&lt;Name&gt;.csv.
        /// </summary>
        /// <param name="configuration">Hyper-parameters, epoch count, and log-file name.</param>
        public void Evaluate(NetworkConfiguration configuration)
        {
            RecurrentNetwork        network = new RecurrentNetwork(configuration, 144, 20, 20, 20);
            RecurrentNetworkTrainer trainer = new RecurrentNetworkTrainer(network);

            trainer.LearningCoefficient = configuration.LearningCoefficient;
            trainer.NoDetectionValue    = configuration.NoDetectionValue;
            trainer.DetectionValue      = configuration.DetectionValue;
            trainer.Momentum            = configuration.Momentum;

            string path = Path.Combine(Paths.LogsDirectory, configuration.Name + ".csv");

            // `using` guarantees the CSV is flushed and closed even when training aborts
            // (the original never disposed the StreamWriter, leaking the file handle).
            using (StreamWriter writer = new StreamWriter(path))
            {
                configuration.WriteConfiguration(writer);

                writer.WriteLine("Time,Epoch,Mean Abs. Weight,Train F-Score,Train Recall,Train Precision,Validation F-Score,Validation Recall,Validation Precision,Test F-Score,Test Recall, Test Precision");

                NetworkScorer trainingScorer   = new NetworkScorer();
                NetworkScorer validationScorer = new NetworkScorer();
                NetworkScorer testingScorer    = new NetworkScorer();

                try
                {
                    // 1-based epoch counter for readable logs; `<=` runs exactly
                    // configuration.Epochs epochs (the original `<` ran one too few).
                    for (int i = 1; i <= configuration.Epochs; i++)
                    {
                        // Training pass updates weights and accumulates training scores...
                        foreach (TrainingSample sample in dataset.TrainingSamples)
                        {
                            trainer.Train(sample, trainingScorer);
                        }
                        // ...validation and test sets are only fed forward (no weight updates).
                        foreach (TrainingSample sample in dataset.ValidationSamples)
                        {
                            trainer.FeedForward(sample, validationScorer);
                        }
                        foreach (TrainingSample sample in dataset.TestingSamples)
                        {
                            trainer.FeedForward(sample, testingScorer);
                        }
                        writer.WriteLine("{0},{1},{2},{3},{4},{5},{6},{7},{8},{9},{10},{11}", DateTime.Now, i, network.MeanAbsoluteWeight,
                                         trainingScorer.FScore, trainingScorer.Recall, trainingScorer.Precision,
                                         validationScorer.FScore, validationScorer.Recall, validationScorer.Precision,
                                         testingScorer.FScore, testingScorer.Recall, testingScorer.Precision
                                         );
                        // Flush each epoch so the log is readable while a long run is in progress.
                        writer.Flush();

                        if (LogToConsole)
                        {
                            Console.WriteLine("{0:D3}    F-Score:{1:0.0000}   Recall:{2:0.0000}   Precision:{3:0.0000}   Mean Abs. Weight:{4:0.0000}", i, trainingScorer.FScore, trainingScorer.Recall, trainingScorer.Precision, network.MeanAbsoluteWeight);
                        }
                        // Scorers accumulate within an epoch; reset before the next pass.
                        trainingScorer.ResetScores();
                        validationScorer.ResetScores();
                        testingScorer.ResetScores();
                    }
                }
                catch (Exception ex)
                {
                    // Record the failure in the CSV so aborted runs are distinguishable from completed ones.
                    writer.WriteLine("{0},Aborted {1} at {2}", DateTime.Now, ex.Message, ex.StackTrace);
                }
            }
        }
예제 #2
0
        // Builds a network from a predefined weight/feedback structure and checks that
        // the reported layer/input/output counts match that structure.
        // NOTE(review): this block is truncated in the visible source — its closing
        // brace lies beyond this view, so only documentation was added.
        public void CreationTest()
        {
            var(weights, feedbacks) = GetTestNetworkStructure();
            var network = new RecurrentNetwork(weights, feedbacks);

            // LayersCount should equal the number of weight matrices.
            Assert.That.Value(network.LayersCount).IsEqual(weights.Length);
            // InputsCount comes from the column count of the first weight matrix.
            Assert.That.Value(network.InputsCount).IsEqual(weights[0].GetLength(1));
            // OutputsCount comes from the row count of the last weight matrix.
            Assert.That.Value(network.OutputsCount).IsEqual(weights[^ 1].GetLength(0));
예제 #3
0
        /// <summary>
        /// Trains a small simple-recurrent network on binary integer addition sequences
        /// and then executes every training sequence through the restored network as a
        /// smoke test (no assertions on the predictions themselves).
        /// </summary>
        public void RecurrentAddition()
        {
            var trainingSet = BinaryIntegers.Addition(10, false).Select(l => l.ToArray()).ToList();

            const int HIDDEN_SIZE = 16, NUM_EPOCHS = 100, BATCH_SIZE = 32;
            var       errorMetric = ErrorMetricType.BinaryClassification.Create();

            var layerTemplate = new LayerDescriptor(0.1f)
            {
                Activation           = ActivationType.LeakyRelu,
                WeightInitialisation = WeightInitialisationType.Gaussian,
                DecayRate            = 0.99f
            };

            var recurrentTemplate = layerTemplate.Clone();

            // NOTE(review): the clone already carries Gaussian initialisation from
            // layerTemplate, so this assignment appears redundant here; kept for
            // symmetry with the IntegerAddition sample, where the base template
            // uses Xavier and the override is meaningful.
            recurrentTemplate.WeightInitialisation = WeightInitialisationType.Gaussian;

            var trainingDataProvider = _lap.NN.CreateSequentialTrainingDataProvider(trainingSet);
            // One simple recurrent layer feeding a feed-forward output layer.
            var layers = new INeuralNetworkRecurrentLayer[] {
                _lap.NN.CreateSimpleRecurrentLayer(trainingDataProvider.InputSize, HIDDEN_SIZE, recurrentTemplate),
                _lap.NN.CreateFeedForwardRecurrentLayer(HIDDEN_SIZE, trainingDataProvider.OutputSize, layerTemplate)
            };
            RecurrentNetwork networkData = null;

            using (var trainer = _lap.NN.CreateRecurrentBatchTrainer(layers)) {
                // Initial recurrent memory: a zeroed vector of the hidden size.
                var memory          = Enumerable.Range(0, HIDDEN_SIZE).Select(i => 0f).ToArray();
                var trainingContext = _lap.NN.CreateTrainingContext(errorMetric, 0.1f, BATCH_SIZE);
                trainingContext.RecurrentEpochComplete += (tc, rtc) => {
                    Debug.WriteLine(tc.LastTrainingError);
                };
                trainer.Train(trainingDataProvider, memory, NUM_EPOCHS, _lap.NN.CreateRecurrentTrainingContext(trainingContext));
                // Snapshot the trained weights plus the final memory state for re-execution.
                networkData        = trainer.NetworkInfo;
                networkData.Memory = new FloatArray {
                    Data = memory
                };
            }

            var network = _lap.NN.CreateRecurrent(networkData);

            foreach (var sequence in trainingSet)
            {
                // Result intentionally discarded (the original bound it to an unused
                // local `result`) — this test only checks that execution succeeds.
                _ = network.Execute(sequence.Select(d => d.Input).ToList());
            }
        }
예제 #4
0
        /// <summary>
        /// Runs training for <paramref name="numEpochs"/> epochs, tracking the best score
        /// via <c>OnEpochComplete</c> and restoring the best parameters afterwards.
        /// </summary>
        /// <param name="trainingData">Sequential samples to train on.</param>
        /// <param name="numEpochs">Number of epochs to run.</param>
        /// <param name="context">Training context (error metric, rate, batch size).</param>
        /// <param name="recurrentContext">Optional recurrent context; a default one is built when null.</param>
        public void Train(ISequentialTrainingDataProvider trainingData, int numEpochs, ITrainingContext context, IRecurrentTrainingContext recurrentContext = null)
        {
            // Fall back to a default recurrent context when the caller supplied none.
            recurrentContext = recurrentContext ?? new RecurrentContext(_trainer.LinearAlgebraProvider, context);

            // Score the untrained network first so later improvements have a baseline.
            _bestScore = _GetScore(_testData, _trainer, _memory, recurrentContext);

            var format = context.ErrorMetric.DisplayAsPercentage
                ? "Initial score: {0:P}"
                : "Initial score: {0}";
            Console.WriteLine(format, _bestScore);

            _bestOutput = null;

            // Subscribe only for the duration of this run so repeated calls don't stack handlers.
            recurrentContext.TrainingContext.RecurrentEpochComplete += OnEpochComplete;
            _memory = _trainer.Train(trainingData, _memory, numEpochs, recurrentContext);
            recurrentContext.TrainingContext.RecurrentEpochComplete -= OnEpochComplete;

            // Ensure the best parameters seen during training are the ones left in effect.
            ApplyBestParams();
        }
예제 #5
0
        /// <summary>
        /// Demo entry point: builds a tiny 2-3-1 network with a recurrent middle layer
        /// and trains it on a two-sample toy mapping: (1,0) -> 1 and (0,1) -> 0.
        /// </summary>
        static void Main(string[] args)
        {
            Console.WriteLine("Setting up the test.");

            var layerDescs = new List <LayerDesc>()
            {
                new LayerDesc(2),
                new LayerDesc(3, LayerType.recurrent),
                new LayerDesc(1)
            };
            RecurrentNetwork net = new RecurrentNetwork(layerDescs);

            // Progress reports are printed by the sibling OnTrainingUpdate handler.
            net.TrainingUpdateEvent += OnTrainingUpdate;

            int iterations = 5000;

            // Two input patterns and their matching single-value targets.
            var sampleIn = new List <List <double> >()
            {
                new List <double>() { 1, 0 },
                new List <double>() { 0, 1 }
            };

            var sampleOut = new List <List <double> >()
            {
                new List <double>() { 1 },
                new List <double>() { 0 }
            };

            Console.WriteLine("Setup complete!");
            Console.WriteLine("Running tests...");

            net.Train(iterations, sampleIn, sampleOut, Reset: true);

            // Keep the console window open until a key is pressed.
            Console.ReadKey();
        }
예제 #6
0
        /// <summary>
        /// Scores the network against <paramref name="data"/> and, when the score beats
        /// the best seen so far, snapshots the network (weights plus memory) into
        /// <paramref name="output"/>. Also drives the no-improvement counter that
        /// triggers automatic training-rate reduction.
        /// </summary>
        /// <returns>True when this evaluation improved on <paramref name="bestScore"/>.</returns>
        protected bool _CalculateTestScore(ITrainingContext context, float[] memory, ISequentialTrainingDataProvider data, INeuralNetworkRecurrentBatchTrainer network, IRecurrentTrainingContext recurrentContext, ref double bestScore, ref RecurrentNetwork output)
        {
            var score       = _GetScore(data, network, memory, recurrentContext);
            var errorMetric = recurrentContext.TrainingContext.ErrorMetric;

            // The direction of "better" depends on the metric (e.g. accuracy vs. error).
            bool improved = errorMetric.HigherIsBetter
                ? score > bestScore
                : score < bestScore;

            if (improved)
            {
                bestScore     = score;
                output        = network.NetworkInfo;
                output.Memory = new FloatArray {
                    Data = memory
                };
            }
            context.WriteScore(score, errorMetric.DisplayAsPercentage, improved);

            // Count consecutive evaluations without improvement.
            _noChange = improved ? 0 : _noChange + 1;

            // After too many stagnant evaluations, back off the training rate and
            // restore the best parameters seen so far.
            if (_autoAdjustOnNoChangeCount.HasValue && _noChange >= _autoAdjustOnNoChangeCount.Value)
            {
                context.ReduceTrainingRate();
                Console.WriteLine("Reducing training rate to " + context.TrainingRate);
                ApplyBestParams();
                _noChange = 0;
            }
            return improved;
        }
예제 #7
0
        // End-to-end demo: trains a recurrent network on 1000 random binary integer
        // additions, reports the test score after each epoch, then prints predictions
        // for 8 freshly generated additions in a bitwise side-by-side format.
        public static void IntegerAddition()
        {
            // generate 1000 random integer additions
            var dataSet = BinaryIntegers.Addition(1000, false)
                          .Select(l => l.ToArray())
                          .ToList()
            ;

            // split the numbers into training and test sets (80/20)
            int split        = Convert.ToInt32(dataSet.Count * 0.8);
            var trainingData = dataSet.Take(split).ToList();
            var testData     = dataSet.Skip(split).ToList();

            // neural network hyper parameters
            const int   HIDDEN_SIZE = 32, NUM_EPOCHS = 25, BATCH_SIZE = 16;
            const float TRAINING_RATE = 0.001f;
            var         errorMetric   = ErrorMetricType.BinaryClassification.Create();
            var         layerTemplate = new LayerDescriptor(0.3f)
            {
                Activation           = ActivationType.Relu,
                WeightInitialisation = WeightInitialisationType.Xavier,
                WeightUpdate         = WeightUpdateType.RMSprop
            };
            var recurrentTemplate = layerTemplate.Clone();

            // the recurrent layer overrides the template's Xavier initialisation
            recurrentTemplate.WeightInitialisation = WeightInitialisationType.Gaussian;

            using (var lap = Provider.CreateLinearAlgebra()) {
                // create training data providers
                var trainingDataProvider = lap.NN.CreateSequentialTrainingDataProvider(trainingData);
                var testDataProvider     = lap.NN.CreateSequentialTrainingDataProvider(testData);
                // one simple recurrent layer feeding a feed-forward output layer
                var layers = new INeuralNetworkRecurrentLayer[] {
                    lap.NN.CreateSimpleRecurrentLayer(trainingDataProvider.InputSize, HIDDEN_SIZE, recurrentTemplate),
                    lap.NN.CreateFeedForwardRecurrentLayer(HIDDEN_SIZE, trainingDataProvider.OutputSize, layerTemplate)
                };

                // train the network
                RecurrentNetwork networkData = null;
                using (var trainer = lap.NN.CreateRecurrentBatchTrainer(layers)) {
                    // initial recurrent memory: a zeroed vector of the hidden size
                    var memory          = Enumerable.Range(0, HIDDEN_SIZE).Select(i => 0f).ToArray();
                    var trainingContext = lap.NN.CreateTrainingContext(errorMetric, TRAINING_RATE, BATCH_SIZE);
                    trainingContext.RecurrentEpochComplete += (tc, rtc) => {
                        // after each epoch, score the held-out test set and report it
                        var testError = trainer.Execute(testDataProvider, memory, rtc).SelectMany(s => s.Select(d => errorMetric.Compute(d.Output, d.ExpectedOutput))).Average();
                        Console.WriteLine($"Epoch {tc.CurrentEpoch} - score: {testError:P}");
                    };
                    trainer.Train(trainingDataProvider, memory, NUM_EPOCHS, lap.NN.CreateRecurrentTrainingContext(trainingContext));
                    // snapshot the trained weights plus the final memory state
                    networkData        = trainer.NetworkInfo;
                    networkData.Memory = new FloatArray {
                        Data = memory
                    };
                }

                // evaluate the network on some freshly generated data
                var network = lap.NN.CreateRecurrent(networkData);
                foreach (var sequence in BinaryIntegers.Addition(8, true))
                {
                    var result = network.Execute(sequence.Select(d => d.Input).ToList());
                    // print both addends, the expected sum, and the prediction bit-by-bit
                    Console.Write("First:     ");
                    foreach (var item in sequence)
                    {
                        _WriteBinary(item.Input[0]);
                    }
                    Console.WriteLine();

                    Console.Write("Second:    ");
                    foreach (var item in sequence)
                    {
                        _WriteBinary(item.Input[1]);
                    }
                    Console.WriteLine();
                    Console.WriteLine("           --------------------------------");

                    Console.Write("Expected:  ");
                    foreach (var item in sequence)
                    {
                        _WriteBinary(item.Output[0]);
                    }
                    Console.WriteLine();

                    Console.Write("Predicted: ");
                    foreach (var item in result)
                    {
                        _WriteBinary(item.Output[0]);
                    }
                    Console.WriteLine();
                    Console.WriteLine();
                }
            }
        }
예제 #8
0
        /// <summary>
        /// Rehydrates a serialised <see cref="RecurrentNetwork"/> into an executable form:
        /// each stored layer is converted, then wrapped together with the persisted
        /// memory vector.
        /// </summary>
        public IRecurrentExecution CreateRecurrent(RecurrentNetwork network)
        {
            var executableLayers = network.Layer.Select(_GetRecurrentExecution).ToList();
            var initialMemory    = _lap.Create(network.Memory.Data);

            return new RecurrentExecution(_lap, executableLayers, initialMemory);
        }