Beispiel #1
0
        /// <summary>
        /// Convenience function to train a vanilla feed forward neural network
        /// </summary>
        /// <param name="trainingContext">The training context to use</param>
        /// <param name="lap">Linear algebra provider</param>
        /// <param name="trainingData">Training data provider</param>
        /// <param name="testData">Test data provider</param>
        /// <param name="layerDescriptor">The layer descriptor</param>
        /// <param name="hiddenLayerSize">The size of the single hidden layer</param>
        /// <param name="numEpochs">Number of epochs to train for</param>
        /// <returns>A trained feed forward model (the network from the best scoring epoch)</returns>
        public static FeedForwardNetwork TrainNeuralNetwork(
            this ITrainingContext trainingContext,
            ILinearAlgebraProvider lap,
            ITrainingDataProvider trainingData,
            ITrainingDataProvider testData,
            LayerDescriptor layerDescriptor,
            int hiddenLayerSize,
            int numEpochs
            )
        {
            Console.WriteLine($"Training a {trainingData.InputSize}x{hiddenLayerSize}x{trainingData.OutputSize} neural network...");
            FeedForwardNetwork bestModel = null;

            using (var trainer = lap.NN.CreateBatchTrainer(layerDescriptor, trainingData.InputSize, hiddenLayerSize, trainingData.OutputSize)) {
                // The error metric's result is treated as "higher is better" here.
                // NOTE(review): if no epoch ever scores above 0, bestModel stays null — confirm callers handle that.
                var bestScore = 0f;
                trainingContext.EpochComplete += c => {
                    // score the current network against the held-out test set
                    var score = trainer
                        .Execute(testData)
                        .Select(d => trainingContext.ErrorMetric.Compute(d.Output, d.ExpectedOutput))
                        .Average();
                    var improved = score > bestScore;
                    if (improved) {
                        // keep a snapshot of the best network seen so far
                        bestScore = score;
                        bestModel = trainer.NetworkInfo;
                    }
                    trainingContext.WriteScore(score, trainingContext.ErrorMetric.DisplayAsPercentage, improved);
                };
                trainer.Train(trainingData, numEpochs, trainingContext);
            }
            return bestModel;
        }
Beispiel #2
0
        /// <summary>
        /// Feeds the supplied data forward through every layer (no training) and
        /// pairs each output row with its expected output row
        /// </summary>
        /// <param name="data">The data to feed through the network</param>
        /// <returns>One output/expected-output pair per input row</returns>
        public IReadOnlyList <IFeedForwardOutput> Execute(ITrainingDataProvider data)
        {
            var results = new List <IFeedForwardOutput>();

            foreach (var miniBatch in _GetMiniBatches(data, false, DEFAULT_BATCH_SIZE))
            {
                // every matrix created for this batch is collected and disposed at the end of the batch
                var toDispose = new List <IMatrix> {
                    miniBatch.Input,
                    miniBatch.ExpectedOutput
                };

                // feed forward through each layer in turn (training = false)
                var activation = miniBatch.Input;
                foreach (var layer in _layer)
                {
                    activation = layer.FeedForward(activation, false);
                    toDispose.Add(activation);
                }

                // break the final activation into rows, zipped with the expected rows
                var expectedRows = miniBatch.ExpectedOutput.AsIndexable().Rows;
                results.AddRange(activation.AsIndexable().Rows.Zip(expectedRows, (output, expected) => new FeedForwardOutput(output, expected)));

                // clear memory
                foreach (var matrix in toDispose)
                {
                    matrix.Dispose();
                }
            }
            return results;
        }
Beispiel #3
0
        /// <summary>
        /// Feeds data forward through the first layerDepth layers only and yields
        /// that layer's activations, one array of row vectors per mini batch
        /// </summary>
        /// <param name="data">The data to feed through the network</param>
        /// <param name="layerDepth">The number of layers to feed forward through</param>
        /// <returns>A lazy sequence of row-vector arrays, one array per mini batch</returns>
        public IEnumerable <IIndexableVector[]> ExecuteToLayer(ITrainingDataProvider data, int layerDepth)
        {
            IMatrix curr = null;

            foreach (var miniBatch in _GetMiniBatches(data, false, DEFAULT_BATCH_SIZE))
            {
                // every matrix created for this batch is collected and disposed below
                var garbage = new List <IMatrix>();
                garbage.Add(curr = miniBatch.Input);
                garbage.Add(miniBatch.ExpectedOutput);

                // feed forward through the first layerDepth layers only (training = false)
                for (var i = 0; i < layerDepth; i++)
                {
                    var layer = _layer[i];
                    garbage.Add(curr = layer.FeedForward(curr, false));
                }

                // materialise the rows BEFORE the matrices are disposed below
                // NOTE(review): this assumes AsIndexable().Rows copies the row data rather
                // than aliasing curr's buffer — otherwise the yielded vectors would be read
                // after disposal; confirm against the IIndexableVector implementation
                var ret = curr.AsIndexable().Rows.ToList();

                // clear memory
                garbage.ForEach(m => m.Dispose());

                yield return(ret.ToArray());
            }
        }
Beispiel #4
0
 /// <summary>
 /// Creates a network trainer from its collaborating services
 /// </summary>
 /// <param name="trainingDataProvider">Supplies the training data</param>
 /// <param name="networkFactory">Creates the network being trained</param>
 /// <param name="normalizeStrategy">Strategy used to normalize data</param>
 /// <param name="dynamicReorderingAlgorithm">Algorithm used to reorder training data dynamically</param>
 /// <exception cref="ArgumentNullException">Thrown when any dependency is null</exception>
 internal NetworkTrainer(ITrainingDataProvider trainingDataProvider, INetworkFactory networkFactory, INormalizeStrategy normalizeStrategy, IDynamicReorderingAlgorithm dynamicReorderingAlgorithm)
 {
     // fail fast on missing dependencies instead of surfacing a NullReferenceException later
     if (trainingDataProvider == null)
     {
         throw new ArgumentNullException(nameof(trainingDataProvider));
     }
     if (networkFactory == null)
     {
         throw new ArgumentNullException(nameof(networkFactory));
     }
     if (normalizeStrategy == null)
     {
         throw new ArgumentNullException(nameof(normalizeStrategy));
     }
     if (dynamicReorderingAlgorithm == null)
     {
         throw new ArgumentNullException(nameof(dynamicReorderingAlgorithm));
     }

     this.trainingDataProvider       = trainingDataProvider;
     this.networkFactory             = networkFactory;
     this.normalizeStrategy          = normalizeStrategy;
     this.dynamicReorderingAlgorithm = dynamicReorderingAlgorithm;
     networkPerformanceMeter         = new NetworkPerformanceMeter();
 }
Beispiel #5
0
        /// <summary>
        /// Trains the network for up to numEpochs epochs using mini batch gradient descent
        /// </summary>
        /// <param name="trainingData">The data to train on</param>
        /// <param name="numEpochs">The maximum number of epochs to run</param>
        /// <param name="context">The training context (supplies the mini batch size, error metric and stop flag)</param>
        public void Train(ITrainingDataProvider trainingData, int numEpochs, ITrainingContext context)
        {
            IMatrix curr = null;
            // the data provider itself may take part in backpropagation;
            // it is pushed onto the stack first so it is popped (visited) last
            var     additionalBackpropagation = trainingData as ICanBackpropagate;

            // the context can stop training early by clearing ShouldContinue
            for (int i = 0; i < numEpochs && context.ShouldContinue; i++)
            {
                context.StartEpoch(trainingData.Count);
                trainingData.StartEpoch();
                var batchErrorList = new List <double>();

                // iterate over each mini batch
                foreach (var miniBatch in _GetMiniBatches(trainingData, _stochastic, context.MiniBatchSize))
                {
                    // every matrix created for this batch is collected and disposed at the end of the batch
                    var garbage = new List <IMatrix>();
                    garbage.Add(curr = miniBatch.Input);
                    _lap.PushLayer();

                    // set up the layer stack (LIFO, so backpropagation visits layers in reverse order)
                    var layerStack = new Stack <ICanBackpropagate>();
                    if (additionalBackpropagation != null)
                    {
                        layerStack.Push(additionalBackpropagation);
                    }

                    // feed forward (training = true)
                    foreach (var layer in _layer)
                    {
                        garbage.Add(curr = layer.FeedForward(curr, true));
                        layerStack.Push(layer);
                    }

                    // calculate the error against the training examples
                    using (var expectedOutput = miniBatch.ExpectedOutput) {
                        garbage.Add(curr = context.ErrorMetric.CalculateDelta(curr, expectedOutput));

                        // calculate the training error for this mini batch (half the mean squared delta)
                        if (_calculateTrainingError)
                        {
                            batchErrorList.Add(curr.AsIndexable().Values.Select(v => Math.Pow(v, 2)).Average() / 2);
                        }

                        // backpropagate the error, output layer first
                        // NOTE(review): the third argument (layerStack.Any()) appears to mean
                        // "a further delta is still needed" — confirm against ICanBackpropagate
                        while (layerStack.Any())
                        {
                            var currentLayer = layerStack.Pop();
                            garbage.Add(curr = currentLayer.Backpropagate(curr, context, layerStack.Any()));
                        }
                    }

                    // clear memory
                    context.EndBatch();
                    garbage.ForEach(m => m?.Dispose());
                    _lap.PopLayer();
                }
                context.EndEpoch(_calculateTrainingError ? batchErrorList.Average() : 0f);
            }
        }
 /// <summary>
 /// Creates a feed forward training manager
 /// </summary>
 /// <param name="trainer">The trainer to manage</param>
 /// <param name="dataFile">Path of the file the network is persisted to</param>
 /// <param name="testData">Data used to score the network</param>
 /// <param name="autoAdjustOnNoChangeCount">Optional count used for auto adjustment when no change is observed</param>
 /// <returns>A new feed forward training manager</returns>
 public IFeedForwardTrainingManager CreateFeedForwardManager(
     INeuralNetworkTrainer trainer,
     string dataFile,
     ITrainingDataProvider testData,
     int? autoAdjustOnNoChangeCount = null
     )
 {
     var manager = new FeedForwardManager(trainer, dataFile, testData, autoAdjustOnNoChangeCount);
     return manager;
 }
Beispiel #7
0
        /// <summary>
        /// Splits the data set into consecutive mini batches, optionally shuffling
        /// the row visit order first
        /// </summary>
        /// <param name="data">The data to batch</param>
        /// <param name="shuffle">True to randomise the row order</param>
        /// <param name="batchSize">The maximum number of rows per batch</param>
        /// <returns>A lazy sequence of mini batches</returns>
        IEnumerable <IMiniBatch> _GetMiniBatches(ITrainingDataProvider data, bool shuffle, int batchSize)
        {
            // determine the (possibly shuffled) order in which rows are visited
            var allRows = Enumerable.Range(0, data.Count);
            if (shuffle)
            {
                allRows = allRows.Shuffle();
            }
            var iterationOrder = allRows.ToList();

            // slice the ordered rows into batches of at most batchSize rows each
            for (var start = 0; start < data.Count; start += batchSize)
            {
                var count     = Math.Min(iterationOrder.Count - start, batchSize);
                var batchRows = iterationOrder.GetRange(start, count);
                yield return data.GetTrainingData(batchRows);
            }
        }
Beispiel #8
0
        /// <summary>
        /// Trains the network, scoring it after each epoch, and restores the best
        /// parameters seen once training has finished
        /// </summary>
        /// <param name="trainingData">The data to train on</param>
        /// <param name="numEpochs">The number of epochs to train for</param>
        /// <param name="context">The training context</param>
        public void Train(ITrainingDataProvider trainingData, int numEpochs, ITrainingContext context)
        {
            // record the starting score so epoch results are compared against it
            _bestScore = _GetScore(context);
            Console.WriteLine(context.ErrorMetric.DisplayAsPercentage ? "Initial score: {0:P}" : "Initial score: {0}", _bestScore);

            _bestOutput            = null;
            context.EpochComplete += OnEpochComplete;
            try
            {
                _trainer.Train(trainingData, numEpochs, context);
            }
            finally
            {
                // unsubscribe even if training throws, otherwise the handler leaks
                // and would fire on unrelated future training runs on this context
                context.EpochComplete -= OnEpochComplete;
            }

            // ensure best values are current
            ApplyBestParams();
        }
Beispiel #9
0
        /// <summary>
        /// Creates a feed forward training manager, restoring a previously saved
        /// network from the data file if one exists
        /// </summary>
        /// <param name="trainer">The trainer to manage</param>
        /// <param name="dataFile">Path of the file the network is persisted to</param>
        /// <param name="testData">Data used to score the network</param>
        /// <param name="autoAdjustOnNoChangeCount">Optional count used for auto adjustment when no change is observed</param>
        /// <param name="reportCadence">How often (in epochs) progress is reported</param>
        public FeedForwardManager(INeuralNetworkTrainer trainer, string dataFile, ITrainingDataProvider testData, int?autoAdjustOnNoChangeCount = 5, int reportCadence = 1)
        {
            _trainer                   = trainer;
            _dataFile                  = dataFile;
            _testData                  = testData;
            _reportCadence             = reportCadence;
            _autoAdjustOnNoChangeCount = autoAdjustOnNoChangeCount;

            // resume from a previously serialized network, if one was saved
            if (!File.Exists(_dataFile))
            {
                return;
            }
            using (var stream = new FileStream(_dataFile, FileMode.Open, FileAccess.Read))
            {
                _trainer.NetworkInfo = Serializer.Deserialize <FeedForwardNetwork>(stream);
            }
        }
Beispiel #10
0
 /// <summary>
 /// Creates a bagging training data provider that wraps another provider
 /// </summary>
 /// <param name="dataProvider">The underlying data provider</param>
 /// <param name="weightFunc">Function that supplies the per-row weights</param>
 /// <exception cref="ArgumentNullException">Thrown when either argument is null</exception>
 public BaggingTrainingDataProvider(ITrainingDataProvider dataProvider, Func <float[]> weightFunc)
 {
     // fail fast on missing dependencies instead of surfacing a NullReferenceException later
     if (dataProvider == null)
     {
         throw new ArgumentNullException(nameof(dataProvider));
     }
     if (weightFunc == null)
     {
         throw new ArgumentNullException(nameof(weightFunc));
     }
     _dataProvider = dataProvider;
     _weightFunc   = weightFunc;
 }
 /// <summary>
 /// Creates a validator with a default data provider and an empty error message list
 /// </summary>
 public TrainingDataValidator()
 {
     _dataProvider  = new TrainingDataProvider();
     _errorMessages = new List <string>();
 }
 /// <summary>
 /// Creates a calculator backed by a default training data provider
 /// </summary>
 public TroopDistributionCalculator()
 {
     _dataProvider = new TrainingDataProvider();
 }
Beispiel #13
0
 /// <summary>
 /// Evaluates the network over a data set and averages the error metric across all rows
 /// </summary>
 /// <param name="data">The data to evaluate</param>
 /// <param name="trainingContext">Supplies the error metric used to score each output</param>
 /// <returns>The average metric value across all rows</returns>
 public float CalculateCost(ITrainingDataProvider data, ITrainingContext trainingContext)
 {
     var scores =
         from result in Execute(data)
         select trainingContext.ErrorMetric.Compute(result.Output, result.ExpectedOutput);
     return scores.Average();
 }