Example #1
 /// <summary>
 /// Saves all models, raising BatchStart before the batch and BatchEnd after it.
 /// </summary>
 public static void SaveAll()
 {
     // Notify subscribers that a batch save is about to begin.
     BatchStart?.Invoke(All, EventArgs.Empty);

     // Persist every model in the collection.
     All.ForEach(m => m.Save());

     // Notify subscribers that the batch save has completed.
     BatchEnd?.Invoke(All, EventArgs.Empty);
 }
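For context, here is a minimal sketch of the static members the snippet relies on; the Model type and the plain EventHandler event signatures are assumptions inferred from the calls above, not declarations taken from the source:

 // Assumed surrounding declarations (not shown in the example):
 public static List<Model> All { get; } = new List<Model>();

 // Raised before/after a batch save. Plain EventHandler matches the
 // (sender, EventArgs.Empty) invocation pattern used above.
 public static event EventHandler BatchStart;
 public static event EventHandler BatchEnd;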
Example #2
        private double TrainBatches(Matrix trainingData, Matrix labels, int batchSize, int epoch)
        {
            int           currentIndex = 0;
            int           currentBatch = 1;
            List<double>  batchLosses  = new List<double>();

            //Loop until the data is exhausted for the selected batch size
            while (trainingData.CanSliceRows(currentIndex, batchSize))
            {
                //Get the batch data based on the specified batch size
                var xtrain = trainingData.SliceRows(currentIndex, batchSize);
                var ytrain = labels.SliceRows(currentIndex, batchSize);

                //Run forward for all the layers to predict the value for the training set
                var ypred = Forward(xtrain);

                //Find the loss/cost value for the prediction with respect to the expected result
                var costVal = Cost.Forward(ypred, ytrain);
                batchLosses.Add(costVal.Data[0]);

                //Get the gradient of the cost function which is then passed to the layers during back-propagation
                var grad = Cost.Backward(ypred, ytrain);
                //Run back-propagation across all the layers
                Backward(grad);
                //Update the neural network weights using the specified optimizer
                foreach (var layer in Layers)
                {
                    Optimiser.Update(layer);
                }
                currentIndex += batchSize;
                //Note: this reports the latest batch's rounded loss, not a running average
                double batchLossAvg = Math.Round(costVal.Data[0], 3);

                BatchEndEventArgs eventArgs1 = new BatchEndEventArgs(epoch, currentBatch, batchLossAvg);
                BatchEnd?.Invoke(epoch, eventArgs1);
                currentBatch += 1;
            }

            return Math.Round(batchLosses.Average(), 3);
        }
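The event and argument types used by TrainBatches are not part of the snippet. A minimal subscriber sketch, assuming an EventHandler<BatchEndEventArgs> event on a model instance and illustrative property names (Epoch, Batch, Loss) that are not taken from the source:

        // Hypothetical subscription: log progress after every batch.
        model.BatchEnd += (sender, e) =>
            Console.WriteLine($"Epoch {e.Epoch}, batch {e.Batch}: loss {e.Loss}");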
Example #3
        /// <summary>
        /// Trains the model on the given dataset for the specified number of iterations, processing the data in batches.
        /// </summary>
        /// <param name="x">The training input data.</param>
        /// <param name="y">The expected outputs (labels) for the training data.</param>
        /// <param name="numIterations">The number of training iterations to run.</param>
        /// <param name="batchSize">The number of samples in each batch.</param>
        public void Train(NDArray x, NDArray y, int numIterations, int batchSize)
        {
            //Initialize batch loss and metric lists to temporarily hold results
            List<double> batchLoss    = new List<double>();
            List<double> batchMetrics = new List<double>();

            //Loop through the specified number of iterations
            for (int i = 1; i <= numIterations; i++)
            {
                //Initialize local variables
                int currentIndex = 0;
                batchLoss.Clear();
                batchMetrics.Clear();

                //Loop until the data is exhausted for the selected batch size
                while (x.Next(currentIndex, batchSize))
                {
                    //Get the batch data based on the specified batch size
                    var xtrain = x.Slice(currentIndex, batchSize);
                    var ytrain = y.Slice(currentIndex, batchSize);

                    //Run forward for all the layers to predict the value for the training set
                    var ypred = Forward(xtrain);

                    //Find the loss/cost value for the prediction with respect to the expected result
                    var costVal = Cost.Forward(ypred, ytrain);
                    batchLoss.AddRange(costVal.Data);

                    //Find the metric value for the prediction wrt expected result
                    if (Metric != null)
                    {
                        var metric = Metric.Calculate(ypred, ytrain);
                        batchMetrics.AddRange(metric.Data);
                    }

                    //Get the gradient of the cost function, which is then passed to the layers during back-propagation
                    var grad = Cost.Backward(ypred, ytrain);

                    //Run back-propagation across all the layers
                    Backward(grad);

                    //Update the neural network weights using the specified optimizer
                    foreach (var layer in Layers)
                    {
                        Optimizer.Update(i, layer);
                    }

                    currentIndex += batchSize;
                }

                //Collect the result and fire the event
                double batchLossAvg   = Math.Round(batchLoss.Average(), 2);
                double batchMetricAvg = Metric != null ? Math.Round(batchMetrics.Average(), 2) : 0;

                TrainingLoss.Add(batchLossAvg);

                if (batchMetrics.Count > 0)
                {
                    TrainingMetrics.Add(batchMetricAvg);
                }

                EpochEndEventArgs eventArgs = new EpochEndEventArgs(i, batchLossAvg, batchMetricAvg);
                BatchEnd?.Invoke(i, eventArgs);
            }
        }
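A hypothetical usage sketch follows; the model variable, the data arrays, and the EpochEndEventArgs property names (Epoch, Loss, Metric) are assumptions for illustration only:

            // Subscribe before training to observe per-iteration results,
            // then run training for 25 iterations with batches of 32 samples.
            model.BatchEnd += (sender, e) =>
                Console.WriteLine($"Iteration {e.Epoch}: loss {e.Loss}, metric {e.Metric}");
            model.Train(x, y, numIterations: 25, batchSize: 32);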
 /// <summary>
 /// Raises the BatchEnd event with the results of a completed batch.
 /// </summary>
 /// <param name="epoch">The current epoch.</param>
 /// <param name="batch">The index of the batch that just completed.</param>
 /// <param name="loss">The loss value for the batch.</param>
 /// <param name="metric">The metric value for the batch.</param>
 protected void OnBatchEnd(int epoch, int batch, float loss, float metric)
 {
     BatchEnd?.Invoke(this, new BatchEndEventArgs(epoch, batch, loss, metric));
 }
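OnBatchEnd constructs a four-argument BatchEndEventArgs, so the surrounding class presumably declares something like the following. This is a sketch inferred from the call above; the property names are assumptions:

 // Assumed declarations matching the OnBatchEnd call above.
 public event EventHandler<BatchEndEventArgs> BatchEnd;

 public class BatchEndEventArgs : EventArgs
 {
     public BatchEndEventArgs(int epoch, int batch, float loss, float metric)
     {
         Epoch  = epoch;
         Batch  = batch;
         Loss   = loss;
         Metric = metric;
     }

     public int   Epoch  { get; }
     public int   Batch  { get; }
     public float Loss   { get; }
     public float Metric { get; }
 }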