/// <summary>
/// Raises the <see cref="EpochEnd"/> event with the results of a completed epoch.
/// </summary>
/// <param name="epoch">The epoch number that just finished.</param>
/// <param name="samplesSeenPerSec">The throughput, in samples seen per second.</param>
/// <param name="loss">The training loss for the epoch.</param>
/// <param name="validationLoss">The validation loss for the epoch.</param>
/// <param name="metric">The training metric value for the epoch.</param>
/// <param name="validationMetric">The validation metric value for the epoch.</param>
/// <param name="duration">The duration of the epoch.</param>
protected void OnEpochEnd(
    int epoch,
    long samplesSeenPerSec,
    double loss,
    double validationLoss,
    double metric,
    double validationMetric,
    long duration)
{
    // Build the event payload first, then fire only if anyone is subscribed.
    var args = new EpochEndEventArgs(
        epoch,
        samplesSeenPerSec,
        loss,
        validationLoss,
        metric,
        validationMetric,
        duration);

    EpochEnd?.Invoke(this, args);
}
/// <summary>
/// Trains the model on the given dataset for the specified number of epochs,
/// recording the average loss of each epoch and raising <see cref="EpochEnd"/> after each one.
/// </summary>
/// <param name="trainingData">The training examples.</param>
/// <param name="labels">The expected outputs for <paramref name="trainingData"/>.</param>
/// <param name="epochs">The number of full passes over the training data.</param>
/// <param name="batchSize">The number of samples per mini-batch.</param>
public void Train(Matrix trainingData, Matrix labels, int epochs, int batchSize)
{
    // Fix: removed an unused List<double> batchLoss that was allocated here
    // and never read or written (dead code).
    for (int epoch = 1; epoch <= epochs; epoch++)
    {
        // TrainBatches runs one full pass over the data and returns the epoch's average loss.
        var epochLossAvg = TrainBatches(trainingData, labels, batchSize, epoch);
        TrainingLoss.Add(epochLossAvg);

        // NOTE(review): the sender argument here is the boxed epoch number, while
        // OnEpochEnd elsewhere in this file passes 'this'. The delegate's declaration
        // is not visible from here, so this is kept as-is — confirm which is intended.
        EpochEndEventArgs eventArgs = new EpochEndEventArgs(epoch, epochLossAvg);
        EpochEnd?.Invoke(epoch, eventArgs);
    }
}
/// <summary>
/// Trains the model with the training dataset for a number of iterations using mini-batches.
/// After each iteration the averaged loss (and metric, if configured) is recorded and
/// <see cref="EpochEnd"/> is raised with the iteration's results and elapsed time.
/// </summary>
/// <param name="x">The training inputs.</param>
/// <param name="y">The expected outputs for <paramref name="x"/>.</param>
/// <param name="numIterations">The number of full passes over the training data.</param>
/// <param name="batchSize">The number of samples per mini-batch.</param>
public void Train(NDArray x, NDArray y, int numIterations, int batchSize)
{
    // Temporary holders for the per-batch loss and metric values of one iteration.
    List<float> batchLoss = new List<float>();
    List<float> batchMetrics = new List<float>();
    Stopwatch sw = new Stopwatch();

    // Loop through till the end of the specified iterations.
    for (int i = 1; i <= numIterations; i++)
    {
        sw.Start();
        int currentIndex = 0;
        batchLoss.Clear();
        batchMetrics.Clear();

        // Loop until the data is exhausted, one batch at a time.
        while (x.Next(currentIndex, batchSize))
        {
            // Get the batch data based on the specified batch size.
            var xtrain = x.Slice(currentIndex, batchSize);
            var ytrain = y.Slice(currentIndex, batchSize);

            // Run forward for all the layers to predict the value for the training set.
            var ypred = Forward(xtrain);

            // Find the loss/cost value for the prediction w.r.t. the expected result.
            var costVal = Cost.Forward(ypred, ytrain);
            batchLoss.AddRange(costVal.Data);

            // Find the metric value for the prediction w.r.t. the expected result,
            // when a metric is configured.
            if (Metric != null)
            {
                var metric = Metric.Calculate(ypred, ytrain);
                batchMetrics.AddRange(metric.Data);
            }

            // Gradient of the cost function, passed to the layers during back-propagation.
            var grad = Cost.Backward(ypred, ytrain);

            // Run back-propagation across all the layers.
            Backward(grad);

            // Update the neural network weights using the specified optimizer function.
            foreach (var layer in Layers)
            {
                Optimizer.Update(i, layer);
            }

            // Fix: original had a stray double semicolon here ("batchSize;;").
            currentIndex = currentIndex + batchSize;
        }

        sw.Stop();

        // Collect the result and fire the event. Fix: guard both averages on the
        // list actually containing values — Enumerable.Average() throws
        // InvalidOperationException on an empty sequence, which the original hit
        // whenever the dataset yielded no batches. This also makes the metric
        // average guard consistent with the batchMetrics.Count check below.
        float batchLossAvg = batchLoss.Count > 0
            ? (float)Math.Round(batchLoss.Average(), 2)
            : 0;
        float batchMetricAvg = batchMetrics.Count > 0
            ? (float)Math.Round(batchMetrics.Average(), 2)
            : 0;

        TrainingLoss.Add(batchLossAvg);
        if (batchMetrics.Count > 0)
        {
            TrainingMetrics.Add(batchMetricAvg);
        }

        EpochEndEventArgs eventArgs = new EpochEndEventArgs(i, batchLossAvg, batchMetricAvg, sw.ElapsedMilliseconds);
        EpochEnd?.Invoke(i, eventArgs);
        sw.Reset();
    }
}