/// <summary>
/// Runs the training loop. For each epoch: iterates the training data, records the
/// forward pass under autograd, backpropagates, and updates parameters via the trainer.
/// Every <c>DisplayStep</c> epochs (and on the final epoch) it evaluates the model on
/// the validation data, optionally updates <c>MetricFunction</c>, and writes a
/// <c>TrainingStatus</c> record (rounded to <c>DisplayDigits</c>) to the pipeline.
/// </summary>
protected override void BeginProcessing()
{
    // Fall back to the ambient/default context when the caller did not supply one.
    if (Context == null)
    {
        Context = Context.CurrentContext;
    }

    var stopWatch = new Stopwatch();
    stopWatch.Start();

    for (var epoch = 1; epoch <= MaxEpoch; ++epoch)
    {
        TrainingData.Reset();
        var totalLoss = 0.0f;
        var dataSize = 0;

        while (!TrainingData.End())
        {
            var batch = TrainingData.Next();
            var data = batch.Data[0].AsInContext(Context);
            var label = batch.Label[0].AsInContext(Context);

            NDArray loss;
            // Record ONLY the forward pass. Backward() and the optimizer step must run
            // outside the recording scope, otherwise the gradient computation itself
            // gets recorded onto the autograd graph (Gluon autograd contract).
            using (Autograd.Record())
            {
                var output = Model.Call(data);
                loss = (NDArray)LossFunction.Call(output, label);
            }
            loss.Backward();

            // NOTE(review): assumes loss.Sum() yields a scalar convertible to float.
            totalLoss += loss.Sum();
            dataSize += data.Shape[0];

            // data is batch.Data[0] moved to Context; shape (and thus batch size) is identical.
            Trainer.Step(data.Shape[0]);
        }

        if (epoch % DisplayStep == 0 || epoch == MaxEpoch)
        {
            // Guard against an empty training iterator (0/0 would yield NaN).
            if (dataSize > 0)
            {
                totalLoss /= dataSize;
            }

            ValidationData.Reset();
            var totalValidLoss = 0.0f;
            var validDataSize = 0;

            MetricFunction?.Reset();

            // Validation: plain forward passes, no autograd recording and no updates.
            while (!ValidationData.End())
            {
                var batch = ValidationData.Next();
                var data = batch.Data[0].AsInContext(Context);
                var label = batch.Label[0].AsInContext(Context);

                var output = Model.Call(data);
                var validLoss = (NDArray)LossFunction.Call(output, label);

                totalValidLoss += validLoss.Sum();
                validDataSize += data.Shape[0];

                MetricFunction?.Update(label, output);
            }

            if (validDataSize > 0)
            {
                totalValidLoss /= validDataSize;
            }

            // Metric is optional; NaN signals "no metric configured" downstream.
            string metricName = null;
            float metric = float.NaN;
            if (MetricFunction != null)
            {
                (metricName, metric) = MetricFunction.Get();
            }

            var status = new TrainingStatus(
                epoch,
                (float)Math.Round(totalLoss, DisplayDigits),
                (float)Math.Round(totalValidLoss, DisplayDigits),
                metricName,
                (float)Math.Round(metric, DisplayDigits),
                stopWatch.Elapsed);
            WriteObject(status);
        }
    }
}