public void PrintTrainingProgress(Trainer trainer, int minibatchIdx)
{
    // Skip the very first call, before any minibatch has been processed.
    if (trainer.PreviousMinibatchSampleCount() != 0)
    {
        float trainLossValue = (float)trainer.PreviousMinibatchLossAverage();
        float evaluationValue = (float)trainer.PreviousMinibatchEvaluationAverage();
        Debug.WriteLine($"Minibatch: {minibatchIdx} CrossEntropyLoss = {trainLossValue}, EvaluationCriterion = {evaluationValue}");
    }
}
public void PrintTrainingProgress(Trainer trainer, int minibatchIdx)
{
    if (trainer.PreviousMinibatchSampleCount() != 0)
    {
        float trainLossValue = (float)trainer.PreviousMinibatchLossAverage();
        float evaluationValue = (float)trainer.PreviousMinibatchEvaluationAverage();
        // Semicolon-separated output, convenient for importing into a spreadsheet or charting tool.
        Debug.WriteLine($"{minibatchIdx};{trainLossValue};{evaluationValue};");
    }
}
public static void PrintTrainingProgress(Trainer trainer, int minibatchIdx, int outputFrequencyInMinibatches)
{
    // Report only every outputFrequencyInMinibatches-th minibatch,
    // and never before the first minibatch has been processed.
    if ((minibatchIdx % outputFrequencyInMinibatches) == 0 && trainer.PreviousMinibatchSampleCount() != 0)
    {
        float trainLossValue = (float)trainer.PreviousMinibatchLossAverage();
        float evaluationValue = (float)trainer.PreviousMinibatchEvaluationAverage();
        Console.WriteLine($"Minibatch: {minibatchIdx} CrossEntropyLoss = {trainLossValue}, EvaluationCriterion = {evaluationValue}");
    }
}
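For context, here is a minimal sketch of how the frequency-gated printer above might be wired into a training loop. It assumes a configured Trainer, the custom GenericMinibatchSequenceSource used in the RunTraining method below, and a DeviceDescriptor named device; the minibatch count and output frequency are illustrative values, not part of the original.

// Hypothetical wiring: trainer, minibatchSource, and device are assumed
// to be set up elsewhere; the numeric values are illustrative.
int numMinibatchesToTrain = 1000;
int outputFrequencyInMinibatches = 50;
for (int i = 0; i < numMinibatchesToTrain; i++)
{
    IDictionary<Variable, MinibatchData> data = minibatchSource.GetNextRandomMinibatch();
    trainer.TrainMinibatch(data, device);
    PrintTrainingProgress(trainer, i, outputFrequencyInMinibatches);
}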
private void RunTraining(Trainer trainer, GenericMinibatchSequenceSource minibatchSource, int numMinibatchesToTrain, DeviceDescriptor device)
{
    // Running sum of (per-minibatch average metric x minibatch size), used to
    // compute the average training error over all samples seen so far.
    double aggregate_metric = 0;
    for (int minibatchCount = 0; minibatchCount < numMinibatchesToTrain; minibatchCount++)
    {
        IDictionary<Variable, MinibatchData> data = minibatchSource.GetNextRandomMinibatch();
        trainer.TrainMinibatch(data, device);

        double samples = trainer.PreviousMinibatchSampleCount();
        double avg = trainer.PreviousMinibatchEvaluationAverage();
        aggregate_metric += avg * samples;

        double nbSampleSeen = trainer.TotalNumberOfSamplesSeen();
        double train_error = aggregate_metric / nbSampleSeen;
        Debug.WriteLine($"{minibatchCount} Average training error: {train_error:p2}");
    }
}
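The running average works because each minibatch contributes avg * samples, i.e. its total per-sample error, to aggregate_metric, so dividing by TotalNumberOfSamplesSeen() yields the mean error over every sample processed so far. With illustrative numbers: two minibatches of 64 samples with average errors 0.25 and 0.15 give aggregate_metric = 0.25 * 64 + 0.15 * 64 = 25.6, and a running error of 25.6 / 128 = 0.20, printed as 20.00% by the :p2 format specifier. Note this assumes the trainer starts untrained: TotalNumberOfSamplesSeen() is cumulative over the trainer's lifetime, while aggregate_metric is local to this method.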