/// <summary>
/// Trains an SDCA (non-calibrated) multiclass classifier on the supplied samples,
/// then evaluates it on a held-out split and prints the headline metrics.
/// </summary>
/// <param name="inputs">Feature vectors, one row per sample.</param>
/// <param name="labels">Class label for each row of <paramref name="inputs"/>.</param>
public StocasticDualCoordianteAscent(double[][] inputs, double[] labels)
{
    IDataView data_in = context.Data.LoadFromEnumerable<_data>(GetSampleData(inputs, labels));
    DataOperationsCatalog.TrainTestData partitions = context.Data.TrainTestSplit(data_in);

    // BUG FIX: estimator pipelines are immutable -- AppendCacheCheckpoint() and
    // Append() return NEW chains. The previous code discarded those return values,
    // so the SDCA trainer was never part of the pipeline and Fit() only ran the
    // Concatenate transform. Chain the calls and fit the full pipeline instead.
    var pipeline = context.Transforms.Concatenate("Features", nameof(_data.Features))
        .AppendCacheCheckpoint(context)
        .Append(context.MulticlassClassification.Trainers.SdcaNonCalibrated());
    ITransformer model = pipeline.Fit(partitions.TrainSet);

    Console.WriteLine("Evaluating model....");
    IDataView predictions = model.Transform(partitions.TestSet);

    // evaluate the predictions
    MulticlassClassificationMetrics metrics = context.MulticlassClassification.Evaluate(predictions);

    // show evaluation metrics
    Console.WriteLine($"Evaluation metrics");
    Console.WriteLine($"    MicroAccuracy:    {metrics.MicroAccuracy:0.###}");
    Console.WriteLine($"    MacroAccuracy:    {metrics.MacroAccuracy:0.###}");
    Console.WriteLine($"    LogLoss:          {metrics.LogLoss:#.###}");
    Console.WriteLine($"    LogLossReduction: {metrics.LogLossReduction:#.###}");
    Console.WriteLine();
}
/// <summary>
/// Check that a <see cref="MulticlassClassificationMetrics"/> object is valid.
/// </summary>
/// <param name="metrics">The metrics object.</param>
public static void AssertMetrics(MulticlassClassificationMetrics metrics)
{
    // Accuracies and top-K accuracy are proportions, so they must lie in [0, 1];
    // log-loss is unbounded above but can never be negative.
    Assert.InRange(metrics.MacroAccuracy, 0, 1);
    Assert.InRange(metrics.MicroAccuracy, 0, 1);
    Assert.InRange(metrics.TopKAccuracy, 0, 1);
    Assert.True(metrics.LogLoss >= 0);
}
/// <summary>
/// Builds a column-normalized confusion matrix image: each cell holds the count
/// for (actual c, predicted p) divided by the total count of predictions p.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics providing the confusion matrix.</param>
/// <returns>A bitmap rendered by <c>DrawConfusionMatrix</c>.</returns>
public static Bitmap GetNormalizedConfusionMatrix(MulticlassClassificationMetrics metrics)
{
    int numClasses = metrics.ConfusionMatrix.NumberOfClasses;
    double[,] matrix = new double[numClasses, numClasses];
    double[] columnTotals = new double[numClasses];

    // First pass: copy the raw counts and accumulate per-predicted-class totals.
    // NOTE(review): GetCountForClassPair is called as (predicted, actual) here —
    // confirm this matches the axis convention DrawConfusionMatrix expects.
    for (int c = 0; c < numClasses; c++)
    {
        for (int p = 0; p < numClasses; p++)
        {
            matrix[c, p] = metrics.ConfusionMatrix.GetCountForClassPair(p, c);
            columnTotals[p] += matrix[c, p];
        }
    }

    // Second pass: normalize in place, reusing the counts already read instead of
    // querying the confusion matrix a second time. Math.Max(1, total) guards
    // against division by zero for classes that were never predicted.
    for (int c = 0; c < numClasses; c++)
    {
        for (int p = 0; p < numClasses; p++)
        {
            matrix[c, p] /= Math.Max(1, columnTotals[p]);
        }
    }
    return DrawConfusionMatrix(matrix);
}
/// <summary>
/// Produce a set of metrics of a Model by running a set of Test data against it.
/// </summary>
/// <returns>The multiclass classification metrics computed on the test split.</returns>
public MulticlassClassificationMetrics Evaluate()
{
    // Score the held-out test split with the trained model, then let ML.NET
    // derive the standard multiclass metrics from the scored rows.
    IDataView scoredTestData = Model.Transform(SplitDataView.TestSet);
    return objContext.MulticlassClassification.Evaluate(scoredTestData);
}
/// <summary>
/// Prints a boxed report of multiclass classification metrics for the named algorithm,
/// including per-class log-loss and the confusion-matrix precision/recall/counts.
/// </summary>
/// <param name="algorithm">Display name of the algorithm being reported.</param>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void MulticlassClassificationMetrics(string algorithm, MulticlassClassificationMetrics metrics)
{
    var separator = "+=======================================================+";
    Console.WriteLine(separator);
    Console.WriteLine($"|=========== {algorithm} ===========|");
    Console.WriteLine(separator);
    Console.WriteLine($"| LogLoss = {metrics.LogLoss}");
    Console.WriteLine($"| LogLossReduction = {metrics.LogLossReduction}");
    Console.WriteLine($"| MacroAccuracy = {metrics.MacroAccuracy}");
    Console.WriteLine($"| MicroAccuracy = {metrics.MicroAccuracy}");
    // BUG FIX: this line was labelled TopKAccuracy but printed TopKPredictionCount.
    Console.WriteLine($"| TopKAccuracy = {metrics.TopKAccuracy}");
    Console.WriteLine($"| PerClassLogLoss");
    for (int i = 0; i < metrics.PerClassLogLoss.Count; i++)
    {
        Console.WriteLine($"| - Class {i} = {metrics.PerClassLogLoss[i]}");
    }
    Console.WriteLine($"| ConfusionMatrix");
    Console.WriteLine($"| - NumberOfClasses = {metrics.ConfusionMatrix.NumberOfClasses}");
    Console.WriteLine($"| - PerClassPrecision = {string.Join('|', metrics.ConfusionMatrix.PerClassPrecision)}");
    Console.WriteLine($"| - PerClassRecall = {string.Join('|', metrics.ConfusionMatrix.PerClassRecall)}");
    Console.WriteLine($"| - Counts");
    PrintMatrix(metrics.ConfusionMatrix.Counts);
    Console.WriteLine(separator);
    Console.WriteLine("");
    Console.WriteLine("");
}
/// <summary>
/// Pretty-print MulticlassClassificationMetrics objects.
/// </summary>
/// <param name="metrics"><see cref="MulticlassClassificationMetrics"/> object.</param>
public static void PrintMetrics(MulticlassClassificationMetrics metrics)
{
    // Emit each headline metric on its own line, rounded to two decimals.
    var lines = new[]
    {
        $"Micro Accuracy: {metrics.MicroAccuracy:F2}",
        $"Macro Accuracy: {metrics.MacroAccuracy:F2}",
        $"Log Loss: {metrics.LogLoss:F2}",
        $"Log Loss Reduction: {metrics.LogLossReduction:F2}",
    };
    foreach (var line in lines)
    {
        Console.WriteLine(line);
    }
}
/// <summary>
/// Writes a human-readable summary of <paramref name="metrics"/> to a text file
/// named after the model file.
/// </summary>
/// <param name="mlModelFileName">File name of the trained model whose metrics are saved.</param>
/// <param name="metrics">Evaluated multiclass metrics; must not be null.</param>
/// <exception cref="ArgumentException">The model file name is null/empty/whitespace.</exception>
/// <exception cref="ArgumentNullException"><paramref name="metrics"/> is null.</exception>
/// <exception cref="InvalidOperationException">No stored model file with that name exists.</exception>
public async Task CreateMetricsFileAsync(string mlModelFileName, MulticlassClassificationMetrics metrics)
{
    if (string.IsNullOrWhiteSpace(mlModelFileName))
    {
        throw new ArgumentException("A model file name is required.", nameof(mlModelFileName));
    }
    _ = metrics ?? throw new ArgumentNullException(nameof(metrics));
    if (!ModelFilesStorage.Exists(mlModelFileName))
    {
        throw new InvalidOperationException($"Model file '{mlModelFileName}' does not exist.");
    }

    var nl = System.Environment.NewLine;
    var str =
        "LogLoss: " + metrics.LogLoss + nl +
        "LogLossReduction: " + metrics.LogLossReduction + nl +
        "MacroAccuracy: " + metrics.MacroAccuracy + nl +
        // BUG FIX: this line previously wrote MacroAccuracy under the MicroAccuracy label.
        "MicroAccuracy: " + metrics.MicroAccuracy + nl +
        "TopKAccuracy: " + metrics.TopKAccuracy + nl +
        "TopKPredictionCount: " + metrics.TopKPredictionCount + nl +
        "TopKAccuracyForAllK: " + metrics.TopKAccuracyForAllK + nl +
        // BUG FIX: concatenating the list/matrix objects directly printed their type
        // names; join the per-class values and format the confusion table instead.
        "PerClassLogLoss: " + string.Join(", ", metrics.PerClassLogLoss) + nl +
        "ConfusionMatrix: " + nl + metrics.ConfusionMatrix.GetFormattedConfusionTable() + nl;

    await TextFilesStorage.CreateFileAsync(mlModelFileName, Array.Empty<byte>());
    // await-using disposes the stream asynchronously; the old explicit DisposeAsync
    // inside a synchronous `using` double-disposed the stream.
    await using var st = TextFilesStorage.GetWriteStream(mlModelFileName);
    await st.WriteAsync(Encoding.UTF8.GetBytes(str));
}
/// <summary>
/// Evaluate model by making predictions in bulk.
/// If you run it without running the pipeline, it will find and load the existing
/// trained model, and then prepare the dataset.
/// The evaluation result may differ between runs.
/// </summary>
/// <exception cref="InvalidOperationException">
/// No trained model is in memory and no saved model exists on disk.
/// </exception>
public void EvaluateModel()
{
    if (trainedModel == null)
    {
        if (File.Exists(OutputModelFilePath))
        {
            LoadTrainedModel();
            PrepareDataset(useValidationSet);
        }
        else
        {
            // BUG FIX: throw the specific InvalidOperationException instead of the
            // base System.Exception (callers catching Exception still match).
            throw new InvalidOperationException("Please run the pipeline before evaluating!");
        }
    }

    Console.WriteLine("Making predictions in bulk for evaluating model's quality...");

    // Begin evaluating
    Stopwatch watch = Stopwatch.StartNew();
    IDataView predictionsDataView = trainedModel.Transform(testDataset);
    MulticlassClassificationMetrics metrics = mlContext.MulticlassClassification.Evaluate(
        predictionsDataView, labelColumnName: KeyColumn, predictedLabelColumnName: PredictedLabelColumn);
    ConsoleHelper.PrintMultiClassClassificationMetrics("TensorFlow DNN Transfer Learning", metrics);
    watch.Stop();
    // End evaluating

    // Integer division: reports whole seconds only.
    long milliseconds = watch.ElapsedMilliseconds;
    Console.WriteLine($"Predicting and Evaluation took: {milliseconds / 1000} seconds");

    // Save confusion matrix metrics to file next to the model.
    string confusionPath = Path.Combine(Directory.GetParent(OutputModelFilePath).FullName, "ConfusionMatrix.csv");
    ConsoleHelper.Export_ConfusionMatrix(metrics.ConfusionMatrix, confusionPath, Path.GetFileNameWithoutExtension(OutputModelFilePath));
}
/// <summary>
/// Prints each multiclass metric followed by a plain-English explanation of what
/// it measures, plus the formatted confusion table.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
private static void ConsoleMulticlassMetricsUpdated(MulticlassClassificationMetrics metrics)
{
    Console.WriteLine(new string('=', 30));
    Console.WriteLine("Log-loss: {0}", metrics.LogLoss);
    Console.WriteLine("Log-loss measures the performance of a classifier with respect to how much the predicted probabilities diverge from the true class label. Lower log-loss indicates a better model. A perfect model, which predicts a probability of 1 for the true class, will have a log-loss of 0.");
    Console.WriteLine("Log-loss Reduction: {0}", metrics.LogLossReduction);
    Console.WriteLine("It gives a measure of how much a model improves on a model that gives random predictions. Log-loss reduction closer to 1 indicates a better model.");
    Console.WriteLine("Macro Accuracy: {0}", metrics.MacroAccuracy);
    Console.WriteLine("The accuracy for each class is computed and the macro-accuracy is the average of these accuracies. The macro-average metric gives the same weight to each class, no matter how many instances from that class the dataset contains.");
    Console.WriteLine("Micro Accuracy: {0}", metrics.MicroAccuracy);
    // BUG FIX: this literal was broken across two physical source lines (invalid C#);
    // the two halves are re-joined into one string.
    Console.WriteLine("The micro-average is the fraction of instances predicted correctly across all classes. Micro-average can be a more useful metric than macro-average if class imbalance is suspected.");
    // BUG FIX: the two explanatory sentences below were attached to the wrong metric
    // lines; each description now directly follows the value it describes.
    Console.WriteLine("Top K Prediction Count: {0}", metrics.TopKPredictionCount);
    Console.WriteLine("If positive, this indicates the K in Top K Accuracy and Top K Accuracy for all K.");
    Console.WriteLine("Top K Accuracy: {0}", metrics.TopKAccuracy);
    Console.WriteLine("This is the relative number of examples where the true label one of the top K predicted labels by the predictor.");
    if (metrics.TopKAccuracyForAllK?.Count > 0)
    {
        Console.WriteLine("Top K Accuracy for all K: ({0})", string.Join(", ", metrics.TopKAccuracyForAllK));
    }
    if (metrics.PerClassLogLoss?.Count > 0)
    {
        Console.WriteLine("Per Class Log-loss: ({0})", string.Join(", ", metrics.PerClassLogLoss));
    }
    Console.WriteLine();
    Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
    Console.WriteLine();
    Console.WriteLine(new string('=', 30));
}
/// <summary>
/// Dumps the headline multiclass metrics, then per-class log-loss, precision and
/// recall, labelling each class with the caller-supplied class names.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
/// <param name="classNames">Display name per class index.</param>
public static void PrintMulticlassClassificationMetrics(MulticlassClassificationMetrics metrics, string[] classNames)
{
    Console.WriteLine($"************************************************************");
    Console.WriteLine($"*    Metrics for multi-class classification model   ");
    Console.WriteLine($"*-----------------------------------------------------------");
    Console.WriteLine($"Accuracy (micro-avg): {metrics.MicroAccuracy:0.0000} # 0..1, higher is better");
    Console.WriteLine($"Accuracy (macro): {metrics.MacroAccuracy:0.0000} # 0..1, higher is better");
    Console.WriteLine($"Top-K accuracy: [{string.Join(", ", metrics?.TopKAccuracyForAllK?.Select(a => $"{a:0.0000}") ?? new string[] { "Set topKPredictionCount in evaluator to view" })}] # 0..1, higher is better");
    Console.WriteLine($"Log-loss reduction: {metrics.LogLossReduction:0.0000;-0.000} # -Inf..1, higher is better");
    Console.WriteLine($"Log-loss: {metrics.LogLoss:0.0000} # 0..Inf, lower is better");
    Console.WriteLine("\nPer class metrics");

    // One pass per metric family so all log-losses print first, then all
    // precisions, then all recalls (matching the original output order).
    int classCount = metrics.PerClassLogLoss.Count;
    for (int classIdx = 0; classIdx < classCount; classIdx++)
    {
        Console.WriteLine($"LogLoss for class {classIdx} ({classNames[classIdx] + "):",-11} {metrics.PerClassLogLoss[classIdx]:0.0000} # 0..Inf, lower is better");
    }
    for (int classIdx = 0; classIdx < classCount; classIdx++)
    {
        Console.WriteLine($"Precision for class {classIdx} ({classNames[classIdx] + "):",-11} {metrics.ConfusionMatrix.PerClassPrecision[classIdx]:0.0000} # 0..1, higher is better");
    }
    for (int classIdx = 0; classIdx < classCount; classIdx++)
    {
        Console.WriteLine($"Recall for class {classIdx} ({classNames[classIdx] + "):",-11} {metrics.ConfusionMatrix.PerClassRecall[classIdx]:0.0000} # 0..1, higher is better");
    }

    Console.WriteLine("");
    Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
    Console.WriteLine($"************************************************************");
}
/// <summary>
/// Prints accuracy, log-loss (overall and per class), the confusion table, and a
/// per-class F1 score derived from the confusion matrix.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
private static void PrintMulticlassClassificationMetrics(MulticlassClassificationMetrics metrics)
{
    Console.WriteLine($"************************************************************");
    Console.WriteLine($"*    Metrics for multi-class classification model   ");
    Console.WriteLine($"*-----------------------------------------------------------");
    Console.WriteLine($"    MacroAccuracy = {metrics.MacroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    MicroAccuracy = {metrics.MicroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    LogLoss = {metrics.LogLoss:0.####}, the closer to 0, the better");
    for (int i = 0; i < metrics.PerClassLogLoss.Count; i++)
    {
        Console.WriteLine($"    LogLoss for class {i} \t= {metrics.PerClassLogLoss[i]:0.####}, the closer to 0, the better");
    }
    Console.WriteLine("    " + metrics.ConfusionMatrix.GetFormattedConfusionTable());
    for (int i = 0; i < metrics.ConfusionMatrix.PerClassPrecision.Count; i++)
    {
        var precision = metrics.ConfusionMatrix.PerClassPrecision[i];
        var recall = metrics.ConfusionMatrix.PerClassRecall[i];
        // BUG FIX: when precision and recall are both 0 the harmonic mean divides
        // by zero and printed NaN; report 0 for that degenerate class instead.
        var f1Score = (precision + recall) == 0 ? 0d : 2 * (precision * recall) / (precision + recall);
        Console.WriteLine($"    F1 Score for class {i} \t= {f1Score:0.####}, a value between 0 and 1, the closer to 1, the better");
    }
    Console.WriteLine($"************************************************************");
}
//The Inception model has several parameters you need to pass in.
//Create a struct to map the parameter values to friendly names with the following code
/// <summary>
/// Builds the image-classification pipeline (image load/resize/pixel-extract ->
/// TensorFlow Inception featurization -> LbfgsMaximumEntropy), trains it on the
/// tagged training images, evaluates on the test images, and returns the model.
/// </summary>
/// <param name="mlContext">The shared MLContext.</param>
/// <returns>The trained transformer chain.</returns>
public static ITransformer GenerateModel(MLContext mlContext)
{
    IEstimator<ITransformer> pipeline = mlContext.Transforms.LoadImages(outputColumnName: "input", imageFolder: _imagesFolder, inputColumnName: nameof(ImageData.ImagePath))
        // The image transforms transform the images into the model's expected format.
        .Append(mlContext.Transforms.ResizeImages(outputColumnName: "input", imageWidth: InceptionSettings.ImageWidth, imageHeight: InceptionSettings.ImageHeight, inputColumnName: "input"))
        .Append(mlContext.Transforms.ExtractPixels(outputColumnName: "input", interleavePixelColors: InceptionSettings.ChannelsLast, offsetImage: InceptionSettings.Mean))
        .Append(mlContext.Model.LoadTensorFlowModel(_inceptionTensorFlowModel).ScoreTensorFlowModel(outputColumnNames: new[] { "softmax2_pre_activation" }, inputColumnNames: new[] { "input" }, addBatchDimensionInput: true))
        .Append(mlContext.Transforms.Conversion.MapValueToKey(outputColumnName: "LabelKey", inputColumnName: "Label"))
        .Append(mlContext.MulticlassClassification.Trainers.LbfgsMaximumEntropy(labelColumnName: "LabelKey", featureColumnName: "softmax2_pre_activation"))
        .Append(mlContext.Transforms.Conversion.MapKeyToValue("PredictedLabelValue", "PredictedLabel"))
        .AppendCacheCheckpoint(mlContext);

    IDataView trainingData = mlContext.Data.LoadFromTextFile<ImageData>(path: _trainTagsTsv, hasHeader: false);
    ITransformer model = pipeline.Fit(trainingData);

    IDataView testData = mlContext.Data.LoadFromTextFile<ImageData>(path: _testTagsTsv, hasHeader: false);
    // BUG FIX: predictions were previously generated from trainingData, so the
    // metrics below measured training-set performance and the loaded test set was
    // never used. Transform the test set instead.
    IDataView predictions = model.Transform(testData);

    // Create an IEnumerable for the predictions for displaying results
    IEnumerable<ImagePrediction> imagePredictionData = mlContext.Data.CreateEnumerable<ImagePrediction>(predictions, false);
    DisplayResults(imagePredictionData);

    MulticlassClassificationMetrics metrics = mlContext.MulticlassClassification.Evaluate(predictions, labelColumnName: "LabelKey", predictedLabelColumnName: "PredictedLabel");
    Console.WriteLine($"LogLoss is: {metrics.LogLoss}");
    Console.WriteLine($"PerClassLogLoss is: {String.Join(" , ", metrics.PerClassLogLoss.Select(c => c.ToString()))}");
    return model;
}
/// <summary>
/// Collects model data to initialize this object.
/// </summary>
/// <param name="metrics">Metrics captured from the model.</param>
/// <param name="type">The model type.</param>
/// <param name="model">The model as an ITransformer.</param>
/// <param name="schema">Schema of the data used for training and metric capture.</param>
/// <param name="parameters">Hyper-parameters used to train the model.</param>
public MLModel(MulticlassClassificationMetrics metrics, MLModelTypes type, ITransformer model, DataViewSchema schema, TreeParameters parameters)
{
    // Wrap the ML.NET metrics in our serializable mirror type; everything else
    // is stored as-is.
    Parameters = parameters;
    Schema = schema;
    Model = model;
    ModelType = type;
    Metrics = new MyModelMetrics(metrics);
}
/// <summary>
/// Pretty-print MulticlassClassificationMetrics objects, followed by the
/// formatted confusion table.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void PrintMetrics(MulticlassClassificationMetrics metrics)
{
    // Headline metrics, two decimals each; the final entry carries an extra
    // newline so the confusion table is visually separated.
    var summary = new[]
    {
        $"Micro Accuracy: {metrics.MicroAccuracy:F2}",
        $"Macro Accuracy: {metrics.MacroAccuracy:F2}",
        $"Log Loss: {metrics.LogLoss:F2}",
        $"Log Loss Reduction: {metrics.LogLossReduction:F2}\n",
    };
    foreach (var line in summary)
    {
        Console.WriteLine(line);
    }
    Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
}
/// <summary>
/// Add a new set of data.
/// </summary>
/// <param name="mcm">Metrics from one evaluation run, folded into the running statistics.</param>
/// <param name="timeTaken">Elapsed time for the run — presumably milliseconds, per the accumulator name.</param>
public void AddData(MulticlassClassificationMetrics mcm, double timeTaken)
{
    // Each accumulator is independent, so the order of updates does not matter.
    TimeTakenMiliSeconds.AddValue(timeTaken);
    LogLossReduction.AddValue(mcm.LogLossReduction);
    LogLoss.AddValue(mcm.LogLoss);
    MicroAccuracy.AddValue(mcm.MicroAccuracy);
    MacroAccuracy.AddValue(mcm.MacroAccuracy);
    StatConfusionMatrix.AddConfusionMatrix(mcm.ConfusionMatrix);
}
/// <summary>
/// Scores the test set with the trained model, computes multiclass metrics,
/// and prints them to the console.
/// </summary>
/// <param name="mlContext">The shared MLContext.</param>
/// <param name="testDataView">Held-out test data.</param>
/// <param name="trainedModel">The fitted model to evaluate.</param>
private static void EvaluateTestData(MLContext mlContext, IDataView testDataView, ITransformer trainedModel)
{
    var predictions = trainedModel.Transform(testDataView);
    var metrics = mlContext.MulticlassClassification.Evaluate(predictions);
    PrintMulticlassClassificationMetrics(metrics);
}
/// <summary>
/// Prints a colored summary of multiclass metrics for the named model.
/// </summary>
/// <param name="name">Display name of the model.</param>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void PrintMultiClassClassificationMetrics(string name, MulticlassClassificationMetrics metrics)
{
    // The border is a plain constant; interpolation in the original had no holes.
    var border = "**********************************************************************************";
    WriteLineColor(border, color);
    WriteLineColor($"    Metrics for {name} multi-class classification model", color);
    WriteLineColor(border, color);
    WriteLineColor($"    MicroAccuracy = {metrics.MicroAccuracy:0.000} (the closer to 1, the better)", color);
    WriteLineColor($"    MacroAccuracy = {metrics.MacroAccuracy:0.000} (the closer to 1, the better)", color);
    WriteLineColor($"    LogLoss = {metrics.LogLoss:0.000} (the closer to 0, the better)", color);
    WriteLineColor(border, color);
}
// Averages a set of per-fold metrics into a single TMetrics instance.
// Scalar metrics are averaged across folds via GetAverageOfNonNaNScores;
// members that cannot be meaningfully averaged (confusion matrix, per-class
// log-loss) are taken from the fold whose scores are closest to the average.
// Dispatches on typeof(TMetrics) because the three metric types share no
// common constructor or interface for rebuilding.
// Throws NotImplementedException for any unsupported TMetrics type.
private static TMetrics GetAverageMetrics(IEnumerable <TMetrics> metrics, TMetrics metricsClosestToAvg)
{
    if (typeof(TMetrics) == typeof(BinaryClassificationMetrics))
    {
        // NOTE: Select(x => x as ...) yields a lazy sequence; the Assert only
        // checks the sequence object itself is non-null, not its elements.
        var newMetrics = metrics.Select(x => x as BinaryClassificationMetrics);
        Contracts.Assert(newMetrics != null);
        var result = new BinaryClassificationMetrics(
            auc: GetAverageOfNonNaNScores(newMetrics.Select(x => x.AreaUnderRocCurve)),
            accuracy: GetAverageOfNonNaNScores(newMetrics.Select(x => x.Accuracy)),
            positivePrecision: GetAverageOfNonNaNScores(newMetrics.Select(x => x.PositivePrecision)),
            positiveRecall: GetAverageOfNonNaNScores(newMetrics.Select(x => x.PositiveRecall)),
            negativePrecision: GetAverageOfNonNaNScores(newMetrics.Select(x => x.NegativePrecision)),
            negativeRecall: GetAverageOfNonNaNScores(newMetrics.Select(x => x.NegativeRecall)),
            f1Score: GetAverageOfNonNaNScores(newMetrics.Select(x => x.F1Score)),
            auprc: GetAverageOfNonNaNScores(newMetrics.Select(x => x.AreaUnderPrecisionRecallCurve)),
            // Return ConfusionMatrix from the fold closest to average score
            confusionMatrix: (metricsClosestToAvg as BinaryClassificationMetrics).ConfusionMatrix);
        return(result as TMetrics);
    }
    if (typeof(TMetrics) == typeof(MulticlassClassificationMetrics))
    {
        var newMetrics = metrics.Select(x => x as MulticlassClassificationMetrics);
        Contracts.Assert(newMetrics != null);
        var result = new MulticlassClassificationMetrics(
            accuracyMicro: GetAverageOfNonNaNScores(newMetrics.Select(x => x.MicroAccuracy)),
            accuracyMacro: GetAverageOfNonNaNScores(newMetrics.Select(x => x.MacroAccuracy)),
            logLoss: GetAverageOfNonNaNScores(newMetrics.Select(x => x.LogLoss)),
            logLossReduction: GetAverageOfNonNaNScores(newMetrics.Select(x => x.LogLossReduction)),
            // K is a configuration value, not a score, so it is taken from the
            // first fold rather than averaged.
            topKPredictionCount: newMetrics.ElementAt(0).TopKPredictionCount,
            topKAccuracy: GetAverageOfNonNaNScores(newMetrics.Select(x => x.TopKAccuracy)),
            // Return PerClassLogLoss and ConfusionMatrix from the fold closest to average score
            perClassLogLoss: (metricsClosestToAvg as MulticlassClassificationMetrics).PerClassLogLoss.ToArray(),
            confusionMatrix: (metricsClosestToAvg as MulticlassClassificationMetrics).ConfusionMatrix);
        return(result as TMetrics);
    }
    if (typeof(TMetrics) == typeof(RegressionMetrics))
    {
        var newMetrics = metrics.Select(x => x as RegressionMetrics);
        Contracts.Assert(newMetrics != null);
        var result = new RegressionMetrics(
            l1: GetAverageOfNonNaNScores(newMetrics.Select(x => x.MeanAbsoluteError)),
            l2: GetAverageOfNonNaNScores(newMetrics.Select(x => x.MeanSquaredError)),
            rms: GetAverageOfNonNaNScores(newMetrics.Select(x => x.RootMeanSquaredError)),
            lossFunction: GetAverageOfNonNaNScores(newMetrics.Select(x => x.LossFunction)),
            rSquared: GetAverageOfNonNaNScores(newMetrics.Select(x => x.RSquared)));
        return(result as TMetrics);
    }
    throw new NotImplementedException($"Metric {typeof(TMetrics)} not implemented");
}
/// <summary>
/// Prints a boxed summary of the test-set multiclass metrics.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
private void Print(MulticlassClassificationMetrics metrics)
{
    var rule = "*************************************************************************************************************";
    var divider = "*------------------------------------------------------------------------------------------------------------";
    Console.WriteLine(rule);
    Console.WriteLine($"*    Metrics for Multi-class Classification model - Test Data   ");
    Console.WriteLine(divider);
    Console.WriteLine($"*    MacroAccuracy: {metrics.MacroAccuracy:0.###}");
    Console.WriteLine($"*    MicroAccuracy: {metrics.MicroAccuracy:0.###}");
    Console.WriteLine($"*    LogLoss: {metrics.LogLoss:#.###} (0 is best)");
    Console.WriteLine($"*    LogLossReduction: {metrics.LogLossReduction:#.###} (1 is best)");
    Console.WriteLine(rule);
}
/// <summary>
/// Check that a <see cref="MulticlassClassificationMetrics"/> object is valid.
/// </summary>
/// <param name="metrics">The metrics object.</param>
public static void AssertMetrics(MulticlassClassificationMetrics metrics)
{
    // Proportions must sit in [0, 1]; log-loss is non-negative but unbounded above.
    Assert.InRange(metrics.MacroAccuracy, 0, 1);
    Assert.InRange(metrics.MicroAccuracy, 0, 1);
    Assert.True(metrics.LogLoss >= 0);
    Assert.InRange(metrics.TopKAccuracy, 0, 1);

    // Confusion matrix validations: present and internally consistent.
    Assert.NotNull(metrics.ConfusionMatrix);
    AssertConfusionMatrix(metrics.ConfusionMatrix);
}
/// <summary>
/// Asserts that the evaluated metrics match the expected values for this fixture
/// (3-class model, ~0.98 accuracy, small log-loss).
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics under test.</param>
private void CompareMetrics(MulticlassClassificationMetrics metrics)
{
    // BUG FIX: MacroAccuracy was compared with exact double equality (no precision
    // argument); use the same 2-decimal tolerance as the MicroAccuracy assertion.
    Assert.Equal(.98, metrics.MacroAccuracy, 2);
    Assert.Equal(.98, metrics.MicroAccuracy, 2);
    Assert.InRange(metrics.LogLoss, .05, .06);
    Assert.InRange(metrics.LogLossReduction, 0.94, 0.96);
    Assert.Equal(3, metrics.PerClassLogLoss.Count);
    Assert.Equal(0, metrics.PerClassLogLoss[0], 1);
    Assert.Equal(.1, metrics.PerClassLogLoss[1], 1);
    Assert.Equal(.1, metrics.PerClassLogLoss[2], 1);
}
/// <summary>
/// Builds an instance of this class from the corresponding
/// MulticlassClassificationMetrics instance.
/// </summary>
/// <param name="metrics">The MulticlassClassificationMetrics instance to mirror.</param>
public MyModelMetrics(MulticlassClassificationMetrics metrics)
{
    // Copy collections defensively so this object does not alias the ML.NET
    // metrics internals; scalars are copied as-is.
    ConfusionMatrix = new MyConfusionMatrix(metrics.ConfusionMatrix);
    PerClassLogLoss = new List<double>(metrics.PerClassLogLoss);
    TopKPredictionCount = metrics.TopKPredictionCount;
    TopKAccuracy = metrics.TopKAccuracy;
    MicroAccuracy = metrics.MicroAccuracy;
    MacroAccuracy = metrics.MacroAccuracy;
    LogLossReduction = metrics.LogLossReduction;
    LogLoss = metrics.LogLoss;
}
/// <summary>
/// Prints multiclass metrics (labels in Chinese) for the named model, including
/// per-class log-loss for every class the model actually has.
/// </summary>
/// <param name="name">Display name of the model.</param>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void PrintMultiClassClassificationMetrics(string name, MulticlassClassificationMetrics metrics)
{
    Console.WriteLine($"************************************************************");
    Console.WriteLine($"*    多类别分类模型的指标 {name}    ");
    Console.WriteLine($"*-----------------------------------------------------------");
    Console.WriteLine($"    宏精度 = {metrics.MacroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    微精度 = {metrics.MicroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    损失 = {metrics.LogLoss:0.####}, the closer to 0, the better");
    // GENERALIZED: previously hard-coded PerClassLogLoss[0..2] (exactly 3 classes),
    // which threw IndexOutOfRangeException for models with fewer classes and
    // silently dropped classes beyond the third. Iterate over all classes instead.
    for (int i = 0; i < metrics.PerClassLogLoss.Count; i++)
    {
        Console.WriteLine($"    损失 for class {i + 1} = {metrics.PerClassLogLoss[i]:0.####}, the closer to 0, the better");
    }
    Console.WriteLine($"************************************************************");
}
/// <summary>
/// Common data loading configuration, build, train, evaluate and save the
/// trained model to a zip file.
/// </summary>
/// <param name="mlContext">The shared MLContext.</param>
private static void BuildTrainEvaluateAndSaveModel(MLContext mlContext)
{
    // Load the train/test splits from disk (comma-separated, no header row).
    IDataView trainSet = mlContext.Data.LoadFromTextFile<FetalHealthData>(TrainDataPath, hasHeader: false, separatorChar: ',');
    IDataView testSet = mlContext.Data.LoadFromTextFile<FetalHealthData>(TestDataPath, hasHeader: false, separatorChar: ',');

    // Compose the feature-processing pipeline with the trainer and fit it.
    var processingPipeline = GetDataProcessPipeline(mlContext);
    var trainer = GetTrainer(processingPipeline, mlContext);
    var fullPipeline = processingPipeline.Append(trainer);
    ITransformer fittedModel = getTrainedModel(trainSet, fullPipeline);

    // Report accuracy on the held-out split, then persist the model to disk.
    MulticlassClassificationMetrics evalMetrics = EvaluateModel(mlContext, testSet, fittedModel);
    ShowAccuracyStats(trainer, evalMetrics);
    SaveModel(mlContext, trainSet, fittedModel);
}
/// <summary>
/// Prints multiclass metrics for the named model, including per-class log-loss
/// for every class the model actually has.
/// </summary>
/// <param name="name">Display name of the model.</param>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void PrintMulticlassClassificationMetrics(string name, MulticlassClassificationMetrics metrics)
{
    Console.WriteLine($"************************************************************");
    Console.WriteLine($"*    Metrics for {name} multi-class classification model   ");
    Console.WriteLine($"*-----------------------------------------------------------");
    Console.WriteLine($"    MacroAccuracy = {metrics.MacroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    MicroAccuracy = {metrics.MicroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    LogLoss = {metrics.LogLoss:0.####}, the closer to 0, the better");
    // GENERALIZED: previously hard-coded PerClassLogLoss[0..2] (exactly 3 classes),
    // which threw IndexOutOfRangeException for models with fewer classes and
    // silently dropped classes beyond the third. Iterate over all classes, using
    // 1-based labels to match the original "class 1..3" output.
    for (int i = 0; i < metrics.PerClassLogLoss.Count; i++)
    {
        Console.WriteLine($"    LogLoss for class {i + 1} = {metrics.PerClassLogLoss[i]:0.####}, the closer to 0, the better");
    }
    Console.WriteLine($"************************************************************");
}
/// <summary>
/// Queues a formatted multiclass-metrics report on the instance logger instead of
/// writing to the console directly.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public void PrintMulticlassClassificationMetrics(MulticlassClassificationMetrics metrics)
{
    const string border = "************************************************************";
    _logger.Enqueue(border);
    _logger.Enqueue("*    Metrics for multi-class classification model   ");
    _logger.Enqueue("*-----------------------------------------------------------");
    _logger.Enqueue($"    MacroAccuracy = {metrics.MacroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    _logger.Enqueue($"    MicroAccuracy = {metrics.MicroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    _logger.Enqueue($"    LogLoss = {metrics.LogLoss:0.####}, the closer to 0, the better");
    // Classes are reported with 1-based labels.
    for (var classIndex = 0; classIndex < metrics.PerClassLogLoss.Count; classIndex++)
    {
        _logger.Enqueue($"    LogLoss for class {classIndex + 1} = {metrics.PerClassLogLoss[classIndex]:0.####}, the closer to 0, the better");
    }
    _logger.Enqueue(border);
}
/// <summary>
/// Writes a formatted multiclass-metrics summary to the console, with per-class
/// log-loss reported using 1-based class labels.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void PrintMulticlassClassificationMetrics(MulticlassClassificationMetrics metrics)
{
    const string border = "************************************************************";
    Console.WriteLine(border);
    Console.WriteLine("*    Metrics for multi-class classification model   ");
    Console.WriteLine("*-----------------------------------------------------------");
    Console.WriteLine($"    MacroAccuracy = {metrics.MacroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    MicroAccuracy = {metrics.MicroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    LogLoss = {metrics.LogLoss:0.####}, the closer to 0, the better");
    for (var classIndex = 0; classIndex < metrics.PerClassLogLoss.Count; classIndex++)
    {
        Console.WriteLine($"    LogLoss for class {classIndex + 1} = {metrics.PerClassLogLoss[classIndex]:0.####}, the closer to 0, the better");
    }
    Console.WriteLine(border);
}
/// <summary>
/// Writes a formatted multiclass-metrics summary (header in Russian) to the
/// console, with per-class log-loss using 1-based class labels.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void PrintMulticlassClassificationMetrics(MulticlassClassificationMetrics metrics)
{
    const string border = "************************************************************";
    Console.WriteLine(border);
    Console.WriteLine("*    Метрики для модели классификации нескольких классов    ");
    Console.WriteLine("*-----------------------------------------------------------");
    Console.WriteLine($"    MacroAccuracy = {metrics.MacroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    MicroAccuracy = {metrics.MicroAccuracy:0.####}, a value between 0 and 1, the closer to 1, the better");
    Console.WriteLine($"    LogLoss = {metrics.LogLoss:0.####}, the closer to 0, the better");
    for (var classIndex = 0; classIndex < metrics.PerClassLogLoss.Count; classIndex++)
    {
        Console.WriteLine($"    LogLoss for class {classIndex + 1} = {metrics.PerClassLogLoss[classIndex]:0.####}, the closer to 0, the better");
    }
    Console.WriteLine(border);
}
/// <summary>
/// Prints the confusion table followed by a compact one-row summary table of
/// the headline multiclass metrics.
/// </summary>
/// <param name="labelnames">Class label names — unused here; kept for interface compatibility.</param>
/// <param name="name">Display name of the model.</param>
/// <param name="metrics">Evaluated multiclass metrics.</param>
public static void PrintMultiClassClassificationMetrics(List<string> labelnames, string name, MulticlassClassificationMetrics metrics)
{
    // Raw confusion table first, then the summary.
    Console.WriteLine(metrics.ConfusionMatrix.GetFormattedConfusionTable());
    Console.WriteLine($"{name} Multi-class klassifikationsmodel");

    var summary = new ConsoleTable("AccuracyMicro", "AccuracyMacro", "LogLoss", "LogLossReduction");
    summary.Options.EnableCount = false;
    summary.AddRow(
        $"{metrics.MicroAccuracy:0.####}",
        $"{metrics.MacroAccuracy:0.####}",
        $"{metrics.LogLoss:0.####}",
        $"{metrics.LogLossReduction:0.####}");
    summary.Write();
}
/// <summary>
/// Renders the raw (un-normalized) confusion-matrix counts as a bitmap.
/// </summary>
/// <param name="metrics">Evaluated multiclass metrics providing the confusion matrix.</param>
/// <returns>A bitmap rendered by <c>DrawConfusionMatrix</c>.</returns>
public static Bitmap GetConfusionMatrix(MulticlassClassificationMetrics metrics)
{
    // NOTE(review): GetCountForClassPair is queried as (predicted, actual) —
    // confirm this matches the axis convention DrawConfusionMatrix expects.
    int numClasses = metrics.ConfusionMatrix.NumberOfClasses;
    var counts = new double[numClasses, numClasses];
    for (int actual = 0; actual < numClasses; actual++)
    {
        for (int predicted = 0; predicted < numClasses; predicted++)
        {
            counts[actual, predicted] = metrics.ConfusionMatrix.GetCountForClassPair(predicted, actual);
        }
    }
    return DrawConfusionMatrix(counts);
}