/// <summary>
/// Asserts the expected quality metrics and confusion matrix produced by the
/// baseline binary classifier over the test data set.
/// </summary>
private void ValidateBinaryMetrics(Microsoft.ML.Legacy.Models.BinaryClassificationMetrics metrics)
{
    // Overall metrics, each compared at the decimal precision it is stable to.
    Assert.Equal(0.6111, metrics.Accuracy, 4);
    Assert.Equal(0.6667, metrics.Auc, 4);
    Assert.Equal(0.8621, metrics.Auprc, 4);
    Assert.Equal(1, metrics.Entropy, 3);
    Assert.Equal(0.72, metrics.F1Score, 2);
    Assert.Equal(0.9689, metrics.LogLoss, 4);
    Assert.Equal(3.1122, metrics.LogLossReduction, 4);
    Assert.Equal(1, metrics.NegativePrecision, 1);
    Assert.Equal(0.2222, metrics.NegativeRecall, 4);
    Assert.Equal(0.5625, metrics.PositivePrecision, 4);
    Assert.Equal(1, metrics.PositiveRecall);

    // Confusion matrix shape and class ordering (row 0 = positive, row 1 = negative).
    var matrix = metrics.ConfusionMatrix;
    Assert.Equal(2, matrix.Order);
    Assert.Equal(2, matrix.ClassNames.Count);
    string[] classNames = { "positive", "negative" };
    Assert.Equal(classNames[0], matrix.ClassNames[0]);
    Assert.Equal(classNames[1], matrix.ClassNames[1]);

    // Every cell must be reachable both by numeric index and by class-name pair.
    var expectedCells = new[,] { { 9, 0 }, { 7, 2 } };
    for (int row = 0; row < 2; row++)
    {
        for (int col = 0; col < 2; col++)
        {
            Assert.Equal(expectedCells[row, col], matrix[row, col]);
            Assert.Equal(expectedCells[row, col], matrix[classNames[row], classNames[col]]);
        }
    }
}
/// <summary>
/// Asserts the expected quality metrics and confusion matrix produced by the
/// SymSGD binary classifier over the test data set.
/// </summary>
private void ValidateBinaryMetricsSymSGD(Microsoft.ML.Legacy.Models.BinaryClassificationMetrics metrics)
{
    // Overall metrics, each compared at the decimal precision it is stable to.
    Assert.Equal(0.8889, metrics.Accuracy, 4);
    Assert.Equal(1, metrics.Auc, 1);
    Assert.Equal(0.96, metrics.Auprc, 2);
    Assert.Equal(1, metrics.Entropy, 3);
    Assert.Equal(0.9, metrics.F1Score, 4);
    Assert.Equal(0.97, metrics.LogLoss, 3);
    Assert.Equal(3.030, metrics.LogLossReduction, 3);
    Assert.Equal(1, metrics.NegativePrecision, 3);
    Assert.Equal(0.778, metrics.NegativeRecall, 3);
    Assert.Equal(0.818, metrics.PositivePrecision, 3);
    Assert.Equal(1, metrics.PositiveRecall);

    // Confusion matrix shape and class ordering (row 0 = positive, row 1 = negative).
    var matrix = metrics.ConfusionMatrix;
    Assert.Equal(2, matrix.Order);
    Assert.Equal(2, matrix.ClassNames.Count);
    string[] classNames = { "positive", "negative" };
    Assert.Equal(classNames[0], matrix.ClassNames[0]);
    Assert.Equal(classNames[1], matrix.ClassNames[1]);

    // Every cell must be reachable both by numeric index and by class-name pair.
    var expectedCells = new[,] { { 9, 0 }, { 2, 7 } };
    for (int row = 0; row < 2; row++)
    {
        for (int col = 0; col < 2; col++)
        {
            Assert.Equal(expectedCells[row, col], matrix[row, col]);
            Assert.Equal(expectedCells[row, col], matrix[classNames[row], classNames[col]]);
        }
    }
}
/// <summary>
/// Asserts the expected quality metrics and confusion matrix produced by the
/// LightGBM binary classifier over the test data set.
/// </summary>
private void ValidateBinaryMetricsLightGBM(Microsoft.ML.Legacy.Models.BinaryClassificationMetrics metrics)
{
    // Overall metrics; expected values are the full-precision baselines,
    // compared at the decimal precision each is stable to.
    Assert.Equal(0.61111111111111116, metrics.Accuracy, 4);
    Assert.Equal(0.83950617283950613, metrics.Auc, 1);
    Assert.Equal(0.88324268324268318, metrics.Auprc, 2);
    Assert.Equal(1, metrics.Entropy, 3);
    Assert.Equal(0.72, metrics.F1Score, 4);
    Assert.Equal(0.96456100297125325, metrics.LogLoss, 4);
    Assert.Equal(3.5438997028746755, metrics.LogLossReduction, 4);
    Assert.Equal(1, metrics.NegativePrecision, 3);
    Assert.Equal(0.22222222222222221, metrics.NegativeRecall, 3);
    Assert.Equal(0.5625, metrics.PositivePrecision, 3);
    Assert.Equal(1, metrics.PositiveRecall);

    // Confusion matrix shape and class ordering (row 0 = positive, row 1 = negative).
    var matrix = metrics.ConfusionMatrix;
    Assert.Equal(2, matrix.Order);
    Assert.Equal(2, matrix.ClassNames.Count);
    string[] classNames = { "positive", "negative" };
    Assert.Equal(classNames[0], matrix.ClassNames[0]);
    Assert.Equal(classNames[1], matrix.ClassNames[1]);

    // Every cell must be reachable both by numeric index and by class-name pair.
    var expectedCells = new[,] { { 9, 0 }, { 7, 2 } };
    for (int row = 0; row < 2; row++)
    {
        for (int col = 0; col < 2; col++)
        {
            Assert.Equal(expectedCells[row, col], matrix[row, col]);
            Assert.Equal(expectedCells[row, col], matrix[classNames[row], classNames[col]]);
        }
    }
}
/// <summary>
/// Computes the quality metrics for the PredictionModel using the specified data set.
/// </summary>
/// <param name="model">
/// The trained PredictionModel to be evaluated.
/// </param>
/// <param name="testData">
/// The test data that will be predicted and used to evaluate the model.
/// </param>
/// <returns>
/// A BinaryClassificationMetrics instance that describes how well the model performed against the test data.
/// </returns>
public BinaryClassificationMetrics Evaluate(PredictionModel model, ILearningPipelineLoader testData)
{
    var env = new MLContext();
    env.CheckValue(model, nameof(model));
    env.CheckValue(testData, nameof(testData));

    Experiment experiment = env.CreateExperiment();

    // Wire the loader into the experiment and make sure it yielded a data step.
    ILearningPipelineStep loaderStep = testData.ApplyStep(previousStep: null, experiment);
    if (!(loaderStep is ILearningPipelineDataStep dataStep))
    {
        throw env.Except($"The {nameof(ILearningPipelineLoader)} did not return a {nameof(ILearningPipelineDataStep)} from ApplyStep.");
    }

    // Score the test data with the trained model, then evaluate the scored output.
    var scorer = new DatasetTransformScorer { Data = dataStep.Data };
    DatasetTransformScorer.Output scored = experiment.Add(scorer);
    Data = scored.ScoredData;
    Output evaluateOutput = experiment.Add(this);

    experiment.Compile();
    experiment.SetInput(scorer.TransformModel, model.PredictorModel);
    testData.SetInput(env, experiment);
    experiment.Run();

    // Both result views must be present for the metrics to be assembled.
    IDataView overallMetrics = experiment.GetOutput(evaluateOutput.OverallMetrics)
        ?? throw env.Except($"Could not find OverallMetrics in the results returned in {nameof(BinaryClassificationEvaluator)} Evaluate.");
    IDataView confusionMatrix = experiment.GetOutput(evaluateOutput.ConfusionMatrix)
        ?? throw env.Except($"Could not find ConfusionMatrix in the results returned in {nameof(BinaryClassificationEvaluator)} Evaluate.");

    var metric = BinaryClassificationMetrics.FromMetrics(env, overallMetrics, confusionMatrix);
    if (metric.Count != 1)
    {
        throw env.Except($"Exactly one metric set was expected but found {metric.Count} metrics");
    }
    return metric[0];
}