/// <summary>
/// Add a set of evaluation metrics to the set of observations.
/// </summary>
/// <param name="metrics">The observed binary classification evaluation metric</param>
void IMetricsStatistics<BinaryClassificationMetrics>.Add(BinaryClassificationMetrics metrics)
{
    AreaUnderRocCurve.Add(metrics.AreaUnderRocCurve);
    Accuracy.Add(metrics.Accuracy);
    PositivePrecision.Add(metrics.PositivePrecision);
    PositiveRecall.Add(metrics.PositiveRecall);
    NegativePrecision.Add(metrics.NegativePrecision);
    NegativeRecall.Add(metrics.NegativeRecall);
    F1Score.Add(metrics.F1Score);
    AreaUnderPrecisionRecallCurve.Add(metrics.AreaUnderPrecisionRecallCurve);
}
/// <summary>
/// Add a set of evaluation metrics to the set of observations.
/// </summary>
/// <param name="metrics">The observed binary classification evaluation metric</param>
public override void Add(BinaryClassificationMetrics metrics)
{
    Auc.Add(metrics.AreaUnderRocCurve);
    Accuracy.Add(metrics.Accuracy);
    PositivePrecision.Add(metrics.PositivePrecision);
    PositiveRecall.Add(metrics.PositiveRecall);
    NegativePrecision.Add(metrics.NegativePrecision);
    NegativeRecall.Add(metrics.NegativeRecall);
    F1Score.Add(metrics.F1Score);
    Auprc.Add(metrics.AreaUnderPrecisionRecallCurve);
}
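// Each field written to above (Accuracy, F1Score, Auc, ...) acts as a
// per-metric accumulator over the observed folds. Below is a minimal sketch
// of such an accumulator; the MetricStatistics name and its members are
// assumptions for illustration, not necessarily the library's actual API.
// It maintains a running count, mean, and sum of squared deviations using
// Welford's online algorithm, from which the standard deviation follows.
public sealed class MetricStatistics
{
    private int _count;
    private double _mean;
    private double _m2; // running sum of squared deviations from the mean

    public double Mean => _mean;

    public double StandardDeviation =>
        _count > 1 ? Math.Sqrt(_m2 / (_count - 1)) : 0.0;

    public void Add(double value)
    {
        // Welford's update: numerically stable running mean and variance.
        _count++;
        var delta = value - _mean;
        _mean += delta / _count;
        _m2 += delta * (value - _mean);
    }
}

// Example use, aggregating accuracy across cross-validation folds:
//   var accuracy = new MetricStatistics();
//   foreach (var fold in foldMetrics) accuracy.Add(fold.Accuracy);
//   Console.WriteLine($"{accuracy.Mean} +/- {accuracy.StandardDeviation}");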
public void F1ScoreTest()
{
    var actual = new Matrix(100, 1);
    var expected = new Matrix(100, 1);
    actual.InRandomize(0.25, 0.75);
    expected.InRandomize(0.25, 0.75);

    var metric = new F1Score();
    var e = metric.Evaluate(actual, expected);

    // Reference computation: threshold each entry at 0.5 and accumulate a
    // numerator and denominator element by element.
    var val = 0.0;
    var div = 0.0;
    for (var i = 0; i < actual.Rows; i++)
    {
        for (var j = 0; j < actual.Columns; j++)
        {
            if (Math.Abs(actual[i, j]) < 0.5 && Math.Abs(expected[i, j]) < 0.5)
            {
                // Both values round to 0 (true negative): no numerator
                // contribution.
                val += 0.0;
            }
            else if (Math.Abs(actual[i, j] - 1.0) < 0.5 && Math.Abs(expected[i, j] - 1.0) < 0.5)
            {
                // Both values round to 1 (true positive): weighted twice,
                // echoing the 2 * TP term of the F1 formula.
                val += 2.0;
                div += 2.0;
            }
            // Every element contributes once to the denominator.
            div++;
        }
    }
    val /= div;

    Assert.IsTrue(Math.Abs(e - val) < 0.01, metric.Type().ToString() + " Evaluate.");
}
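// For reference, the textbook binary F1 score is
//   F1 = 2 * TP / (2 * TP + FP + FN),
// i.e. true negatives do not appear in the denominator. Note that the
// reference computation in the test above increments its denominator once
// per element, true negatives included, so it is not identical to this
// formula. A direct element-wise computation under the same 0.5 threshold
// might look like the sketch below; ComputeF1 is a hypothetical helper for
// illustration, not part of the test suite above.
private static double ComputeF1(Matrix actual, Matrix expected)
{
    int tp = 0, fp = 0, fn = 0;
    for (var i = 0; i < actual.Rows; i++)
    {
        for (var j = 0; j < actual.Columns; j++)
        {
            var a = actual[i, j] >= 0.5;   // thresholded prediction
            var e = expected[i, j] >= 0.5; // thresholded label
            if (a && e) tp++;
            else if (a && !e) fp++;
            else if (!a && e) fn++;
        }
    }
    // Guard against the degenerate case with no positives at all.
    return tp + fp + fn == 0 ? 0.0 : 2.0 * tp / (2.0 * tp + fp + fn);
}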