    /// <summary>
    /// Writes the probability calibration plot to the file with the specified name.
    /// </summary>
    /// <param name="fileName">The name of the file to write the calibration plot to.</param>
    /// <param name="evaluator">The classifier evaluator.</param>
    /// <param name="x">The feature vectors of the ground truth.</param>
    /// <param name="y">The labels of the ground truth.</param>
    /// <param name="yPredicDistrib">The predictive distributions.</param>
    /// <param name="positiveClassLabel">The label of the positive class.</param>
    /// <remarks>Adapted from MicrosoftResearch.Infer.Learners.</remarks>
    private void WriteCalibrationCurve(
        string fileName,
        ClassifierEvaluator<IList<Vector>, int, IList<string>, string> evaluator,
        Vector[] x,
        IList<string> y,
        IEnumerable<IDictionary<string, double>> yPredicDistrib,
        string positiveClassLabel)
    {
        Debug.Assert(yPredicDistrib != null, "The predictive distributions must not be null.");
        Debug.Assert(yPredicDistrib.Any(), "The predictive distributions must not be empty.");
        Debug.Assert(positiveClassLabel != null, "The label of the positive class must not be null.");

        var    calibrationCurve = evaluator.CalibrationCurve(positiveClassLabel, x, y, yPredicDistrib);
        double calibrationError = calibrationCurve.Select(v => Metrics.AbsoluteError(v.First, v.Second)).Average();

        using (var writer = new StreamWriter(fileName))
        {
            writer.WriteLine("# Empirical probability calibration plot");
            writer.WriteLine("#");
            writer.WriteLine("# Class '" + positiveClassLabel + "'     (versus the rest)");
            writer.WriteLine("# Calibration error = {0}     (mean absolute error)", calibrationError);
            writer.WriteLine("#");
            writer.WriteLine("# Predicted probability, empirical probability");
            foreach (var point in calibrationCurve)
            {
                writer.WriteLine("{0}, {1}", point.First, point.Second);
            }
        }
    }
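
For reference, the method above emits a comment-headed CSV file. A sketch of what the output might look like, with a hypothetical class label ('spam') and made-up curve points chosen purely for illustration (the 0.03 error is the mean of |0.1-0.12|, |0.5-0.54|, and |0.9-0.87|):

    # Empirical probability calibration plot
    #
    # Class 'spam'     (versus the rest)
    # Calibration error = 0.03     (mean absolute error)
    #
    # Predicted probability, empirical probability
    0.1, 0.12
    0.5, 0.54
    0.9, 0.87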
Example #2
        /// <summary>
        /// Writes the probability calibration plot to the file with the specified name.
        /// </summary>
        /// <param name="fileName">The name of the file to write the calibration plot to.</param>
        /// <param name="evaluator">The classifier evaluator.</param>
        /// <param name="groundTruth">The ground truth.</param>
        /// <param name="predictiveDistributions">The predictive distributions.</param>
        /// <param name="positiveClassLabel">The label of the positive class.</param>
        private void WriteCalibrationCurve(
            string fileName,
            ClassifierEvaluator<IList<LabeledFeatureValues>, LabeledFeatureValues, IList<LabelDistribution>, string> evaluator,
            IList<LabeledFeatureValues> groundTruth,
            IList<IDictionary<string, double>> predictiveDistributions,
            string positiveClassLabel)
        {
            Debug.Assert(predictiveDistributions != null, "The predictive distributions must not be null.");
            Debug.Assert(predictiveDistributions.Count > 0, "The predictive distributions must not be empty.");
            Debug.Assert(positiveClassLabel != null, "The label of the positive class must not be null.");

            var    calibrationCurve = evaluator.CalibrationCurve(positiveClassLabel, groundTruth, predictiveDistributions);
            double calibrationError = calibrationCurve.Select(i => Metrics.AbsoluteError(i.EmpiricalProbability, i.PredictedProbability)).Average();

            using (var writer = new StreamWriter(fileName))
            {
                writer.WriteLine("# Empirical probability calibration plot");
                writer.WriteLine("#");
                writer.WriteLine("# Class '" + positiveClassLabel + "'     (versus the rest)");
                writer.WriteLine("# Calibration error = {0}     (mean absolute error)", calibrationError);
                writer.WriteLine("#");
                writer.WriteLine("# Predicted probability, empirical probability");
                foreach (var point in calibrationCurve)
                {
                    writer.WriteLine("{0}, {1}", point.PredictedProbability, point.EmpiricalProbability);
                }
            }
        }
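
A hypothetical call site for this overload, assuming the enclosing class already holds an evaluator, a test set, and predictions under these names (illustrative, not from the source):

            // Sketch only: evaluator, testSet, and predictions are assumed members/locals.
            this.WriteCalibrationCurve(
                "calibration.csv",   // output file name
                evaluator,           // the ClassifierEvaluator declared above
                testSet,             // IList<LabeledFeatureValues> ground truth
                predictions,         // IList<IDictionary<string, double>> predictive distributions
                "spam");             // positive class label (scored one-versus-rest)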
Example #3
        public void TestAbsoluteError()
        {
            Assert.Equal(0, Metrics.AbsoluteError(1, 1), Tolerance);
            Assert.Equal(0, Metrics.AbsoluteError(0, 0), Tolerance);
            Assert.Equal(2, Metrics.AbsoluteError(3, 1), Tolerance);
            Assert.Equal(3, Metrics.AbsoluteError(-5, -2), Tolerance);
            Assert.Equal(4, Metrics.AbsoluteError(3, -1), Tolerance);

            Assert.Equal(0, Metrics.AbsoluteError(true, true), Tolerance);
            Assert.Equal(1, Metrics.AbsoluteError(false, true), Tolerance);
            Assert.Equal(0, Metrics.AbsoluteError(false, false), Tolerance);
            Assert.Equal(1, Metrics.AbsoluteError(true, false), Tolerance);
        }
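
The assertions above pin down the contract being tested: numeric inputs yield the absolute difference, and boolean inputs behave as 0/1 indicators. A minimal sketch of that contract (what the test implies, not the library's actual source):

        // |truth - prediction| for numeric inputs.
        static double AbsoluteError(double truth, double prediction) => System.Math.Abs(truth - prediction);

        // Booleans compare as 0/1 indicators: 0 on agreement, 1 on disagreement.
        static double AbsoluteError(bool truth, bool prediction) => truth == prediction ? 0.0 : 1.0;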