Example #1
        /// <summary>
        /// Writes the precision-recall curve to the file with the specified name.
        /// </summary>
        /// <param name="fileName">The name of the file to write the precision-recall curve to.</param>
        /// <param name="evaluator">The classifier evaluator.</param>
        /// <param name="groundTruth">The ground truth.</param>
        /// <param name="predictiveDistributions">The predictive distributions.</param>
        /// <param name="positiveClassLabel">The label of the positive class.</param>
        private void WritePrecisionRecallCurve(
            string fileName,
            ClassifierEvaluator<IList<LabeledFeatureValues>, LabeledFeatureValues, IList<LabelDistribution>, string> evaluator,
            IList<LabeledFeatureValues> groundTruth,
            IList<IDictionary<string, double>> predictiveDistributions,
            string positiveClassLabel)
        {
            Debug.Assert(predictiveDistributions != null, "The predictive distributions must not be null.");
            Debug.Assert(predictiveDistributions.Count > 0, "The predictive distributions must not be empty.");
            Debug.Assert(positiveClassLabel != null, "The label of the positive class must not be null.");

            var precisionRecallCurve = evaluator.PrecisionRecallCurve(positiveClassLabel, groundTruth, predictiveDistributions);

            using (var writer = new StreamWriter(fileName))
            {
                writer.WriteLine("# Precision-recall curve");
                writer.WriteLine("#");
                writer.WriteLine("# Class '" + positiveClassLabel + "'     (versus the rest)");
                writer.WriteLine("#");
                writer.WriteLine("# precision (P), Recall (R)");
                foreach (var point in precisionRecallCurve)
                {
                    writer.WriteLine(point);
                }
            }
        }
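
The curve written above can be post-processed to pick an operating point. The following is a minimal, self-contained sketch (not part of the original example; the class and method names are illustrative) that computes the F1 score, the harmonic mean of precision and recall, for each point of a curve given as plain (precision, recall) tuples and returns the best one.

using System.Collections.Generic;

static class PrecisionRecallUtils
{
    // Returns the (precision, recall, F1) triple with the highest F1 score,
    // where F1 = 2PR / (P + R).
    public static (double Precision, double Recall, double F1) BestF1(
        IEnumerable<(double Precision, double Recall)> curve)
    {
        var best = (Precision: 0.0, Recall: 0.0, F1: 0.0);
        foreach (var (precision, recall) in curve)
        {
            double f1 = precision + recall > 0
                ? 2 * precision * recall / (precision + recall)
                : 0.0;
            if (f1 > best.F1)
            {
                best = (precision, recall, f1);
            }
        }

        return best;
    }
}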
    /// <summary>
    /// Writes the precision-recall curve to the file with the specified name.
    /// </summary>
    /// <param name="fileName">The name of the file to write the precision-recall curve to.</param>
    /// <param name="evaluator">The classifier evaluator.</param>
    /// <param name="x">The x vector of the ground truth.</param>
    /// <param name="y">The y of the ground truth.</param>
    /// <param name="yPredicDistrib">The predictive distributions.</param>
    /// <param name="positiveClassLabel">The label of the positive class.</param>
    /// <remarks>Adapted from MicrosoftResearch.Infer.Learners</remarks>
    private void WritePrecisionRecallCurve(
        string fileName,
        ClassifierEvaluator<IList<Vector>, int, IList<string>, string> evaluator,
        Vector[] x,
        IList<string> y,
        IEnumerable<IDictionary<string, double>> yPredicDistrib,
        string positiveClassLabel)
    {
        Debug.Assert(yPredicDistrib != null, "The predictive distributions must not be null.");
        Debug.Assert(yPredicDistrib.Any(), "The predictive distributions must not be empty.");
        Debug.Assert(positiveClassLabel != null, "The label of the positive class must not be null.");

        var precisionRecallCurve = evaluator.PrecisionRecallCurve(positiveClassLabel, x, y, yPredicDistrib);

        using (var writer = new StreamWriter(fileName))
        {
            writer.WriteLine("# Precision-recall curve");
            writer.WriteLine("#");
            writer.WriteLine("# Class '" + positiveClassLabel + "'     (versus the rest)");
            writer.WriteLine("#");
            writer.WriteLine("# Recall (R), precision (P)");
            foreach (var point in precisionRecallCurve)
            {
                writer.WriteLine("{0}, {1}", point.First, point.Second);
            }
        }
    }
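
For completeness, here is a small, self-contained reader for the files produced by the second overload (the class name below is illustrative and not part of the original example). It skips the '#' comment header and parses each "recall, precision" data line back into a tuple; the parse assumes the file was written under the same culture settings as the reader.

using System.Collections.Generic;
using System.IO;

static class PrecisionRecallCurveReader
{
    // Reads a precision-recall curve file written by WritePrecisionRecallCurve.
    // Lines starting with '#' are comments; each data line holds "recall, precision".
    public static List<(double Recall, double Precision)> Read(string fileName)
    {
        var points = new List<(double Recall, double Precision)>();
        foreach (var line in File.ReadLines(fileName))
        {
            if (string.IsNullOrWhiteSpace(line) || line.StartsWith("#"))
            {
                continue;
            }

            var parts = line.Split(',');
            points.Add((double.Parse(parts[0]), double.Parse(parts[1])));
        }

        return points;
    }
}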