Example #1
0
    /// <summary>
    /// Writes the evaluation results for a classifier to the given writer.
    /// </summary>
    /// <param name="writer">The stream writer to write the report to.</param>
    /// <param name="evaluator">The classifier evaluator.</param>
    /// <param name="x">The x vector of the ground truth.</param>
    /// <param name="y">The y of the ground truth.</param>
    /// <param name="yPredicDistrib">The predictive distributions.</param>
    /// <param name="yPredicLabel">The predicted labels.</param>
    /// <remarks>Adapted from MicrosoftResearch.Infer.Learners</remarks>
    private void WriteReport(
        StreamWriter writer,
        ClassifierEvaluator<IList<Vector>, int, IList<string>, string> evaluator,
        Vector[] x,
        IList<string> y,
        IEnumerable<IDictionary<string, double>> yPredicDistrib,
        IEnumerable<string> yPredicLabel)
    {
        // Materialize the predictive distributions once: they are consumed by
        // three evaluator calls plus a count below, and a lazy sequence would
        // otherwise be re-enumerated (and possibly recomputed) on each pass.
        var distributions = yPredicDistrib as ICollection<IDictionary<string, double>>
            ?? yPredicDistrib.ToList();

        // Compute confusion matrix
        var confusionMatrix = evaluator.ConfusionMatrix(x, y, yPredicLabel);

        // Compute mean negative log probability (total divided by instance count)
        double meanNegativeLogProbability =
            evaluator.Evaluate(x, y, distributions, Metrics.NegativeLogProbability) / distributions.Count;

        // Compute M-measure (averaged pairwise AUC)
        IDictionary<string, IDictionary<string, double>> aucMatrix;
        double auc = evaluator.AreaUnderRocCurve(x, y, distributions, out aucMatrix);

        // Compute per-label AUC as well as micro- and macro-averaged AUC
        double microAuc;
        double macroAuc;
        int    macroAucClassLabelCount;
        var    labelAuc = this.ComputeLabelAuc(
            confusionMatrix,
            evaluator,
            x,
            y,
            distributions,
            out microAuc,
            out macroAuc,
            out macroAucClassLabelCount);

        // Instance-averaged performance
        this.WriteInstanceAveragedPerformance(writer, confusionMatrix, meanNegativeLogProbability, microAuc);

        // Class-averaged performance
        this.WriteClassAveragedPerformance(writer, confusionMatrix, auc, macroAuc, macroAucClassLabelCount);

        // Performance on individual classes
        this.WriteIndividualClassPerformance(writer, confusionMatrix, labelAuc);

        // Confusion matrix
        this.WriteConfusionMatrix(writer, confusionMatrix);

        // Pairwise AUC
        this.WriteAucMatrix(writer, aucMatrix);
    }
Example #2
0
        /// <summary>
        /// Writes the evaluation results to the supplied report writer.
        /// </summary>
        /// <param name="writer">The stream writer that receives the report.</param>
        /// <param name="evaluator">The classifier evaluator.</param>
        /// <param name="groundTruth">The ground truth.</param>
        /// <param name="predictiveDistributions">The predictive distributions.</param>
        /// <param name="predictedLabels">The predicted labels.</param>
        private void WriteReport(
            StreamWriter writer,
            ClassifierEvaluator<IList<LabeledFeatureValues>, LabeledFeatureValues, IList<LabelDistribution>, string> evaluator,
            IList<LabeledFeatureValues> groundTruth,
            ICollection<IDictionary<string, double>> predictiveDistributions,
            IEnumerable<string> predictedLabels)
        {
            // Confusion matrix of predicted vs. true labels.
            var confusion = evaluator.ConfusionMatrix(groundTruth, predictedLabels);

            // Average negative log probability per instance.
            double meanNegLogProb =
                evaluator.Evaluate(groundTruth, predictiveDistributions, Metrics.NegativeLogProbability)
                / predictiveDistributions.Count;

            // M-measure: averaged pairwise area under the ROC curve.
            IDictionary<string, IDictionary<string, double>> pairwiseAuc;
            double mMeasure = evaluator.AreaUnderRocCurve(groundTruth, predictiveDistributions, out pairwiseAuc);

            // Per-label AUC together with micro- and macro-averaged AUC.
            double microAuc, macroAuc;
            int macroAucLabelCount;
            var perLabelAuc = this.ComputeLabelAuc(
                confusion,
                evaluator,
                groundTruth,
                predictiveDistributions,
                out microAuc,
                out macroAuc,
                out macroAucLabelCount);

            // Emit each section of the report, from aggregate to detailed.
            this.WriteInstanceAveragedPerformance(writer, confusion, meanNegLogProb, microAuc);
            this.WriteClassAveragedPerformance(writer, confusion, mMeasure, macroAuc, macroAucLabelCount);
            this.WriteIndividualClassPerformance(writer, confusion, perLabelAuc);
            this.WriteConfusionMatrix(writer, confusion);
            this.WriteAucMatrix(writer, pairwiseAuc);
        }