Code example #1
 /// <summary>
 /// Creates an Infer.NET inference algorithm which trains the Bayes point machine classifier.
 /// </summary>
 /// <param name="computeModelEvidence">If true, the generated training algorithm computes evidence.</param>
 /// <param name="useSparseFeatures">If true, the generated training algorithm expects features in a sparse representation.</param>
 /// <returns>The created <see cref="IGeneratedAlgorithm"/> for training.</returns>
 protected override IGeneratedAlgorithm CreateTrainingAlgorithm(bool computeModelEvidence, bool useSparseFeatures)
 {
     return InferenceAlgorithmUtilities.CreateMulticlassTrainingAlgorithm(
                computeModelEvidence,
                useSparseFeatures,
                useCompoundWeightPriorDistributions: false);
 }
Code example #2
        /// <summary>
        /// Runs the generated prediction algorithm for the specified features.
        /// </summary>
        /// <param name="featureValues">The feature values.</param>
        /// <param name="featureIndexes">The feature indexes.</param>
        /// <param name="iterationCount">The number of iterations to run the prediction algorithm for.</param>
        /// <returns>The predictive distributions over labels.</returns>
        public IEnumerable<TLabelDistribution> PredictDistribution(double[][] featureValues, int[][] featureIndexes, int iterationCount)
        {
            InferenceAlgorithmUtilities.CheckIterationCount(iterationCount);
            InferenceAlgorithmUtilities.CheckFeatures(this.UseSparseFeatures, this.FeatureCount, featureValues, featureIndexes);

            // Update prior weight distributions of the prediction algorithm to the posterior weight distributions of the training algorithm
            this.PredictionAlgorithm.SetObservedValue(InferenceQueryVariableNames.WeightPriors, this.WeightMarginals);

            // Infer posterior distribution over labels, one instance after the other
            for (int i = 0; i < featureValues.Length; i++)
            {
                // Observe a single feature vector
                this.PredictionAlgorithm.SetObservedValue(InferenceQueryVariableNames.InstanceCount, 1);

                if (this.UseSparseFeatures)
                {
                    this.PredictionAlgorithm.SetObservedValue(InferenceQueryVariableNames.InstanceFeatureCounts, new[] { featureValues[i].Length });
                    this.PredictionAlgorithm.SetObservedValue(InferenceQueryVariableNames.FeatureIndexes, new[] { featureIndexes[i] });
                }

                this.PredictionAlgorithm.SetObservedValue(InferenceQueryVariableNames.FeatureValues, new[] { featureValues[i] });

                // Infer the posterior distribution over the label
                yield return this.CopyLabelDistribution(this.RunPredictionAlgorithm(iterationCount));
            }
        }
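
For orientation, here is a minimal, hypothetical usage sketch of PredictDistribution with a dense feature representation. The algorithms instance is an assumption made for illustration, and passing null for the feature indexes in the dense case is likewise assumed rather than taken from the snippet above.

    // Hypothetical dense-feature usage; 'algorithms' is assumed to be a concrete
    // InferenceAlgorithms instance created with useSparseFeatures = false.
    double[][] featureValues =
    {
        new[] { 1.0, 0.2, -0.5 },  // instance 0
        new[] { 1.0, 0.0,  3.1 },  // instance 1
    };

    // Dense representation: no feature indexes are required (assumption), so null is passed.
    foreach (var labelDistribution in algorithms.PredictDistribution(featureValues, featureIndexes: null, iterationCount: 10))
    {
        Console.WriteLine(labelDistribution);
    }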
Code example #3
        /// <summary>
        /// Initializes a new instance of the <see cref="GaussianBinaryFactorizedInferenceAlgorithms"/> class.
        /// </summary>
        /// <param name="computeModelEvidence">If true, the training algorithm computes evidence.</param>
        /// <param name="useSparseFeatures">If true, the inference algorithms expect features in a sparse representation.</param>
        /// <param name="featureCount">The number of features that the inference algorithms use.</param>
        /// <param name="weightPriorVariance">The variance of the prior distributions over weights. Defaults to 1.</param>
        public GaussianBinaryFactorizedInferenceAlgorithms(bool computeModelEvidence, bool useSparseFeatures, int featureCount, double weightPriorVariance = 1.0)
            : base(computeModelEvidence, useSparseFeatures, featureCount)
        {
            InferenceAlgorithmUtilities.CheckVariance(weightPriorVariance);

            // Set the prior distributions over weights
            this.weightPriorDistributions = new GaussianArray(new Gaussian(0.0, weightPriorVariance), featureCount);
        }
Code example #4
        /// <summary>
        /// Computes the logarithm of the model evidence corrections required in batched training.
        /// </summary>
        /// <returns>The logarithm of the model evidence corrections.</returns>
        /// <remarks>
        /// When the training data is split into several batches, it is necessary to eliminate evidence contributions
        /// which would otherwise be double counted. In essence, evidence corrections remove the superfluous contributions
        /// of factors which are shared across data batches, such as priors and constraints. To compute the evidence
        /// contributions of the factors shared across batches, one can compute the evidence on an empty batch.
        /// </remarks>
        protected virtual double ComputeLogEvidenceCorrection()
        {
            // Correct the below empty batch correction and add the missing evidence contribution
            // for the Replicate operator on the weights for all batches
            double logModelEvidenceCorrection = InferenceAlgorithmUtilities.ComputeLogEvidenceCorrectionReplicateAllBatches(this.BatchWeightOutputMessages);

            // Compute the evidence contribution of all factors shared across batches and remove it for all but one batch.
            logModelEvidenceCorrection -= (this.BatchCount - 1) * this.ComputeLogEvidenceContributionEmptyBatch();

            return logModelEvidenceCorrection;
        }
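
Written as a formula, the correction computed above is, with B = this.BatchCount, R_w = ComputeLogEvidenceCorrectionReplicateAllBatches(this.BatchWeightOutputMessages), and E_0 = ComputeLogEvidenceContributionEmptyBatch():

    logModelEvidenceCorrection = R_w - (B - 1) * E_0

The factors shared across batches (priors and constraints) would otherwise contribute their evidence once per batch, so B - 1 of those contributions are subtracted, while R_w restores the contribution of the Replicate operator on the weights that the per-batch runs do not account for.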
Code example #5
        /// <summary>
        /// Computes the logarithm of the model evidence corrections required in batched training.
        /// </summary>
        /// <returns>The logarithm of the model evidence corrections.</returns>
        /// <remarks>
        /// When the training data is split into several batches, it is necessary to eliminate evidence contributions
        /// which would otherwise be double counted. In essence, evidence corrections remove the superfluous contributions
        /// of factors which are shared across data batches, such as priors and constraints. To compute the evidence
        /// contributions of the factors shared across batches, one can compute the evidence on an empty batch.
        /// </remarks>
        protected override double ComputeLogEvidenceCorrection()
        {
            // Correct the below base correction and add the missing evidence contribution
            // for the Replicate operator on the weight precision rates for all batches
            double logModelEvidenceCorrection =
                InferenceAlgorithmUtilities.ComputeLogEvidenceCorrectionReplicateAllBatches(this.batchWeightPrecisionRateOutputMessages);

            // Compute base evidence correction
            logModelEvidenceCorrection += base.ComputeLogEvidenceCorrection();

            return logModelEvidenceCorrection;
        }
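
Assuming the base implementation is the one shown in code example #4, this override simply adds the Replicate correction for the weight precision rates on top of the base correction:

    logModelEvidenceCorrection = R_rates + R_w - (B - 1) * E_0

where R_rates is computed from this.batchWeightPrecisionRateOutputMessages and the remaining terms come from the base class as before.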
Code example #6
        /// <summary>
        /// Initializes a new instance of the <see cref="InferenceAlgorithms{TWeightDistributions,TLabel,TLabelDistribution}"/> class.
        /// </summary>
        /// <param name="computeModelEvidence">If true, the training algorithm computes evidence.</param>
        /// <param name="useSparseFeatures">If true, the inference algorithms expect features in a sparse representation.</param>
        /// <param name="featureCount">The number of features that the inference algorithms use.</param>
        protected InferenceAlgorithms(bool computeModelEvidence, bool useSparseFeatures, int featureCount)
        {
            InferenceAlgorithmUtilities.CheckFeatureCount(featureCount);

            this.computeModelEvidence = computeModelEvidence;
            this.UseSparseFeatures    = useSparseFeatures;
            this.FeatureCount         = featureCount;
            this.BatchCount           = 1;

            // Create the inference algorithms for training and prediction
            this.CreateInferenceAlgorithms();

            // Initialize the inference algorithms
            this.InitializeInferenceAlgorithms();
        }
Code example #7
        /// <summary>
        /// Initializes a new instance of the <see cref="MulticlassFactorizedInferenceAlgorithms"/> class.
        /// </summary>
        /// <param name="computeModelEvidence">If true, the training algorithm computes evidence.</param>
        /// <param name="useSparseFeatures">If true, the inference algorithms expect features in a sparse representation.</param>
        /// <param name="featureCount">The number of features that the inference algorithms use.</param>
        /// <param name="classCount">The number of classes that the inference algorithms use.</param>
        protected MulticlassFactorizedInferenceAlgorithms(bool computeModelEvidence, bool useSparseFeatures, int featureCount, int classCount)
            : base(computeModelEvidence, useSparseFeatures, featureCount)
        {
            InferenceAlgorithmUtilities.CheckClassCount(classCount);
            this.classCount = classCount;

            // Set the marginal distributions over weights divided by their prior distributions
            // to uniform distributions in the training algorithm (no constraints)
            this.WeightMarginalsDividedByPriors = new GaussianMatrix(new GaussianArray(Gaussian.Uniform(), featureCount), classCount);

            // Set the constraint distributions over weights to uniform distributions (no constraints)
            this.WeightConstraints = new GaussianMatrix(new GaussianArray(Gaussian.Uniform(), featureCount), classCount);

            // Initialize the inference algorithms
            this.InitializeInferenceAlgorithms();
        }
Code example #8
        /// <summary>
        /// Runs the generated training algorithm for the specified features and labels.
        /// </summary>
        /// <param name="featureValues">The feature values.</param>
        /// <param name="featureIndexes">The feature indexes.</param>
        /// <param name="labels">The labels.</param>
        /// <param name="iterationCount">The number of iterations to run the training algorithm for.</param>
        /// <param name="batchNumber">
        /// An optional batch number. Defaults to 0 and is used only if the training data is divided into batches.
        /// </param>
        protected virtual void TrainInternal(double[][] featureValues, int[][] featureIndexes, TLabel[] labels, int iterationCount, int batchNumber = 0)
        {
            InferenceAlgorithmUtilities.CheckIterationCount(iterationCount);
            InferenceAlgorithmUtilities.CheckBatchNumber(batchNumber, this.BatchCount);
            InferenceAlgorithmUtilities.CheckFeatures(this.UseSparseFeatures, this.FeatureCount, featureValues, featureIndexes);
            Debug.Assert(featureValues.Length == labels.Length, "There must be the same number of feature values and labels.");

            // Observe features and labels
            this.TrainingAlgorithm.SetObservedValue(InferenceQueryVariableNames.InstanceCount, featureValues.Length);

            if (this.UseSparseFeatures)
            {
                this.TrainingAlgorithm.SetObservedValue(
                    InferenceQueryVariableNames.InstanceFeatureCounts,
                    Util.ArrayInit(featureValues.Length, instance => featureValues[instance].Length));
                this.TrainingAlgorithm.SetObservedValue(InferenceQueryVariableNames.FeatureIndexes, featureIndexes);
            }

            this.TrainingAlgorithm.SetObservedValue(InferenceQueryVariableNames.FeatureValues, featureValues);
            this.Labels = labels;

            if (this.BatchCount == 1)
            {
                // Required for incremental training
                this.WeightConstraints = this.WeightMarginalsDividedByPriors;

                // Run the training algorithm
                this.RunTrainingAlgorithm(iterationCount);
            }
            else
            {
                // Print information about current batch to console
                if (this.ShowProgress)
                {
                    Console.WriteLine("Batch {0} [{1} instance{2}]", batchNumber + 1, featureValues.Length, featureValues.Length == 1 ? string.Empty : "s");
                }

                // Compute the constraint distributions for the weights for the given batch
                this.WeightConstraints.SetToRatio(this.WeightMarginalsDividedByPriors, this.BatchWeightOutputMessages[batchNumber]);

                // Run the training algorithm
                this.RunTrainingAlgorithm(iterationCount);

                // Update the output messages for the weights for the given batch
                this.BatchWeightOutputMessages[batchNumber].SetToRatio(this.WeightMarginalsDividedByPriors, this.WeightConstraints);
            }
        }
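
Written in distribution-ratio notation, the per-batch message cycle in the else-branch above is, with q(w) the current weight marginals, p(w) the priors, and m_b the stored output message of batch b:

    constraint_b = (q(w) / p(w)) / m_b            // before running inference on batch b
    m_b          = (q(w) / p(w)) / constraint_b   // after inference, using the updated marginals

This mirrors the usual pattern for a variable shared across data batches: each batch sees the information gathered from all other batches through its constraint distribution, and only its own contribution is written back to m_b.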
Code example #9
        /// <summary>
        /// Runs the generated training algorithm for the specified features and labels.
        /// </summary>
        /// <param name="featureValues">The feature values.</param>
        /// <param name="featureIndexes">The feature indexes.</param>
        /// <param name="labels">The labels.</param>
        /// <param name="iterationCount">The number of iterations to run the training algorithm for.</param>
        /// <param name="batchNumber">
        /// An optional batch number. Defaults to 0 and is used only if the training data is divided into batches.
        /// </param>
        protected override void TrainInternal(double[][] featureValues, int[][] featureIndexes, int[] labels, int iterationCount, int batchNumber = 0)
        {
            if (this.UseSparseFeatures)
            {
                InferenceAlgorithmUtilities.CheckSparseFeatures(this.FeatureCount, featureValues, featureIndexes);

                // Observe the number of instances with zero-value features to anchor the compound prior distributions
                this.TrainingAlgorithm.SetObservedValue(
                    InferenceQueryVariableNames.ZeroFeatureValueInstanceCounts,
                    InferenceAlgorithmUtilities.CountZeroFeatureValueInstances(this.FeatureCount, featureIndexes));
            }

            if (this.BatchCount > 1)
            {
                // Compute the constraint distributions for the weight precision rates for the given batch
                this.weightPrecisionRateConstraints.SetToRatio(
                    this.weightPrecisionRateMarginalsDividedByPriors, this.batchWeightPrecisionRateOutputMessages[batchNumber], false);
            }
            else
            {
                // Required for incremental training
                this.weightPrecisionRateConstraints = this.weightPrecisionRateMarginalsDividedByPriors;
            }

            // Set the constraint distributions for the weight precision rates
            this.TrainingAlgorithm.SetObservedValue(InferenceQueryVariableNames.WeightPrecisionRateConstraints, this.weightPrecisionRateConstraints);

            // Run training
            base.TrainInternal(featureValues, featureIndexes, labels, iterationCount, batchNumber);

            // Update the marginal distributions over weight precision rates divided by their prior distributions
            this.weightPrecisionRateMarginalsDividedByPriors = this.TrainingAlgorithm.Marginal<GammaArray>(
                InferenceQueryVariableNames.WeightPrecisionRates, QueryTypes.MarginalDividedByPrior.Name);

            if (this.BatchCount > 1)
            {
                // Update the output messages for the weight precision rates for the given batch
                this.batchWeightPrecisionRateOutputMessages[batchNumber].SetToRatio(
                    this.weightPrecisionRateMarginalsDividedByPriors, this.weightPrecisionRateConstraints, false);
            }
        }
Code example #10
 /// <summary>
 /// Sets the number of batches the training data is split into and resets the per-batch output messages.
 /// </summary>
 /// <param name="value">The number of batches to use.</param>
 public void SetBatchCount(int value)
 {
     InferenceAlgorithmUtilities.CheckBatchCount(value);
     this.BatchCount = value;
     this.BatchWeightOutputMessages = this.CreateUniformBatchOutputMessages(this.BatchCount);
 }
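
As a hypothetical end-to-end sketch, SetBatchCount and the batched TrainInternal overload from code example #8 might be combined as follows. The algorithms object, the pre-split data arrays, and public access to TrainInternal (which is protected above, so a derived class or wrapper is assumed) are all assumptions made for illustration:

    // Hypothetical sketch: split the training data into 4 batches and make several passes.
    const int batchCount = 4;
    algorithms.SetBatchCount(batchCount);

    for (int pass = 0; pass < 5; pass++)
    {
        for (int batch = 0; batch < batchCount; batch++)
        {
            // Dense features are assumed here, so no feature indexes are passed.
            algorithms.TrainInternal(
                batchFeatureValues[batch],
                null,
                batchLabels[batch],
                iterationCount: 30,
                batchNumber: batch);
        }
    }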
Code example #11
 /// <summary>
 /// Creates an Infer.NET inference algorithm for making predictions from the Bayes point machine classifier.
 /// </summary>
 /// <param name="useSparseFeatures">If true, the generated prediction algorithm expects features in a sparse representation.</param>
 /// <returns>The created <see cref="IGeneratedAlgorithm"/> for prediction.</returns>
 protected override IGeneratedAlgorithm CreatePredictionAlgorithm(bool useSparseFeatures)
 {
     return InferenceAlgorithmUtilities.CreateMulticlassPredictionAlgorithm(useSparseFeatures);
 }