Ejemplo n.º 1
0
        /// <summary>
        ///   Trains each model to recognize each of the output labels.
        /// </summary>
        /// <typeparam name="T">The type of a single training observation.</typeparam>
        /// <param name="inputs">The full set of training observations.</param>
        /// <param name="outputs">The class label associated with each observation in <paramref name="inputs"/>.</param>
        /// <returns>The sum log-likelihood for all models after training.</returns>
        /// 
        protected double Run<T>(T[] inputs, int[] outputs)
        {
            int classes = classifier.Classes;
            double[] logLikelihood = new double[classes];

            // For each model, train in parallel in release builds; use a plain
            // sequential loop in debug builds so single-stepping is predictable.
#if !DEBUG
            GABIZ.Base.Parallel.For(0, classes, i =>
#else
            for (int i = 0; i < classes; i++)
#endif
            {
                // Select the input/output set corresponding
                //  to the model's specialization class
                int[] inx = outputs.Find(y => y == i);
                T[] observations = inputs.Submatrix(inx);

                // Classes with no samples are skipped; their log-likelihood
                // entry stays at the default 0.
                if (observations.Length > 0)
                {
                    // Create and configure the learning algorithm
                    IUnsupervisedLearning teacher = algorithm(i);

                    // Train the current model in the input/output subset.
                    // NOTE(review): "observations as Array[]" yields null when T is not
                    // an array type — confirm T is always an array type at call sites.
                    logLikelihood[i] = teacher.Run(observations as Array[]);
                }
            }
            // NOTE(review): this excerpt appears truncated — the parallel-for closer
            // ");" and the method's return statement are missing from this snippet.
        /// <summary>
        ///   Trains one model per class label over the given observation database.
        /// </summary>
        /// <param name="observations_db">One observation sequence per training record.</param>
        /// <param name="class_labels">The class label assigned to each record.</param>
        /// <returns>The sum of the per-class log-likelihoods after training.</returns>
        public double Run(int[][] observations_db, int[] class_labels)
        {
            ValidationHelper.ValidateObservationDb(observations_db, 0, mClassifier.SymbolCount);

            int classCount = mClassifier.ClassCount;
            double[] classLogLikelihoods = new double[classCount];

            int recordCount = class_labels.Length;
            DiagnosticsHelper.Assert(observations_db.Length == recordCount);

            int[] perClassRecordCounts = new int[classCount];

            Parallel.For(0, classCount, classIndex =>
            {
                IUnsupervisedLearning teacher = mAlgorithmEntity(classIndex);

                // Collect the indices of every record tagged with this class.
                List<int> memberIndices = new List<int>();
                for (int record = 0; record < recordCount; ++record)
                {
                    if (class_labels[record] == classIndex)
                    {
                        memberIndices.Add(record);
                    }
                }

                int memberCount = memberIndices.Count;
                perClassRecordCounts[classIndex] = memberCount;

                if (memberCount != 0)
                {
                    // Build the class-specific sub-database and train on it.
                    int[][] observationsSubDb = new int[memberCount][];
                    for (int j = 0; j < memberCount; ++j)
                    {
                        observationsSubDb[j] = observations_db[memberIndices[j]];
                    }

                    classLogLikelihoods[classIndex] = teacher.Run(observationsSubDb);
                }
            });

            // Optionally re-estimate the class priors from the label frequencies.
            if (mEmpirical)
            {
                for (int c = 0; c < classCount; c++)
                {
                    mClassifier.Priors[c] = (double)perClassRecordCounts[c] / recordCount;
                }
            }

            //if (mRejection)
            //{
            //    mClassifier.Threshold = Threshold();
            //}

            return classLogLikelihoods.Sum();
        }
Ejemplo n.º 3
0
        /// <summary>
        ///   Trains each model to recognize each of the output labels.
        /// </summary>
        /// <typeparam name="T">The type of a single training observation.</typeparam>
        /// <param name="inputs">The full set of training observations.</param>
        /// <param name="outputs">The class label associated with each observation in <paramref name="inputs"/>.</param>
        /// <returns>The sum log-likelihood for all models after training.</returns>
        /// 
        protected double Run<T>(T[] inputs, int[] outputs)
        {
            if (inputs == null) throw new ArgumentNullException("inputs");
            if (outputs == null) throw new ArgumentNullException("outputs");

            if (inputs.Length != outputs.Length)
                throw new DimensionMismatchException("outputs", 
                    "The number of inputs and outputs does not match.");

            // Every label must name a valid class of the underlying classifier.
            for (int i = 0; i < outputs.Length; i++)
                if (outputs[i] < 0 || outputs[i] >= Classifier.Classes)
                    throw new ArgumentOutOfRangeException("outputs");


            int classes = Classifier.Classes;
            double[] logLikelihood = new double[classes];
            int[] classCounts = new int[classes];


            // For each model, train in parallel in release builds; use a plain
            // sequential loop in debug builds so single-stepping is predictable.
#if !DEBUG
            Parallel.For(0, classes, i =>
#else
            for (int i = 0; i < classes; i++)
#endif
            {
                // We will start the class model learning problem
                var args = new GenerativeLearningEventArgs(i, classes);
                OnGenerativeClassModelLearningStarted(args);

                // Select the input/output set corresponding
                //  to the model's specialization class
                int[] inx = outputs.Find(y => y == i);
                T[] observations = inputs.Submatrix(inx);

                classCounts[i] = observations.Length;

                // Classes with no samples are skipped; their log-likelihood
                // entry stays at the default 0.
                if (observations.Length > 0)
                {
                    // Create and configure the learning algorithm
                    IUnsupervisedLearning teacher = Algorithm(i);

                    // Train the current model in the input/output subset.
                    // NOTE(review): "observations as Array[]" yields null when T is not
                    // an array type — confirm T is always an array type at call sites.
                    logLikelihood[i] = teacher.Run(observations as Array[]);
                }

                // Update and report progress
                OnGenerativeClassModelLearningFinished(args);
            }
            // NOTE(review): this excerpt appears truncated — the parallel-for closer,
            // the Empirical/Rejection post-processing, and the return statement are
            // missing from this snippet.
Ejemplo n.º 4
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfAudioWords"/> that pairs the given
 ///   audio feature extractor with the given clustering algorithm.
 /// </summary>
 ///
 /// <param name="extractor">The feature extractor to use.</param>
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfAudioWords(IAudioFeatureExtractor<TFeature> extractor,
                        IUnsupervisedLearning<IClassifier<double[], int>, double[], int> algorithm)
 {
     // All construction work is delegated to the shared base-class initializer.
     base.Init(extractor, algorithm);
 }
Ejemplo n.º 5
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfVisualWords"/> that pairs the given
 ///   feature detector with the given clustering algorithm.
 /// </summary>
 ///
 /// <param name="detector">The feature detector to use.</param>
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfVisualWords(IFeatureDetector<TPoint, TFeature> detector,
                         IUnsupervisedLearning<IClassifier<TFeature, int>, TFeature, int> algorithm)
 {
     // All construction work is delegated to the shared initializer.
     Init(detector, algorithm);
 }
        /// <summary>
        ///   Trains each model to recognize each of the output labels.
        /// </summary>
        /// <typeparam name="T">The type of a single training observation.</typeparam>
        /// <param name="inputs">The full set of training observations.</param>
        /// <param name="outputs">The class label associated with each observation in <paramref name="inputs"/>.</param>
        /// <returns>The sum log-likelihood for all models after training.</returns>
        /// <exception cref="ArgumentNullException">Either argument is null.</exception>
        /// <exception cref="DimensionMismatchException">Input and output lengths differ.</exception>
        /// <exception cref="ArgumentOutOfRangeException">A label is outside [0, Classes).</exception>
        protected double Run <T>(T[] inputs, int[] outputs)
        {
            if (inputs == null)
            {
                throw new ArgumentNullException("inputs");
            }

            if (outputs == null)
            {
                throw new ArgumentNullException("outputs");
            }

            if (inputs.Length != outputs.Length)
            {
                throw new DimensionMismatchException("outputs",
                                                     "The number of inputs and outputs does not match.");
            }

            // Hoist the class count: Classifier.Classes was previously re-read on
            // every iteration of the validation loop below.
            int classes = Classifier.Classes;

            for (int i = 0; i < outputs.Length; i++)
            {
                if (outputs[i] < 0 || outputs[i] >= classes)
                {
                    throw new ArgumentOutOfRangeException("outputs",
                        "Class labels should be non-negative and less than the number of classes.");
                }
            }


            double[] logLikelihood = new double[classes];
            int[]    classCounts   = new int[classes];


            // Train every class model in parallel. Each iteration writes only its
            // own slot of logLikelihood/classCounts, so no locking is required.
            Parallel.For(0, classes, i =>
            {
                // We will start the class model learning problem
                var args = new GenerativeLearningEventArgs(i, classes);
                OnGenerativeClassModelLearningStarted(args);

                // Select the input/output set corresponding
                //  to the model's specialization class
                int[] inx        = outputs.Find(y => y == i);
                T[] observations = inputs.Get(inx);

                classCounts[i] = observations.Length;

                // Classes with no samples are skipped; their log-likelihood
                // entry stays at the default 0.
                if (observations.Length > 0)
                {
                    // Create and configure the learning algorithm
                    IUnsupervisedLearning teacher = Algorithm(i);

                    // Train the current model in the input/output subset.
                    // NOTE(review): "observations as Array[]" silently yields null when
                    // T is not an array type — confirm T is always an array type here.
                    logLikelihood[i] = teacher.Run(observations as Array[]);
                }

                // Update and report progress
                OnGenerativeClassModelLearningFinished(args);
            });

            // Optionally re-estimate the class priors from the empirical label frequencies.
            if (Empirical)
            {
                for (int i = 0; i < classes; i++)
                {
                    Classifier.Priors[i] = (double)classCounts[i] / inputs.Length;
                }
            }

            if (Rejection)
            {
                Classifier.Threshold = Threshold();
            }

            // Returns the sum log-likelihood for all models.
            return(logLikelihood.Sum());
        }
Ejemplo n.º 7
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfVisualWords"/> that identifies features
 ///   with a default <see cref="SpeededUpRobustFeaturesDetector">SURF</see> detector.
 /// </summary>
 ///
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfVisualWords(IUnsupervisedLearning<IClassifier<double[], int>, double[], int> algorithm)
 {
     // Pair the default SURF detector with the caller-supplied clusterer.
     base.Init(new SpeededUpRobustFeaturesDetector(), algorithm);
 }
Ejemplo n.º 8
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfVisualWords"/> that pairs the given
 ///   feature detector with the given clustering algorithm.
 /// </summary>
 ///
 /// <param name="detector">The feature detector to use.</param>
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfVisualWords(IFeatureDetector<TPoint> detector,
                         IUnsupervisedLearning<IClassifier<double[], int>, double[], int> algorithm)
 {
     // All construction work is delegated to the shared base-class initializer.
     base.Init(detector, algorithm);
 }
Ejemplo n.º 9
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfAudioWords"/> that pairs the given
 ///   audio feature extractor with the given clustering algorithm.
 /// </summary>
 ///
 /// <param name="extractor">The feature extractor to use.</param>
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfAudioWords(IAudioFeatureExtractor<TFeature> extractor,
                        IUnsupervisedLearning<IClassifier<TPoint, int>, TPoint, int> algorithm)
 {
     // All construction work is delegated to the shared initializer.
     Init(extractor, algorithm);
 }
Ejemplo n.º 10
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfVisualWords"/> that pairs the given
 ///   image feature extractor with the given clustering algorithm.
 /// </summary>
 ///
 /// <param name="extractor">The feature extractor to use.</param>
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfVisualWords(IImageFeatureExtractor<TPoint> extractor,
                         IUnsupervisedLearning<IClassifier<double[], int>, double[], int> algorithm)
 {
     // All construction work is delegated to the shared base-class initializer.
     base.Init(extractor, algorithm);
 }
Ejemplo n.º 11
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfVisualWords"/> that pairs the given
 ///   image feature extractor with the given clustering algorithm.
 /// </summary>
 ///
 /// <param name="extractor">The feature extractor to use.</param>
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfVisualWords(IImageFeatureExtractor<TPoint> extractor,
                         IUnsupervisedLearning<IClassifier<TFeature, int>, TFeature, int> algorithm)
 {
     // All construction work is delegated to the shared initializer.
     Init(extractor, algorithm);
 }
Ejemplo n.º 12
0
 /// <summary>
 ///   Initializes a new <see cref="BagOfAudioWords"/> that extracts features
 ///   with a default <see cref="MelFrequencyCepstrumCoefficient">MFCC</see> extractor.
 /// </summary>
 ///
 /// <param name="algorithm">The clustering algorithm to use.</param>
 ///
 public BagOfAudioWords(IUnsupervisedLearning<IClassifier<double[], int>, double[], int> algorithm)
 {
     // Pair the default MFCC extractor with the caller-supplied clusterer.
     base.Init(new MelFrequencyCepstrumCoefficient(), algorithm);
 }