// Start is called before the first frame update
    void Start()
    {
        Debug.Log("TESTING MACHINE LEARNING");
        // Declare some training data
        int[][] inputs = new int[][]
        {
            new int[] { 0, 1, 1, 0 },    // Class 0
            new int[] { 0, 0, 1, 0 },    // Class 0
            new int[] { 0, 1, 1, 1, 0 }, // Class 0
            new int[] { 0, 1, 0 },       // Class 0

            new int[] { 1, 0, 0, 1 },    // Class 1
            new int[] { 1, 1, 0, 1 },    // Class 1
            new int[] { 1, 0, 0, 0, 1 }, // Class 1
            new int[] { 1, 0, 1 },       // Class 1

            new int[] { 0, 0, 0, 0, 1, 0 }, // Class 2
        };

        int[] outputs = new int[]
        {
            0, 0, 0, 0, // First four sequences are of class 0
            1, 1, 1, 1, // Next four sequences are of class 1
            2,          // The last sequence is of class 2
        };


        // We are trying to predict three different classes
        int classes = 3;

        // Each sequence may have up to two symbols (0 or 1)
        int symbols = 2;

        // Nested models will have three states each
        int[] states = new int[] { 3, 3, 3 };

        // Creates a new Hidden Markov Model Classifier with the given parameters
        HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

        // Create a new learning algorithm to train the sequence classifier,
        // training each model until the log-likelihood changes less than 0.001
        var teacher = new HiddenMarkovClassifierLearning(classifier,
            modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
            {
                Tolerance     = 0.001,
                MaxIterations = 1000
            });

        // Train the sequence classifier using the algorithm
        teacher.Learn(inputs, outputs);

        // Compute the classifier answers for the given inputs
        int[] answers = classifier.Decide(inputs);
        foreach (var item in answers)
        {
            Debug.Log(item);
        }
    }
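A minimal follow-up sketch that could sit at the end of Start() above (the unseen sequence is hypothetical; Decide and Probability are the Accord.NET calls already used throughout these snippets): once trained, the classifier can score a single new sequence as well as decide its class.

        // Sketch: classify one unseen sequence and log its class and posterior
        int[] unseen = new int[] { 0, 1, 1, 0, 0 };    // hypothetical input
        int decided  = classifier.Decide(unseen);      // most likely class label
        double prob  = classifier.Probability(unseen); // probability of that decision
        Debug.Log("class " + decided + " p=" + prob);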
        public static HiddenMarkovClassifier <Independent, double[]> CreateModel2_learn(out double[][][] sequences, out int[] labels)
        {
            sequences = new double[][][]
            {
                new double[][]
                {
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1.1 },
                    new double[] { 1, 2.5 },
                    new double[] { 1, 3.4 },
                    new double[] { 1, 4.7 },
                    new double[] { 2, 5.8 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 1
                    new double[] { 2, 3.2 },
                    new double[] { 2, 2.6 },
                    new double[] { 1, 1.2 },
                    new double[] { 1, 0.8 },
                    new double[] { 0, 1.1 },
                }
            };

            labels = new[] { 0, 1 };

            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            var comp1   = new GeneralDiscreteDistribution(3);
            var comp2   = new NormalDistribution(1);
            var density = new Independent(comp1, comp2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying Independent density combining a discrete and a Normal component.
            var classifier = new HiddenMarkovClassifier <Independent, double[]>(
                2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning <Independent, double[]>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.0001
                Learner = modelIndex => new BaumWelchLearning <Independent, double[]>(classifier.Models[modelIndex])
                {
                    Tolerance  = 0.0001,
                    Iterations = 0,
                }
            };

            // Train the sequence classifier using the algorithm
            //double logLikelihood = teacher.Run(sequences, labels);
            var model = teacher.Learn(sequences, labels);

            Assert.AreSame(model, classifier);

            return(classifier);
        }
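For completeness, a short usage sketch of the factory above (a sketch, using only names defined in this snippet):

            // Sketch: build the classifier and check it on its own training data
            var model  = CreateModel2_learn(out double[][][] seqs, out int[] lbls);
            int first  = model.Decide(seqs[0]);  // expected: 0 (lbls[0])
            int second = model.Decide(seqs[1]);  // expected: 1 (lbls[1])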
        public void LearnTest1()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a univariate sequence and the same sequence backwards.
            double[][] sequences = new double[][] 
            {
                new double[] { 0,1,2,3,4 }, // This is the first  sequence with label = 0
                new double[] { 4,3,2,1,0 }, // This is the second sequence with label = 1
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            NormalDistribution density = new NormalDistribution();
            var classifier = new HiddenMarkovClassifier<NormalDistribution, double>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<NormalDistribution, double>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.0001
                Learner = modelIndex => new BaumWelchLearning<NormalDistribution, double>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            };

            // Train the sequence classifier using the algorithm
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood1, likelihood2;

            // Try to classify the first sequence (output should be 0)
            int c1 = classifier.Decide(sequences[0]);
            likelihood1 = classifier.Probability(sequences[0]);

            // Try to classify the second sequence (output should be 1)
            int c2 = classifier.Decide(sequences[1]);
            likelihood2 = classifier.Probability(sequences[1]);

            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);


            Assert.AreEqual(-13.271981026832929, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);
        }
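A small extension sketch (the unseen sequence is hypothetical): since the two models capture ascending and descending dynamics, a nearby noisy ascending sequence should still be assigned class 0.

            // Sketch: an unseen, noisy ascending sequence should decide as class 0
            double[] unseen = { 0.1, 1.2, 1.9, 3.1, 4.2 };
            int decided = classifier.Decide(unseen);      // likely 0
            double p    = classifier.Probability(unseen); // posterior of that class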
    public void LearnGesture(int valuesUsed, int statesUsed)
    {
        double[][][] inputs  = new double[storedGestures.Count][][];
        int[]        outputs = new int[storedGestures.Count];

        for (int i = 0; i < inputs.Length; i++)
        {
            double[][] atemp = new double[storedGestures[i].points.Length][];
            for (int j = 0; j < storedGestures[i].points.Length; j++)
            {
                double[] btemp = new double[valuesUsed];
                for (int k = 0; k < valuesUsed; k++)
                {
                    btemp[k] = storedGestures[i].points[j][k];
                }
                atemp[j] = btemp;
            }

            inputs[i]  = atemp;
            outputs[i] = storedGestures[i].index;
        }

        List <String> classes = new List <String>();

        int states = gestureIndex.Count;
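        // Note: despite the name, 'states' here is the number of gesture classes
        // (one HMM per class); statesUsed below is the per-model state count.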

        MultivariateNormalDistribution dist = new MultivariateNormalDistribution(valuesUsed);

        hmm = new HiddenMarkovClassifier <MultivariateNormalDistribution, double[]>
                  (states, new Forward(statesUsed), dist);

        var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution, double[]>(hmm)
        {
            Learner = i => new BaumWelchLearning <MultivariateNormalDistribution, double[]>(hmm.Models[i])
            {
                Tolerance     = 0.01,
                MaxIterations = 0,

                FittingOptions = new NormalOptions()
                {
                    Regularization = 1e-5
                }
            }
        };

        teacher.Empirical = true;
        teacher.Rejection = false;

        teacher.Learn(inputs, outputs);

        Debug.Log("Sequence Learned!");
    }
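To pair with the training method above, a hedged recognition sketch (RecognizeGesture is a hypothetical helper; hmm is the field trained above):

    // Sketch: decide the class index of a newly captured gesture
    public int RecognizeGesture(double[][] capturedPoints)
    {
        // capturedPoints must use the same per-frame dimensionality (valuesUsed)
        // the classifier was trained with
        return hmm.Decide(capturedPoints);
    }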
Example #5
        private static void hmmc(int[][] inputs, int[] outputs)
        {
            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning <GeneralDiscreteDistribution, int>()
            {
                Learner = (i) => new BaumWelchLearning <GeneralDiscreteDistribution, int, GeneralDiscreteOptions>()
                {
                    Tolerance     = 0.001,
                    MaxIterations = 0
                }
            };

            // Train the sequence classifier using the algorithm
            var hmmClassifier = teacher.Learn(inputs, outputs);

            // Compute the classifier answers for the given inputs
            int[] answers = hmmClassifier.Decide(inputs);
        }
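A minimal call sketch for the helper above (a sketch; the two labeled sequences are borrowed from the earlier discrete examples):

            // Sketch: train a discrete classifier from two labeled binary sequences
            int[][] seqs = { new int[] { 0, 1, 1, 0 }, new int[] { 1, 0, 0, 1 } };
            int[]   lbls = { 0, 1 };
            hmmc(seqs, lbls);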
Example #6
        public void LearnTest_old()
        {
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0, 1, 1, 0 },    // Class 0
                new int[] { 0, 0, 1, 0 },    // Class 0
                new int[] { 0, 1, 1, 1, 0 }, // Class 0
                new int[] { 0, 1, 0 },       // Class 0

                new int[] { 1, 0, 0, 1 },    // Class 1
                new int[] { 1, 1, 0, 1 },    // Class 1
                new int[] { 1, 0, 0, 0, 1 }, // Class 1
                new int[] { 1, 0, 1 },       // Class 1
            };

            int[] outputs = new int[]
            {
                0, 0, 0, 0, // First four sequences are of class 0
                1, 1, 1, 1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // Each sequence may have up to two symbols (0 or 1)
            int symbols = 2;

            // Nested models will have two states each
            int[] states = new int[] { 2, 2 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier,
            // training each model until the log-likelihood changes less than 0.001
            var teacher = new HiddenMarkovClassifierLearning(classifier,
                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance  = 0.001,
                    Iterations = 0
                });

            // Train the sequence classifier
            teacher.Learn(inputs, outputs);

            // Obtain classification labels for the output
            int[] predicted = classifier.Decide(inputs);

            // Obtain prediction scores for the outputs
            double[] lls = classifier.LogLikelihood(inputs);

            // Will assert the models have learned the sequences correctly.
            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual   = predicted[i];
                Assert.AreEqual(expected, actual);
            }
        }
    public void LearnGesture(int valuesUsed, int modeUsed, int statesUsed)
    {
        double[][][] inputs  = new double[storedGestures.Count][][];
        int[]        outputs = new int[storedGestures.Count];

        for (int i = 0; i < inputs.Length; i++)
        {
            double[][] points = new double[storedGestures[i].points.Length][];
            switch (modeUsed)
            {
            case 3:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[3] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2]
                    };
                }
                break;

            case 33:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[6] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2],
                        storedGestures[i].points[j][6], storedGestures[i].points[j][7], storedGestures[i].points[j][8]
                    };
                }
                break;

            case 6:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[6] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2],
                        storedGestures[i].points[j][3], storedGestures[i].points[j][4], storedGestures[i].points[j][5]
                    };
                }
                break;

            case 66:
                for (int j = 0; j < storedGestures[i].points.Length; j++)
                {
                    points[j] = new double[12] {
                        storedGestures[i].points[j][0], storedGestures[i].points[j][1], storedGestures[i].points[j][2],
                        storedGestures[i].points[j][3], storedGestures[i].points[j][4], storedGestures[i].points[j][5],
                        storedGestures[i].points[j][6], storedGestures[i].points[j][7], storedGestures[i].points[j][8],
                        storedGestures[i].points[j][9], storedGestures[i].points[j][10], storedGestures[i].points[j][11]
                    };
                }
                break;
            }

            inputs[i]  = points;
            outputs[i] = storedGestures[i].index;
        }

        List <String> classes = new List <String>();

        int states = gestureIndex.Count;

        MultivariateNormalDistribution dist = new MultivariateNormalDistribution(valuesUsed);
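        // Note: valuesUsed must equal the per-frame vector length produced by the
        // switch above (3, 6, or 12 depending on modeUsed), or fitting will fail.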

        hmm = new HiddenMarkovClassifier <MultivariateNormalDistribution, double[]>
                  (states, new Forward(statesUsed), dist);

        var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution, double[]>(hmm)
        {
            Learner = i => new BaumWelchLearning <MultivariateNormalDistribution, double[]>(hmm.Models[i])
            {
                Tolerance     = 0.01,
                MaxIterations = 0,

                FittingOptions = new NormalOptions()
                {
                    Regularization = 1e-5
                }
            }
        };

        teacher.Empirical = true;
        teacher.Rejection = false;

        teacher.Learn(inputs, outputs);

        Debug.Log("Sequence Learned!");
    }
        public void learn_pendigits_normalization()
        {
            Console.WriteLine("Starting NormalQuasiNewtonHiddenLearningTest.learn_pendigits_normalization");

            using (var travis = new KeepTravisAlive())
            {
                #region doc_learn_pendigits
                // Ensure we get reproducible results
                Accord.Math.Random.Generator.Seed = 0;

                // Download the PENDIGITS dataset from UCI ML repository
                var pendigits = new Pendigits(path: Path.GetTempPath());

                // Get and pre-process the training set
                double[][][] trainInputs  = pendigits.Training.Item1;
                int[]        trainOutputs = pendigits.Training.Item2;

                // Pre-process the digits so each of them is centered and scaled
                trainInputs = trainInputs.Apply(Accord.Statistics.Tools.ZScores);
                trainInputs = trainInputs.Apply((x) => x.Subtract(x.Min())); // make them positive

                // Create some prior distributions to help initialize our parameters
                var priorC = new WishartDistribution(dimension: 2, degreesOfFreedom: 5);
                var priorM = new MultivariateNormalDistribution(dimension: 2);

                // Create a new learning algorithm for creating continuous hidden Markov model classifiers
                var teacher1 = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution, double[]>()
                {
                    // This tells the generative algorithm how to train each of the component models. Note: The learning
                    // algorithm is more efficient if all generic parameters are specified, including the fitting options
                    Learner = (i) => new BaumWelchLearning <MultivariateNormalDistribution, double[], NormalOptions>()
                    {
                        Topology = new Forward(5), // Each model will have a forward topology with 5 states

                        // Their emissions will be multivariate Normal distributions initialized using the prior distributions
                        Emissions = (j) => new MultivariateNormalDistribution(mean: priorM.Generate(), covariance: priorC.Generate()),

                        // We will train until the relative change in the average log-likelihood is less than 1e-6 between iterations
                        Tolerance     = 1e-6,
                        MaxIterations = 1000, // or until we perform 1000 iterations (which is unlikely for this dataset)

                        // We will prevent our covariance matrices from becoming degenerate by adding a small
                        // regularization value to their diagonal until they become positive-definite again:
                        FittingOptions = new NormalOptions()
                        {
                            Regularization = 1e-6
                        }
                    }
                };

                // The following line is only needed to ensure reproducible results. Please remove it to enable full parallelization
                teacher1.ParallelOptions.MaxDegreeOfParallelism = 1; // (Remove, comment, or change this line to enable full parallelism)

                // Use the learning algorithm to create a classifier
                var hmmc = teacher1.Learn(trainInputs, trainOutputs);


                // Create a new learning algorithm for creating HCRFs
                var teacher2 = new HiddenQuasiNewtonLearning <double[]>()
                {
                    Function = new MarkovMultivariateFunction(hmmc),

                    MaxIterations = 10
                };

                // The following line is only needed to ensure reproducible results. Please remove it to enable full parallelization
                teacher2.ParallelOptions.MaxDegreeOfParallelism = 1; // (Remove, comment, or change this line to enable full parallelism)

                // Use the learning algorithm to create a classifier
                var hcrf = teacher2.Learn(trainInputs, trainOutputs);

                // Compute predictions for the training set
                int[] trainPredicted = hcrf.Decide(trainInputs);

                // Check the performance of the classifier by comparing with the ground-truth:
                var    m1       = new GeneralConfusionMatrix(predicted: trainPredicted, expected: trainOutputs);
                double trainAcc = m1.Accuracy; // should be 0.66523727844482561


                // Prepare the testing set
                double[][][] testInputs  = pendigits.Testing.Item1;
                int[]        testOutputs = pendigits.Testing.Item2;

                // Apply the same normalizations
                testInputs = testInputs.Apply(Accord.Statistics.Tools.ZScores);
                testInputs = testInputs.Apply((x) => x.Subtract(x.Min())); // make them positive

                // Compute predictions for the test set
                int[] testPredicted = hcrf.Decide(testInputs);

                // Check the performance of the classifier by comparing with the ground-truth:
                var    m2      = new GeneralConfusionMatrix(predicted: testPredicted, expected: testOutputs);
                double testAcc = m2.Accuracy; // should be 0.66506538564184681
                #endregion

                Assert.AreEqual(0.66523727844482561, trainAcc, 1e-10);
                Assert.AreEqual(0.66506538564184681, testAcc, 1e-10);
            }
        }
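One optional check (a sketch, reusing the variables from the test above): the intermediate generative classifier hmmc can be scored with the same confusion-matrix machinery, giving a baseline to compare against the HCRF refinement.

                // Sketch: baseline accuracy of the generative HMM classifier alone
                int[] hmmPredicted = hmmc.Decide(trainInputs);
                var m0 = new GeneralConfusionMatrix(predicted: hmmPredicted, expected: trainOutputs);
                Console.WriteLine("HMM-only training accuracy: " + m0.Accuracy);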
Example #9
        private void btnLearnHMM_Click(object sender, EventArgs e)
        {
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList <Sequence> samples = database.Samples;
            BindingList <String>   classes = database.Classes;

            double[][][] inputs  = new double[samples.Count][][];
            int[]        outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i]  = samples[i].Input;
                outputs[i] = samples[i].Output;
            }

            int    states     = 5;
            int    iterations = 0;
            double tolerance  = 0.01;
            bool   rejection  = false;


            hmm = new HiddenMarkovClassifier <MultivariateNormalDistribution, double[]>(classes.Count,
                                                                                        new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());


            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution, double[]>(hmm)
            {
                // Train each model using the selected convergence criteria
                Learner = i => new BaumWelchLearning <MultivariateNormalDistribution, double[]>(hmm.Models[i])
                {
                    Tolerance  = tolerance,
                    Iterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;


            // Run the learning algorithm
            teacher.Learn(inputs, outputs);


            // Classify all training instances
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = hmm.Decide(sample.Input);
            }

            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
                                                 Color.LightGreen : Color.White;
            }

            btnLearnHCRF.Enabled = true;
            hcrf = null;
        }
Example #10
        public override IObservable <string> Process(IObservable <HandTracker.Result> source)
        => Observable.Defer(() =>
        {
            var database = new Database();

            using (var stream = File.OpenRead(Database))
            {
                database.Load(stream);
            }

            var samples = database.Samples;
            var classes = database.Classes;

            var inputs  = new double[samples.Count][][];
            var outputs = new int[samples.Count];

            for (var i = 0; i < inputs.Length; i++)
            {
                inputs[i]  = samples[i].Input;
                outputs[i] = samples[i].Output;
            }

            var states     = 5;
            var iterations = 0;
            var tolerance  = 0.01;
            var rejection  = false;

            var hmm = new HiddenMarkovClassifier <MultivariateNormalDistribution, double[]>(
                classes.Count,
                new Forward(states),
                new MultivariateNormalDistribution(2), classes.ToArray());

            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning <MultivariateNormalDistribution, double[]>(hmm)
            {
                // Train each model using the selected convergence criteria
                Learner = i => new BaumWelchLearning <MultivariateNormalDistribution, double[]>(hmm.Models[i])
                {
                    Tolerance     = tolerance,
                    MaxIterations = iterations,

                    FittingOptions = new NormalOptions
                    {
                        Regularization = 1e-5
                    }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;

            // Run the learning algorithm
            _ = teacher.Learn(inputs, outputs);

            // run the detection
            var triggers = source.Select(result => result.Visible).DistinctUntilChanged();
            var start    = triggers.Where(visible => visible != 0);
            var end      = triggers.Where(visible => visible == 0);

            return(source
                   .Window(start, _ => end)
                   .Select(window => window.Select(result => result.Position).ToArray()
                           .Where(points =>
            {
                if (points.Length < 2)
                {
                    return false;
                }

                var length = 0.0;
                for (var index = 1; index < points.Length; index++)
                {
                    var distance = Distance(points[index - 1], points[index]);
                    if (distance < MaxGesturesSpeed)
                    {
                        length += distance;
                    }
                }
                return length > MinGestureLength;
            })
                           .Select(points =>
            {
                var index = hmm.Decide(Preprocess(points));
                return database.Classes[index];
            })).Switch());
        });
Example #11
        public void LearnTest2()
        {
            #region doc_rejection
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0, 0, 1, 2 },     // Class 0
                new int[] { 0, 1, 1, 2 },     // Class 0
                new int[] { 0, 0, 0, 1, 2 },  // Class 0
                new int[] { 0, 1, 2, 2, 2 },  // Class 0

                new int[] { 2, 2, 1, 0 },     // Class 1
                new int[] { 2, 2, 2, 1, 0 },  // Class 1
                new int[] { 2, 2, 2, 1, 0 },  // Class 1
                new int[] { 2, 2, 2, 2, 1 },  // Class 1
            };

            int[] outputs = new int[]
            {
                0, 0, 0, 0, // First four sequences are of class 0
                1, 1, 1, 1, // Last four sequences are of class 1
            };


            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning()
            {
                Learner = (i) => new BaumWelchLearning()
                {
                    NumberOfStates = 3,
                    Tolerance      = 0.001,
                    Iterations     = 0,
                },

                Rejection = true // Enable support for sequence rejection
            };

            // Train the sequence classifier
            var classifier = teacher.Learn(inputs, outputs);

            // Obtain prediction classes for the outputs
            int[] prediction = classifier.Decide(inputs);

            // Obtain prediction scores for the outputs
            double[] lls = classifier.LogLikelihood(inputs);
            #endregion

            double likelihood = teacher.LogLikelihood;
            Assert.AreEqual(-24.857860924867815, likelihood, 1e-8);

            Assert.AreEqual(0, classifier.NumberOfInputs);
            Assert.AreEqual(2, classifier.NumberOfOutputs);
            Assert.AreEqual(2, classifier.NumberOfClasses);
            Assert.AreEqual(3, classifier.NumberOfSymbols);

            for (int i = 0; i < classifier.NumberOfClasses; i++)
            {
                Assert.AreEqual(3, classifier[i].NumberOfStates);
                Assert.AreEqual(3, classifier[i].NumberOfSymbols);
                Assert.AreEqual(1, classifier[i].NumberOfInputs);
                Assert.AreEqual(3, classifier[i].NumberOfOutputs);
            }

            Assert.AreEqual(0.5, classifier.Priors[0]);
            Assert.AreEqual(0.5, classifier.Priors[1]);

            likelihood = testThresholdModel(inputs, outputs, classifier, likelihood);
        }
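A short sketch of what rejection enables (the input below is hypothetical; in Accord.NET, training with Rejection = true adds a threshold model, and Decide can return -1 for sequences it deems out-of-class):

            // Sketch: a sequence resembling neither class may be rejected as -1
            int[] odd = { 1, 1, 1, 1, 1, 1 };    // hypothetical out-of-class input
            int answer = classifier.Decide(odd); // -1 indicates rejection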
Example #12
        public void LearnTest()
        {
            #region doc_learn
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0, 1, 2, 0 },    // Class 0
                new int[] { 0, 0, 2, 0 },    // Class 0
                new int[] { 0, 1, 2, 1, 0 }, // Class 0
                new int[] { 0, 1, 2, 0 },    // Class 0

                new int[] { 1, 0, 2, 1 },    // Class 1
                new int[] { 1, 1, 2, 1 },    // Class 1
                new int[] { 1, 0, 2, 0, 1 }, // Class 1
                new int[] { 1, 0, 2, 1 },    // Class 1
            };

            int[] outputs = new int[]
            {
                0, 0, 0, 0, // First four sequences are of class 0
                1, 1, 1, 1, // Last four sequences are of class 1
            };

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning()
            {
                // Train each model until the log-likelihood changes less than 0.001
                Learner = (i) => new BaumWelchLearning()
                {
                    Tolerance      = 0.001,
                    Iterations     = 0,
                    NumberOfStates = 2,
                }
            };

            // Train the sequence classifier
            HiddenMarkovClassifier classifier = teacher.Learn(inputs, outputs);

            // Obtain classification labels for the output
            int[] predicted = classifier.Decide(inputs);

            // Obtain prediction scores for the outputs
            double[] lls = classifier.LogLikelihood(inputs);
            #endregion

            Assert.AreEqual(0, classifier.NumberOfInputs);
            Assert.AreEqual(2, classifier.NumberOfOutputs);
            Assert.AreEqual(2, classifier.NumberOfClasses);
            Assert.AreEqual(3, classifier.NumberOfSymbols);

            for (int i = 0; i < classifier.NumberOfClasses; i++)
            {
                Assert.AreEqual(2, classifier[i].NumberOfStates);
                Assert.AreEqual(3, classifier[i].NumberOfSymbols);
                Assert.AreEqual(1, classifier[i].NumberOfInputs);
                Assert.AreEqual(2, classifier[i].NumberOfOutputs);
            }

            Assert.AreEqual(0.5, classifier.Priors[0]);
            Assert.AreEqual(0.5, classifier.Priors[1]);

            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual   = predicted[i];
                Assert.AreEqual(expected, actual);
            }
        }
        public void LearnTest6()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 4,  3 },
                    new double[] { 3,  2 },
                    new double[] { 2,  1 },
                    new double[] { 1,  0 },
                    new double[] { 0, -1 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            var density = new MultivariateNormalDistribution(2);

            try
            {
                new HiddenMarkovClassifier<MultivariateNormalDistribution>(
                    2, new Custom(new double[2, 2], new double[2]), density);

                Assert.Fail();
            }
            catch (ArgumentException)
            {
            }

            var topology = new Custom(
                new[,] { { 1 / 2.0, 1 / 2.0 }, { 1 / 2.0, 1 / 2.0 } },
                new[] { 1.0, 0.0 });

            Array.Clear(topology.Initial, 0, topology.Initial.Length);
            Array.Clear(topology.Transitions, 0, topology.Transitions.Length);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate Normal distribution as density.
            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
                2, topology, density);


            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.0001
                Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,

                    FittingOptions = new NormalOptions() { Diagonal = true }
                }
            };

            // Train the sequence classifier using the algorithm
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;


            // With the zeroed-out topology above, no state is reachable, so both
            // sequences are impossible under either model
            double response1, response2;

            // Attempt to classify the first sequence (its probability will be 0)
            int c1 = classifier.Decide(sequences[0]);
            response1 = classifier.Probability(sequences[0]);

            // Attempt to classify the second sequence (its probability will be 0)
            int c2 = classifier.Decide(sequences[1]);
            response2 = classifier.Probability(sequences[1]);

            Assert.AreEqual(double.NegativeInfinity, logLikelihood);
            Assert.AreEqual(0, response1);
            Assert.AreEqual(0, response2);
        }
        public void LearnTest5()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 4,  3 },
                    new double[] { 3,  2 },
                    new double[] { 2,  1 },
                    new double[] { 1,  0 },
                    new double[] { 0, -1 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            var density = new MultivariateNormalDistribution(2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate Normal distribution as density.
            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
                2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.0001
                Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,

                    FittingOptions = new NormalOptions() { Diagonal = true }
                }
            };

            // Train the sequence classifier using the algorithm
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood1, likelihood2;
            int c1, c2;

            // Try to classify the 1st sequence (output should be 0)
            likelihood1 = classifier.Probability(sequences[0], out c1);

            // Try to classify the 2nd sequence (output should be 1)
            likelihood2 = classifier.Probability(sequences[1], out c2);


            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);

            Assert.AreEqual(-24.560599651649841, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999999998806466, likelihood1, 1e-10);
            Assert.AreEqual(0.99999999998806466, likelihood2, 1e-10);
        }
        public void LearnTest4()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0 },
                    new double[] { 1 },
                    new double[] { 2 },
                    new double[] { 3 },
                    new double[] { 4 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 4 },
                    new double[] { 3 },
                    new double[] { 2 },
                    new double[] { 1 },
                    new double[] { 0 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            // Create a mixture of two 1-dimensional normal distributions (by default,
            // initialized with zero mean and unit covariance matrices).
            var density = new MultivariateMixture<MultivariateNormalDistribution>(
                new MultivariateNormalDistribution(1),
                new MultivariateNormalDistribution(1));

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate mixture of Normal distributions as density.
            var classifier = new HiddenMarkovClassifier<MultivariateMixture<MultivariateNormalDistribution>, double[]>(
                2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateMixture<MultivariateNormalDistribution>, double[]>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.0001
                Learner = modelIndex => new BaumWelchLearning<MultivariateMixture<MultivariateNormalDistribution>, double[]>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,
                }
            };

            // Train the sequence classifier using the algorithm
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood1, likelihood2;
            int c1, c2;

            // Try to classify the 1st sequence (output should be 0)
            likelihood1 = classifier.Probability(sequences[0], out c1);

            // Try to classify the 2nd sequence (output should be 1)
            likelihood2 = classifier.Probability(sequences[1], out c2);


            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);

            Assert.AreEqual(-13.271981026832933, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);
        }
        /// <summary>
        ///   Trains the hidden Markov classifier
        /// </summary>
        ///
        private void btnTrain_Click(object sender, EventArgs e)
        {
            DataTable source = dgvSequenceSource.DataSource as DataTable;

            if (source == null || hmmc == null)
            {
                MessageBox.Show("Please create a sequence classifier first.");
                return;
            }

            int rows = source.Rows.Count;

            // Gets the input sequences
            int[][] sequences = new int[rows][];
            int[]   labels    = new int[rows];

            // Foreach row in the datagridview
            for (int i = 0; i < rows; i++)
            {
                // Get the row at the index
                DataRow row = source.Rows[i];

                // Get the label associated with this sequence
                string label = row["Label"] as string;

                // Extract the sequence and the expected label for it
                sequences[i] = decode(row["Sequences"] as string);
                labels[i]    = hmmc.Models.Find(x => x.Tag as string == label)[0];
            }


            // Grab training parameters
            int    iterations = (int)numIterations.Value;
            double limit      = (double)numConvergence.Value;

            if (rbStopIterations.Checked)
            {
                limit = 0;
            }
            else
            {
                iterations = 0;
            }

            // Create a new hidden Markov model learning algorithm
            var teacher = new HiddenMarkovClassifierLearning(hmmc, i =>
            {
                return(new BaumWelchLearning(hmmc.Models[i])
                {
                    MaxIterations = iterations,
                    Tolerance = limit
                });
            });

            // Learn the classifier
            teacher.Learn(sequences, labels);


            // Update the GUI
            dgvModels_CurrentCellChanged(this, EventArgs.Empty);
        }
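The decode helper called above is not shown in this snippet; a minimal sketch under the assumption that sequences are stored as whitespace-separated symbol indices (requires System and System.Linq):

        // Sketch (assumption): parse "0 1 2 1" into new int[] { 0, 1, 2, 1 }
        private static int[] decode(string sequence)
        {
            return sequence.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries)
                           .Select(int.Parse)
                           .ToArray();
        }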
Example #18
        public void LearnTest2_old()
        {
            #region doc_rejection_old
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0, 0, 1, 2 },     // Class 0
                new int[] { 0, 1, 1, 2 },     // Class 0
                new int[] { 0, 0, 0, 1, 2 },  // Class 0
                new int[] { 0, 1, 2, 2, 2 },  // Class 0

                new int[] { 2, 2, 1, 0 },     // Class 1
                new int[] { 2, 2, 2, 1, 0 },  // Class 1
                new int[] { 2, 2, 2, 1, 0 },  // Class 1
                new int[] { 2, 2, 2, 2, 1 },  // Class 1
            };

            int[] outputs = new int[]
            {
                0, 0, 0, 0, // First four sequences are of class 0
                1, 1, 1, 1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // Each sequence may have up to 3 symbols (0,1,2)
            int symbols = 3;

            // Nested models will have 3 states each
            int[] states = new int[] { 3, 3 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier,
            // training each model until the log-likelihood changes less than 0.001
            var teacher = new HiddenMarkovClassifierLearning(classifier,
                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance  = 0.001,
                    Iterations = 0
                });

            // Enable support for sequence rejection
            teacher.Rejection = true;

            // Train the sequence classifier
            teacher.Learn(inputs, outputs);

            // Obtain prediction classes for the outputs
            int[] prediction = classifier.Decide(inputs);

            // Obtain prediction scores for the outputs
            double[] lls = classifier.LogLikelihood(inputs);
            #endregion

            double likelihood = teacher.LogLikelihood;
            Assert.AreEqual(-24.857860924867815, likelihood, 1e-8);

            likelihood = testThresholdModel(inputs, outputs, classifier, likelihood);
        }
        public void LearnTest7()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.

            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 4,  3 },
                    new double[] { 3,  2 },
                    new double[] { 2,  1 },
                    new double[] { 1,  0 },
                    new double[] { 0, -1 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            var initialDensity = new MultivariateNormalDistribution(2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate Normal distribution as density.
            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
                classes: 2, topology: new Forward(2), initial: initialDensity);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.0001
                Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[], NormalOptions>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,

                    FittingOptions = new NormalOptions()
                    {
                        Diagonal = true,      // only diagonal covariance matrices
                        Regularization = 1e-5 // avoid non-positive definite errors
                    }
                }
            };

            // Train the sequence classifier using the algorithm
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood, likelihood2;

            // Try to classify the first sequence (output should be 0)
            int c1 = classifier.Decide(sequences[0]);
            likelihood = classifier.Probability(sequences[0]);

            // Try to classify the second sequence (output should be 1)
            int c2 = classifier.Decide(sequences[1]);
            likelihood2 = classifier.Probability(sequences[1]);


            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);

            Assert.AreEqual(-24.560663315259973, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999999998805045, likelihood, 1e-10);
            Assert.AreEqual(0.99999999998805045, likelihood2, 1e-10);

            Assert.IsFalse(double.IsNaN(logLikelihood));
            Assert.IsFalse(double.IsNaN(likelihood));
            Assert.IsFalse(double.IsNaN(likelihood2));
        }
        public void LearnTest8()
        {
            // Declare some testing data
            int[][] inputs = new double[][]
            {
                new double[] { 0,0,1,2 },     // Class 0
                new double[] { 0,1,1,2 },     // Class 0
                new double[] { 0,0,0,1,2 },   // Class 0
                new double[] { 0,1,2,2,2 },   // Class 0

                new double[] { 2,2,1,0 },     // Class 1
                new double[] { 2,2,2,1,0 },   // Class 1
                new double[] { 2,2,2,1,0 },   // Class 1
                new double[] { 2,2,2,2,1 },   // Class 1
            }.ToInt32();

            int[] outputs = new int[]
            {
                0,0,0,0, // First four sequences are of class 0
                1,1,1,1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // Each sequence may have up to 3 symbols (0,1,2)
            int symbols = 3;

            // Nested models will have 3 states each
            int[] states = new int[] { 3, 3 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            var classifier = HiddenMarkovClassifier.CreateGeneric2(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<GeneralDiscreteDistribution, int>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.001
                Learner = modelIndex => new BaumWelchLearning<GeneralDiscreteDistribution, int>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 0
                }
            };

            // Enable support for sequence rejection
            teacher.Rejection = true;

            // Train the sequence classifier using the algorithm
            teacher.Learn(inputs, outputs);
            double likelihood = teacher.LogLikelihood;


            //Assert.AreEqual(-0.84036002169162149, likelihood);

            likelihood = testThresholdModel(inputs, outputs, classifier, likelihood);
        }
        public void LearnTest9()
        {
            double[][][] inputs = large_gestures;
            int[] outputs = large_outputs;

            int states = 5;
            int iterations = 100;
            double tolerance = 0.01;
            bool rejection = true;
            double sensitivity = 1E-85;

            int dimension = inputs[0][0].Length;

            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(2,
                new Forward(states), new MultivariateNormalDistribution(dimension));

            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(hmm)
            {
                // Train each model using the selected convergence criteria
                Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(hmm.Models[i])
                {
                    Tolerance = tolerance,
                    Iterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;

            // Run the learning algorithm
            teacher.Learn(inputs, outputs);
            double logLikelihood = teacher.LogLikelihood;

            hmm.Sensitivity = sensitivity;

            for (int i = 0; i < large_gestures.Length; i++)
            {
                int actual = hmm.Decide(large_gestures[i]);
                int expected = large_outputs[i];
                Assert.AreEqual(expected, actual);
            }
        }
        public void learn_test()
        {
            Accord.Math.Random.Generator.Seed = 0;

            #region doc_learn_1
            // Let's say we would like to do a very simple mechanism for gesture recognition.
            // In this example, we will be trying to create a classifier that can distinguish
            // between the words "hello", "car", and "wardrobe".

            // Let's say we decided to acquire some data, and we asked some people to perform
            // those words in front of a Kinect camera, and, using Microsoft's SDK, we were able
            // to capture the x and y coordinates of each hand while the word was being performed.

            // Let's say we decided to represent our frames as:
            //
            //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY }; // 4 dimensions
            //
            // Since we captured words, this means we captured sequences of frames as we described
            // above. Let's write some of those as rough examples to explain how gesture recognition
            // can be done:

            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            // Please note that a real-world example would involve *lots* of samples for each word.
            // Here, we are considering just one from each class, which is clearly sub-optimal and
            // should _never_ be done in practice. Please keep in mind that we do it this way only
            // to keep this example of how to create and use HCRFs as simple as possible.

            // These are the words we have in our vocabulary:
            double[][][] words = { hello, car, wardrobe };

            // Now, let's associate an integer label with each word. The learning
            // algorithm needs these labels to know which class each sequence belongs to.
            int[] labels = { 0, 1, 2 };

            // Create a new learning algorithm to train the hidden Markov model sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>, double[]>()
            {
                // Train each model until the log-likelihood changes less than 0.001
                Learner = (i) => new BaumWelchLearning<Independent<NormalDistribution>, double[]>()
                {
                    Topology = new Forward(5), // this value can be found by trial-and-error

                    // We will create our classifiers assuming an independent Gaussian distribution
                    // for each component in our feature vectors (a Naive Bayes-style assumption).
                    Emissions = (s) => new Independent<NormalDistribution>(dimensions: 4), // 4 dimensions

                    Tolerance     = 0.001,
                    MaxIterations = 100,

                    // Regularization is necessary here so the learning doesn't blow up when it
                    // finds only one sample per word class, but it can be useful in general too:
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                }
            };

            // PS: If you run into exceptions while trying to configure your model, you may
            //     want to disable parallel processing to get more descriptive error messages:
            // teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

            // Finally, we can run the learning algorithm!
            var    hmm           = teacher.Learn(words, labels);
            double logLikelihood = teacher.LogLikelihood;

            // At this point, the classifier should be successfully
            // able to distinguish between our three word classes:
            //
            int tc1 = hmm.Decide(hello);    // should be 0
            int tc2 = hmm.Decide(car);      // should be 1
            int tc3 = hmm.Decide(wardrobe); // should be 2
            #endregion

            Assert.AreEqual(0, tc1);
            Assert.AreEqual(1, tc2);
            Assert.AreEqual(2, tc3);

            #region doc_learn_2
            // Now, we can use the Markov classifier to initialize an HCRF
            var baseline = HiddenConditionalRandomField.FromHiddenMarkov(hmm);

            // We can check that both are equivalent, although they have
            // formulations that can be learned with different methods:
            int[] predictedLabels = baseline.Decide(words);

            #endregion

            // Verify that the HMM classifier and the HCRF baseline produce the same
            // decisions and (up to numerical tolerance) the same log-likelihoods:
            //
            for (int i = 0; i < words.Length; i++)
            {
                // Should be the same
                int expected = hmm.Decide(words[i]);
                int actual   = baseline.Decide(words[i]);

                // Should be the same
                double h0 = hmm.LogLikelihood(words[i], 0);
                double c0 = baseline.LogLikelihood(words[i], 0);

                double h1 = hmm.LogLikelihood(words[i], 1);
                double c1 = baseline.LogLikelihood(words[i], 1);

                double h2 = hmm.LogLikelihood(words[i], 2);
                double c2 = baseline.LogLikelihood(words[i], 2);

                Assert.AreEqual(expected, predictedLabels[i]);
                Assert.AreEqual(expected, actual);
                Assert.AreEqual(h0, c0, 1e-10);
                Assert.IsTrue(h1.IsRelativelyEqual(c1, 1e-10));
                Assert.IsTrue(h2.IsRelativelyEqual(c2, 1e-10));
            }

            Accord.Math.Random.Generator.Seed = 0;

            #region doc_learn_3
            // Now we can refine the HCRF with a well-established gradient-based
            // algorithm, Resilient Backpropagation (RProp) learning:

            // Create the Resilient Backpropagation learning algorithm
            var rprop = new HiddenResilientGradientLearning<double[]>()
            {
                Function = baseline.Function, // reuse the same potential function as the HMM baseline

                MaxIterations = 50,
                Tolerance     = 1e-5
            };

            // Run the algorithm and learn the models
            var hcrf = rprop.Learn(words, labels);

            // At this point, the HCRF should be successfully
            // able to distinguish between our three word classes:
            //
            int hc1 = hcrf.Decide(hello);    // should be 0
            int hc2 = hcrf.Decide(car);      // should be 1
            int hc3 = hcrf.Decide(wardrobe); // should be 2
            #endregion

            Assert.AreEqual(0, hc1);
            Assert.AreEqual(1, hc2);
            Assert.AreEqual(2, hc3);
        }
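
        // A minimal sketch (hypothetical) of applying a trained HCRF to a new, unseen
        // sequence. With only one training sample per class, as above, generalization
        // will be poor; this only illustrates the call pattern.
        private static int ClassifyWithHcrf(
            HiddenConditionalRandomField<double[]> hcrf, double[][] sequence)
        {
            // Decide returns the label of the most likely class for the sequence
            return hcrf.Decide(sequence);
        }
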
    // Update is called once per frame
    void Update()
    {
        if (Main.database.isLoadedFile && isFirst)
        {
            BindingList<Sequence> samples = Main.database.Samples;
            BindingList<string>   classes = Main.database.Classes;

            double[][][] inputs  = new double[samples.Count][][];
            int[]        outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i]  = samples[i].Input;
                outputs[i] = samples[i].Output;
            }

            int    states     = 9;
            int    iterations = 0;    // zero means no iteration limit; stop on tolerance
            double tolerance  = 0.01;
            bool   rejection  = false;


            Main.hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
                classes.Count, new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());


            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(Main.hmm)
            {
                // Train each model using the selected convergence criteria
                Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(Main.hmm.Models[i])
                {
                    Tolerance     = tolerance,
                    MaxIterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;


            // Run the learning algorithm
            teacher.Learn(inputs, outputs);


            // Classify all training instances
            foreach (var sample in Main.database.Samples)
            {
                sample.RecognizedAs = Main.hmm.Decide(sample.Input);
            }

            isFirst             = false;
            Main.IsLoadDatabase = true;
        }
    }
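
    // A minimal sketch (hypothetical, not part of the original sample) of how the
    // classifier trained in Update could be used on a gesture captured at runtime.
    // 'capturedSequence' stands in for whatever this project uses to produce a
    // double[][] of hand-coordinate frames.
    int ClassifyCapturedGesture(double[][] capturedSequence)
    {
        // Decide returns the index of the most likely gesture class
        return Main.hmm.Decide(capturedSequence);
    }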