public void LearnTest1()
        {
            // Train a continuous-density hidden Markov model classifier to
            // tell apart a univariate ascending sequence (label 0) from the
            // same sequence reversed (label 1).
            double[][] sequences = new double[][]
            {
                new double[] { 0,1,2,3,4 }, // first sequence, label = 0
                new double[] { 4,3,2,1,0 }, // second sequence, label = 1
            };

            // Class labels, one per sequence.
            int[] labels = { 0, 1 };

            // Two class models, two ergodic states each, Normal emissions.
            var density = new NormalDistribution();
            var classifier = new HiddenMarkovClassifier<NormalDistribution, double>(2, new Ergodic(2), density);

            // One Baum-Welch learner per class model; run until the
            // log-likelihood change falls below the tolerance.
            var teacher = new HiddenMarkovClassifierLearning<NormalDistribution, double>(classifier)
            {
                Learner = modelIndex => new BaumWelchLearning<NormalDistribution, double>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0 // 0 = no iteration cap; rely on tolerance
                }
            };

            // Fit the ensemble and record the final log-likelihood.
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;

            // Classify both training sequences and read back the probability
            // that each one originated from its winning model.
            int c1 = classifier.Decide(sequences[0]);
            double likelihood1 = classifier.Probability(sequences[0]);

            int c2 = classifier.Decide(sequences[1]);
            double likelihood2 = classifier.Probability(sequences[1]);

            // Each sequence should be assigned its own label...
            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);

            // ...with the expected training log-likelihood and near-certain
            // class probabilities.
            Assert.AreEqual(-13.271981026832929, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);
        }
// ---- Example #2 ----
        // Canvas events
        private void inputCanvas_MouseUp(object sender, MouseEventArgs e)
        {
            // Normalize the raw stroke captured on the drawing canvas.
            double[][] input = Sequence.Preprocess(canvas.GetSequence());

            // Too few points to classify: hide both panels and bail out.
            if (input.Length < 5)
            {
                panelUserLabeling.Visible = false;
                panelClassification.Visible = false;
                return;
            }

            bool noModelTrained = (hmm == null && hcrf == null);
            if (noModelTrained)
            {
                // Nothing trained yet; ask the user to label the drawing.
                panelUserLabeling.Visible = true;
                panelClassification.Visible = false;
            }
            else
            {
                // Prefer the HCRF when both models are available.
                int index = (hcrf != null) ? hcrf.Decide(input) : hmm.Decide(input);
                string label = database.Classes[index];

                lbHaveYouDrawn.Text = String.Format("Have you drawn a {0}?", label);
                panelClassification.Visible = true;
                panelUserLabeling.Visible = false;
                cbClasses.SelectedItem = label;
            }
        }
    // Start is called before the first frame update
    void Start()
    {
        Debug.Log("TESTING MACHINE LEARNING");
        // Declare some training data: binary sequences for classes 0 and 1,
        // plus one extra sequence assigned to a third class.
        int[][] inputs = new int[][]
        {
            new int[] { 0, 1, 1, 0 },    // Class 0
            new int[] { 0, 0, 1, 0 },    // Class 0
            new int[] { 0, 1, 1, 1, 0 }, // Class 0
            new int[] { 0, 1, 0 },       // Class 0

            new int[] { 1, 0, 0, 1 },    // Class 1
            new int[] { 1, 1, 0, 1 },    // Class 1
            new int[] { 1, 0, 0, 0, 1 }, // Class 1
            new int[] { 1, 0, 1 },       // Class 1

            new int[] { 0, 0, 0, 0, 1, 0 }, // Class 2 (single example)
        };

        int[] outputs = new int[]
        {
            0, 0, 0, 0, // First four sequences are of class 0
            1, 1, 1, 1, // Last four sequences are of class 1
            2,          // final sequence is of class 2
        };


        // We are trying to predict three different classes
        int classes = 3;

        // Each sequence may have up to two symbols (0 or 1)
        int symbols = 2;

        // Nested models will have three states each (one entry per class)
        int[] states = new int[] { 3, 3, 3 };

        // Creates a new Hidden Markov Model Classifier with the given parameters
        HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

        // Create a new learning algorithm to train the sequence classifier
        var teacher = new HiddenMarkovClassifierLearning(classifier,

                                                         // Train each model until the log-likelihood changes less than 0.001
                                                         modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance     = 0.001,
            MaxIterations = 1000
        }
                                                         );

        // Train the sequence classifier using the algorithm
        teacher.Learn(inputs, outputs);

        // Compute the classifier answers for the given inputs
        // and log each predicted label to the Unity console.
        int[] answers = classifier.Decide(inputs);
        foreach (var item in answers)
        {
            Debug.Log(item);
        }
    }
 public void Preprocess()
 {
     // Need at least two captured points before the gesture can be
     // normalized and classified.
     if (sequence.Count <= 1)
     {
         return;
     }

     double[][] input = Sequence.Preprocess(GetSequence());

     // No trained model yet: nothing to do.
     if (hmm == null)
     {
         return;
     }

     // Classify the gesture, surface the recognized label, and reset
     // the capture buffer for the next gesture.
     int index = hmm.Decide(input);
     string label = database.Classes[index];
     text.text = "Do you Mean : " + label + "?";
     CreateObject(label);
     sequence.Clear();
 }
// ---- Example #5 ----
        private void btnLearnHMM_Click(object sender, EventArgs e)
        {
            // Nothing to learn from an empty sample grid.
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList<Sequence> samples = database.Samples;
            BindingList<String> classes = database.Classes;

            // Copy the training set out of the data-bound collections.
            double[][][] inputs = new double[samples.Count][][];
            int[] outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = samples[i].Input;
                outputs[i] = samples[i].Output;
            }

            // Learning hyper-parameters.
            int states = 5;          // hidden states per class model
            int iterations = 0;      // 0 = no iteration cap; rely on tolerance
            double tolerance = 0.01; // log-likelihood convergence threshold
            bool rejection = false;  // no threshold (rejection) model

            // One forward-topology model per class over 2-D observations.
            hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(classes.Count,
                new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());

            // Baum-Welch per class; regularization keeps the estimated
            // covariance matrices positive definite.
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(hmm)
            {
                Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(hmm.Models[i])
                {
                    Tolerance = tolerance,
                    Iterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;

            // Run the learning algorithm.
            teacher.Learn(inputs, outputs);

            // Re-classify every training instance with the fitted ensemble.
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = hmm.Decide(sample.Input);
            }

            // Color each grid row by whether it was recognized correctly.
            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output)
                    ? Color.LightGreen : Color.White;
            }

            // An HMM now exists, so HCRF training becomes available; any
            // previously-trained HCRF is stale and must be discarded.
            btnLearnHCRF.Enabled = true;
            hcrf = null;
        }
// ---- Example #6 ----
        /// <summary>
        ///   Trains a discrete HMM classifier directly through the learning
        ///   algorithm (the classifier itself is created by Learn) and then
        ///   checks the learned model's dimensions, priors and predictions.
        /// </summary>
        public void LearnTest()
        {
            #region doc_learn
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0, 1, 2, 0 },    // Class 0
                new int[] { 0, 0, 2, 0 },    // Class 0
                new int[] { 0, 1, 2, 1, 0 }, // Class 0
                new int[] { 0, 1, 2, 0 },    // Class 0

                new int[] { 1, 0, 2, 1 },    // Class 1
                new int[] { 1, 1, 2, 1 },    // Class 1
                new int[] { 1, 0, 2, 0, 1 }, // Class 1
                new int[] { 1, 0, 2, 1 },    // Class 1
            };

            int[] outputs = new int[]
            {
                0, 0, 0, 0, // First four sequences are of class 0
                1, 1, 1, 1, // Last four sequences are of class 1
            };

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning()
            {
                // Train each model until the log-likelihood changes less than 0.001
                Learner = (i) => new BaumWelchLearning()
                {
                    Tolerance      = 0.001,
                    Iterations     = 0,
                    NumberOfStates = 2,
                }
            };

            // Train the sequence classifier
            HiddenMarkovClassifier classifier = teacher.Learn(inputs, outputs);

            // Obtain classification labels for the output
            int[] predicted = classifier.Decide(inputs);

            // Obtain prediction scores for the outputs
            double[] lls = classifier.LogLikelihood(inputs);
            #endregion

            // The learned classifier should expose two classes over a
            // three-symbol alphabet (symbols 0, 1 and 2 appear above).
            Assert.AreEqual(0, classifier.NumberOfInputs);
            Assert.AreEqual(2, classifier.NumberOfOutputs);
            Assert.AreEqual(2, classifier.NumberOfClasses);
            Assert.AreEqual(3, classifier.NumberOfSymbols);

            // Each inner model was configured with 2 states and inherits the
            // 3-symbol alphabet from the training data.
            for (int i = 0; i < classifier.NumberOfClasses; i++)
            {
                Assert.AreEqual(2, classifier[i].NumberOfStates);
                Assert.AreEqual(3, classifier[i].NumberOfSymbols);
                Assert.AreEqual(1, classifier[i].NumberOfInputs);
                Assert.AreEqual(2, classifier[i].NumberOfOutputs);
            }

            // Four examples of each class: uniform class priors.
            Assert.AreEqual(0.5, classifier.Priors[0]);
            Assert.AreEqual(0.5, classifier.Priors[1]);

            // Every training sequence must be recovered exactly.
            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual   = predicted[i];
                Assert.AreEqual(expected, actual);
            }
        }
// ---- Example #7 ----
        /// <summary>
        ///   Trains a discrete HMM classifier with sequence rejection enabled
        ///   (a threshold model is built alongside the class models) and then
        ///   delegates the detailed threshold-model checks to
        ///   testThresholdModel.
        /// </summary>
        public void LearnTest2_old()
        {
            #region doc_rejection_old
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0, 0, 1, 2 },     // Class 0
                new int[] { 0, 1, 1, 2 },     // Class 0
                new int[] { 0, 0, 0, 1, 2 },  // Class 0
                new int[] { 0, 1, 2, 2, 2 },  // Class 0

                new int[] { 2, 2, 1, 0 },     // Class 1
                new int[] { 2, 2, 2, 1, 0 },  // Class 1
                new int[] { 2, 2, 2, 1, 0 },  // Class 1
                new int[] { 2, 2, 2, 2, 1 },  // Class 1
            };

            int[] outputs = new int[]
            {
                0, 0, 0, 0, // First four sequences are of class 0
                1, 1, 1, 1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // Each sequence may have up to 3 symbols (0,1,2)
            int symbols = 3;

            // Nested models will have 3 states each
            int[] states = new int[] { 3, 3 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning(classifier,

                                                             // Train each model until the log-likelihood changes less than 0.001
                                                             modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
            {
                Tolerance  = 0.001,
                Iterations = 0
            }
                                                             );

            // Enable support for sequence rejection
            teacher.Rejection = true;

            // Train the sequence classifier
            teacher.Learn(inputs, outputs);

            // Obtain prediction classes for the outputs
            int[] prediction = classifier.Decide(inputs);

            // Obtain prediction scores for the outputs
            double[] lls = classifier.LogLikelihood(inputs);
            #endregion

            // Pin the exact training log-likelihood for regression purposes.
            double likelihood = teacher.LogLikelihood;
            Assert.AreEqual(-24.857860924867815, likelihood, 1e-8);

            // Verify the structure and behavior of the threshold model.
            likelihood = testThresholdModel(inputs, outputs, classifier, likelihood);
        }
// ---- Example #8 ----
        public void LearnTest_old()
        {
            // Binary sequences: class-0 examples start with 0, class-1
            // examples start with 1.
            int[][] inputs = new int[][]
            {
                new int[] { 0, 1, 1, 0 },    // Class 0
                new int[] { 0, 0, 1, 0 },    // Class 0
                new int[] { 0, 1, 1, 1, 0 }, // Class 0
                new int[] { 0, 1, 0 },       // Class 0

                new int[] { 1, 0, 0, 1 },    // Class 1
                new int[] { 1, 1, 0, 1 },    // Class 1
                new int[] { 1, 0, 0, 0, 1 }, // Class 1
                new int[] { 1, 0, 1 },       // Class 1
            };

            // First four sequences are class 0, last four are class 1.
            int[] outputs = new int[]
            {
                0, 0, 0, 0,
                1, 1, 1, 1,
            };

            int classes = 2;                   // two labels to discriminate
            int symbols = 2;                   // alphabet is {0, 1}
            int[] states = new int[] { 2, 2 }; // two hidden states per model

            // Discrete HMM classifier with the configuration above.
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

            // One Baum-Welch learner per class model; stop once the
            // log-likelihood change drops below 0.001 (no iteration limit).
            var teacher = new HiddenMarkovClassifierLearning(classifier,
                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance  = 0.001,
                    Iterations = 0
                });

            // Fit the ensemble.
            teacher.Learn(inputs, outputs);

            // Predicted labels and per-sequence log-likelihood scores.
            int[] predicted = classifier.Decide(inputs);
            double[] lls = classifier.LogLikelihood(inputs);

            // Every training sequence must be recovered exactly.
            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(outputs[i], predicted[i]);
            }
        }
    /// <summary>
    ///   Converts the tracked hand samples into feature vectors, classifies
    ///   them with the trained HMM classifier, and displays the gesture name
    ///   that maps to the winning class index.
    /// </summary>
    /// <param name="positions">
    ///   Sampled positions; only its Count is used, to size the feature
    ///   array. NOTE(review): the loops below iterate over
    ///   rightHandPositions.Count instead — if the two buffers can differ in
    ///   length this leaves null rows or throws IndexOutOfRangeException;
    ///   confirm the caller keeps them in sync.
    /// </param>
    public void CheckRecognized(List <Vector3> positions)
    {
        Debug.Log("Checking sequence!");

        // One feature vector per sample; width depends on valuesTracked.
        double[][] points = new double[positions.Count][];
        switch (valuesTracked)
        {
        // 3 values: right-hand position only.
        case 3:
            for (int i = 0; i < rightHandPositions.Count; i++)
            {
                points[i] = new double[3] {
                    rightHandPositions[i].x, rightHandPositions[i].y, rightHandPositions[i].z
                };
            }
            break;

        // 6 values: right-hand position + rotation.
        case 6:
            for (int i = 0; i < rightHandPositions.Count; i++)
            {
                points[i] = new double[6] {
                    rightHandPositions[i].x, rightHandPositions[i].y, rightHandPositions[i].z,
                    rightHandRotations[i].x, rightHandRotations[i].y, rightHandRotations[i].z
                };
            }
            break;

        // 12 values: both hands' positions + rotations.
        case 12:
            for (int i = 0; i < rightHandPositions.Count; i++)
            {
                points[i] = new double[12] {
                    rightHandPositions[i].x, rightHandPositions[i].y, rightHandPositions[i].z,
                    rightHandRotations[i].x, rightHandRotations[i].y, rightHandRotations[i].z,
                    leftHandPositions[i].x, leftHandPositions[i].y, leftHandPositions[i].z,
                    leftHandRotations[i].x, leftHandRotations[i].y, leftHandRotations[i].z
                };
            }
            break;
        }
        // NOTE(review): no default case — for any other valuesTracked the
        // rows of points stay null and hmm.Decide will likely fail.

        // Classify and reverse-map the winning index to its gesture name.
        int    decision = hmm.Decide(points);
        string value    = string.Empty;

        foreach (KeyValuePair <string, int> item in gestureIndex)
        {
            if (item.Value == decision)
            {
                value = item.Key;
            }
        }
        // Show the recognized name in the UI and log it.
        text.text           = value;
        nameInputField.text = value;
        Debug.Log("Did you write a: " + value + "?");

        // Animation playback for the matched gesture is currently disabled.
        foreach (Gesture gesture in storedGestures)
        {
            if (gesture.name == value)
            {
                //animator.BeginAnimation(gesture.points);
            }
        }
    }
        public void LearnTest9()
        {
            // Large recorded-gesture fixture defined elsewhere in the class.
            double[][][] inputs = large_gestures;
            int[] outputs = large_outputs;

            // Learning configuration.
            int states = 5;
            int iterations = 100;
            double tolerance = 0.01;
            bool rejection = true;
            double sensitivity = 1E-85;

            // Observation dimensionality, taken from the first sample.
            int dimension = inputs[0][0].Length;

            // Two-class ensemble of forward-topology HMMs with multivariate
            // normal emissions.
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(2,
                new Forward(states), new MultivariateNormalDistribution(dimension));

            // Baum-Welch per class model; regularization keeps the covariance
            // estimates positive definite.
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(hmm)
            {
                Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(hmm.Models[i])
                {
                    Tolerance = tolerance,
                    Iterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;

            // Fit the ensemble and record the final log-likelihood.
            teacher.Learn(inputs, outputs);
            double logLikelihood = teacher.LogLikelihood;

            hmm.Sensitivity = sensitivity;

            // Every training gesture must be recognized as its own label.
            for (int i = 0; i < large_gestures.Length; i++)
            {
                int expected = large_outputs[i];
                int actual = hmm.Decide(large_gestures[i]);
                Assert.AreEqual(expected, actual);
            }
        }
        /// <summary>
        ///   Validates the structure and behavior of the classifier's
        ///   threshold (rejection) model: its transition matrix embeds each
        ///   class model's self-transitions on a block diagonal, stays
        ///   row-stochastic, and causes an unseen sequence to be rejected
        ///   (Decide returns -1).
        /// </summary>
        /// <param name="inputs">Training sequences used to fit the classifier.</param>
        /// <param name="outputs">Expected labels for <paramref name="inputs"/>.</param>
        /// <param name="classifier">A classifier trained with Rejection = true.</param>
        /// <param name="likelihood">Running likelihood value, overwritten and returned.</param>
        /// <returns>The probability of the last training sequence.</returns>
        private static double testThresholdModel(int[][] inputs, int[] outputs, HiddenMarkovClassifier<GeneralDiscreteDistribution, int> classifier, double likelihood)
        {
            var threshold = classifier.Threshold;

            // The first 3x3 block of the threshold model repeats the
            // self-transitions of class model 0...
            Assert.AreEqual(classifier.Models[0].LogTransitions[0][0], threshold.LogTransitions[0][0], 1e-10);
            Assert.AreEqual(classifier.Models[0].LogTransitions[1][1], threshold.LogTransitions[1][1], 1e-10);
            Assert.AreEqual(classifier.Models[0].LogTransitions[2][2], threshold.LogTransitions[2][2], 1e-10);

            // ...and the second 3x3 block repeats those of class model 1.
            Assert.AreEqual(classifier.Models[1].LogTransitions[0][0], threshold.LogTransitions[3][3], 1e-10);
            Assert.AreEqual(classifier.Models[1].LogTransitions[1][1], threshold.LogTransitions[4][4], 1e-10);
            Assert.AreEqual(classifier.Models[1].LogTransitions[2][2], threshold.LogTransitions[5][5], 1e-10);

            // Off-diagonal blocks must be impossible transitions (-inf in
            // log space): no path between the two embedded class models.
            for (int i = 0; i < 3; i++)
                for (int j = 3; j < 6; j++)
                    Assert.AreEqual(Double.NegativeInfinity, threshold.LogTransitions[i][j]);

            for (int i = 3; i < 6; i++)
                for (int j = 0; j < 3; j++)
                    Assert.AreEqual(Double.NegativeInfinity, threshold.LogTransitions[i][j]);

            Assert.IsFalse(Matrix.HasNaN(threshold.LogTransitions));


            classifier.Sensitivity = 0.5;

            // Will assert the models have learned the sequences correctly.
            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual = classifier.Decide(inputs[i]);
                likelihood = classifier.Probability(inputs[i]);
                Assert.AreEqual(expected, actual);
            }


            // A sequence unlike any training example: it should be rejected.
            int[] r0 = new int[] { 1, 1, 0, 0, 2 };


            double logRejection;
            int c = classifier.Decide(r0);
            logRejection = classifier.Probability(r0);

            // -1 is the rejection label.
            Assert.AreEqual(-1, c);
            Assert.AreEqual(0.99993993054384978, logRejection);

            // Pin the threshold model's own scores for the rejected sequence.
            logRejection = threshold.LogLikelihood(r0);
            Assert.AreEqual(-5.6367018741984483, logRejection);
            Assert.IsFalse(double.IsNaN(logRejection));

            threshold.Decode(r0, out logRejection);
            Assert.AreEqual(-8.1618027917853073, logRejection);
            Assert.IsFalse(double.IsNaN(logRejection));

            // Each class model's transition matrix must be row-stochastic
            // (each row of exp(A) sums to 1).
            foreach (var model in classifier.Models)
            {
                double[,] A = model.LogTransitions.ToMatrix();

                for (int i = 0; i < A.GetLength(0); i++)
                {
                    double[] row = A.Exp().GetRow(i);
                    double sum = row.Sum();
                    Assert.AreEqual(1, sum, 1e-10);
                }
            }
            // The threshold model's transition matrix must be row-stochastic
            // as well (looser tolerance).
            {
                double[,] A = classifier.Threshold.LogTransitions.ToMatrix();

                for (int i = 0; i < A.GetLength(0); i++)
                {
                    double[] row = A.GetRow(i);
                    double sum = row.Exp().Sum();
                    Assert.AreEqual(1, sum, 1e-6);
                }
            }
            return likelihood;
        }
        public void LearnTest7()
        {
            // Two bivariate sequences: an ascending diagonal (label 0) and
            // the same path reversed (label 1).
            double[][][] sequences = new double[][][]
            {
                new double[][]
                {
                    // first sequence, label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                },

                new double[][]
                {
                    // second sequence, label = 1
                    new double[] { 4,  3 },
                    new double[] { 3,  2 },
                    new double[] { 2,  1 },
                    new double[] { 1,  0 },
                    new double[] { 0, -1 },
                }
            };

            // Class labels, one per sequence.
            int[] labels = { 0, 1 };

            // 2-D multivariate normal emission density shared as the
            // initial density of both class models.
            var initialDensity = new MultivariateNormalDistribution(2);

            // Two forward-topology models with two states each.
            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
                classes: 2, topology: new Forward(2), initial: initialDensity);

            // Baum-Welch per class model. Diagonal covariances plus a small
            // regularization term keep the fitting numerically stable.
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(classifier)
            {
                Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[], NormalOptions>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0, // no iteration cap; rely on tolerance

                    FittingOptions = new NormalOptions()
                    {
                        Diagonal = true,
                        Regularization = 1e-5
                    }
                }
            };

            // Fit the ensemble and record the final log-likelihood.
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;

            // Classify both training sequences and read back their
            // probabilities under the fitted ensemble.
            int c1 = classifier.Decide(sequences[0]);
            double likelihood = classifier.Probability(sequences[0]);

            int c2 = classifier.Decide(sequences[1]);
            double likelihood2 = classifier.Probability(sequences[1]);

            // Each sequence should map to its own label...
            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);

            // ...with the expected log-likelihood and near-certain
            // probabilities, and no NaNs anywhere.
            Assert.AreEqual(-24.560663315259973, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999999998805045, likelihood, 1e-10);
            Assert.AreEqual(0.99999999998805045, likelihood2, 1e-10);

            Assert.IsFalse(double.IsNaN(logLikelihood));
            Assert.IsFalse(double.IsNaN(likelihood));
            Assert.IsFalse(double.IsNaN(likelihood2));
        }
        /// <summary>
        ///   Exercises degenerate configurations: an invalid constructor call
        ///   that must throw, and a custom topology whose initial and
        ///   transition arrays are zeroed out, which makes every sequence
        ///   impossible (log-likelihood of negative infinity, probability 0).
        /// </summary>
        public void LearnTest6()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 4,  3 },
                    new double[] { 3,  2 },
                    new double[] { 2,  1 },
                    new double[] { 1,  0 },
                    new double[] { 0, -1 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            var density = new MultivariateNormalDistribution(2);

            // This constructor call is expected to be rejected with an
            // ArgumentException (all-zero Custom topology is invalid here).
            try
            {
                new HiddenMarkovClassifier<MultivariateNormalDistribution>(
                    2, new Custom(new double[2, 2], new double[2]), density);

                Assert.Fail();
            }
            catch (ArgumentException)
            {
            }

            // Build a valid custom topology first...
            var topology = new Custom(
                new[,] { { 1 / 2.0, 1 / 2.0 }, { 1 / 2.0, 1 / 2.0 } },
                new[] { 1.0, 0.0 });

            // ...then deliberately zero out its initial and transition
            // arrays so no state sequence has any probability mass.
            Array.Clear(topology.Initial, 0, topology.Initial.Length);
            Array.Clear(topology.Transitions, 0, topology.Transitions.Length);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate mixture of Normal distributions as density.
            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
                2, topology, density);


            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(classifier)
            {
                // Train each model until the log-likelihood changes less than 0.0001
                Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,

                    FittingOptions = new NormalOptions() { Diagonal = true }
                }
            };

            // Train the sequence classifier using the algorithm
            teacher.Learn(sequences, labels);
            double logLikelihood = teacher.LogLikelihood;


            // Calculate the probability that the given
            //  sequences originated from the model
            double response1, response2;

            // Try to classify the first sequence (output should be 0)
            int c1 = classifier.Decide(sequences[0]);
            response1 = classifier.Probability(sequences[0]);

            // Try to classify the second sequence (output should be 1)
            int c2 = classifier.Decide(sequences[1]);
            response2 = classifier.Probability(sequences[1]);

            // With a zeroed topology nothing is learnable: the training
            // log-likelihood is -infinity and both probabilities are 0.
            Assert.AreEqual(double.NegativeInfinity, logLikelihood);
            Assert.AreEqual(0, response1);
            Assert.AreEqual(0, response2);
        }
    /// <summary>
    ///   Classifies the continuously-tracked sample buffer against the
    ///   trained HMM classifier and, when the best per-class log-likelihood
    ///   is non-negative, forwards the recognized gesture name to the
    ///   friend AI and the logger.
    /// </summary>
    /// <returns>
    ///   true when a gesture was recognized and dispatched; false otherwise.
    /// </returns>
    public bool ContinuousCheckRecognized()
    {
        Debug.Log("ContinuousChecking!");

        // Flatten the tracked Vector3 buffers into the feature layout the
        // classifier was trained on (3, 6 or 12 values per sample).
        // NOTE(review): no default case — for any other valuesTracked the
        // rows of points stay null; assumed unreachable, confirm.
        double[][] points = new double[constantPositions.Count][];
        switch (valuesTracked)
        {
        case 3:
            for (int i = 0; i < constantPositions.Count; i++)
            {
                points[i] = new double[3] {
                    constantPositions[i].x, constantPositions[i].y, constantPositions[i].z
                };
            }
            break;

        case 6:
            for (int i = 0; i < constantPositions.Count; i++)
            {
                points[i] = new double[6] {
                    constantPositions[i].x, constantPositions[i].y, constantPositions[i].z,
                    rightHandRotations[i].x, rightHandRotations[i].y, rightHandRotations[i].z
                };
            }
            break;

        case 12:
            for (int i = 0; i < constantPositions.Count; i++)
            {
                points[i] = new double[12] {
                    constantPositions[i].x, constantPositions[i].y, constantPositions[i].z,
                    rightHandRotations[i].x, rightHandRotations[i].y, rightHandRotations[i].z,
                    leftHandPositions[i].x, leftHandPositions[i].y, leftHandPositions[i].z,
                    leftHandRotations[i].x, leftHandRotations[i].y, leftHandRotations[i].z
                };
            }
            break;
        }

        // Find the best per-class log-likelihood. The previous version also
        // computed hmm.Probabilities(points) and hmm.Scores(points) here but
        // never used either result, so those redundant evaluations were
        // removed.
        double[] likelihoods = hmm.LogLikelihoods(points);
        double bestFit = double.NegativeInfinity;
        foreach (double ll in likelihoods)
        {
            if (ll > bestFit)
            {
                bestFit = ll;
            }
        }

        // Only accept the gesture when the best log-likelihood is
        // non-negative (same acceptance rule as before).
        if (bestFit < 0)
        {
            return false;
        }

        // Reverse-map the winning class index to its gesture name.
        int decision = hmm.Decide(points);
        string value = string.Empty;
        foreach (KeyValuePair<string, int> item in gestureIndex)
        {
            if (item.Value == decision)
            {
                value = item.Key;
            }
        }

        friendAI.RecieveCommand(value);
        logger.Log(value, true);
        Debug.Log("Did you write a: " + value + "?");
        return true;
    }