Example No. 1
        public void SingleDependencyBeforeUpdateIsAsSmallAsPossible()
        {
            GC.Collect();
            long start = GC.GetTotalMemory(true);
            Independent <int> newIndependent = new Independent <int>();
            Dependent <int>   newDependent   = new Dependent <int>(() => newIndependent);

            newIndependent.Value = 42;
            long end = GC.GetTotalMemory(true);

            // Started at 336.
            // Making Precedent a base class: 312.
            // Removing Gain/LoseDependent events: 288.
            // Making IsUpToDate no longer a precedent: 248.
            // Custom linked list implementation for dependents: 200.
            // Custom linked list implementation for precedents: 160.
            // Other optimizations: 144.
            // Added WeakReferenceToSelf: 148.
            // Removed WeakReferenceToSelf: 124.
            Assert.AreEqual(124 + IndependentPlatformOffset, end - start);

            int value = newDependent;

            Assert.AreEqual(42, value);
        }
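Both memory-footprint tests in this collection rely on the same pattern: force a full collection, snapshot the managed heap, allocate, then snapshot again. Below is a minimal sketch of that pattern as a reusable helper; MeasureAllocation is a hypothetical name, not part of the library:

        // Hypothetical helper illustrating the measurement pattern used above.
        static long MeasureAllocation(Func<object> allocate)
        {
            GC.Collect();
            long before = GC.GetTotalMemory(forceFullCollection: true);
            object instance = allocate();   // allocate the object under test
            long after = GC.GetTotalMemory(forceFullCollection: true);
            GC.KeepAlive(instance);         // keep it alive past the second snapshot
            return after - before;
        }

Usage would look like: long bytes = MeasureAllocation(() => new Independent<int>());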
Example No. 2
        public void ConstructorTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent <NormalDistribution> target = new Independent <NormalDistribution>(p1, p2);

            Assert.AreEqual(target.Components[0], p1);
            Assert.AreEqual(target.Components[1], p2);

            Assert.AreEqual(2, target.Dimension);

            Assert.AreEqual(4.2, target.Mean[0]);
            Assert.AreEqual(7.0, target.Mean[1]);

            Assert.AreEqual(1, target.Variance[0]);
            Assert.AreEqual(4, target.Variance[1]);

            Assert.AreEqual(1, target.Covariance[0, 0]);
            Assert.AreEqual(4, target.Covariance[1, 1]);
            Assert.AreEqual(0, target.Covariance[0, 1]);
            Assert.AreEqual(0, target.Covariance[1, 0]);

            var text = target.ToString("N2", System.Globalization.CultureInfo.InvariantCulture);

            Assert.AreEqual("Independent(x0, x1; N(x0; μ = 4.20, σ² = 1.00) + N(x1; μ = 7.00, σ² = 4.00))", text);
        }
Example No. 3
        public void ProbabilityFunctionTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent <NormalDistribution> target = new Independent <NormalDistribution>(p1, p2);

            double[] x;
            double   actual, expected;

            x        = new double[] { 4.2, 7.0 };
            actual   = target.ProbabilityDensityFunction(x);
            expected = p1.ProbabilityDensityFunction(x[0]) * p2.ProbabilityDensityFunction(x[1]);
            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));

            x        = new double[] { 0.0, 0.0 };
            actual   = target.ProbabilityDensityFunction(x);
            expected = p1.ProbabilityDensityFunction(x[0]) * p2.ProbabilityDensityFunction(x[1]);
            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));

            x        = new double[] { 7.0, 4.2 };
            actual   = target.ProbabilityDensityFunction(x);
            expected = p1.ProbabilityDensityFunction(x[0]) * p2.ProbabilityDensityFunction(x[1]);
            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));
        }
Example No. 4
        public void CumulativeFunctionTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent <NormalDistribution> target = new Independent <NormalDistribution>(p1, p2);

            double[] x;
            double   actual, expected;

            x        = new double[] { 4.2, 7.0 };
            actual   = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x        = new double[] { 0.0, 0.0 };
            actual   = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x        = new double[] { 7.0, 4.2 };
            actual   = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);
        }
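Both tests above exercise the defining property of an Independent joint distribution: because the components are mutually independent, the joint density and the joint cumulative distribution function factor into products of the marginals,

    p(x_1, ..., x_n) = p_1(x_1) * p_2(x_2) * ... * p_n(x_n)
    F(x_1, ..., x_n) = F_1(x_1) * F_2(x_2) * ... * F_n(x_n)

which is exactly what the expected values compute component by component.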
        public static HiddenMarkovClassifier <Independent> CreateModel3()
        {
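            // Note: sequences2 and labels2 are fields of the test class, produced
            // by the CreateModel3(out sequences2, out labels2) overload that
            // appears further down this page.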
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            var comp1   = new GeneralDiscreteDistribution(3);
            var comp2   = new NormalDistribution(1);
            var comp3   = new NormalDistribution(2);
            var comp4   = new NormalDistribution(3);
            var comp5   = new NormalDistribution(4);
            var density = new Independent(comp1, comp2, comp3, comp4, comp5);

            // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
            // and an underlying multivariate Independent distribution as the density.
            var classifier = new HiddenMarkovClassifier <Independent>(
                2, new Forward(5), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning <Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning <Independent>(
                    classifier.Models[modelIndex])
            {
                Tolerance  = 0.0001,
                Iterations = 0,
            }
                );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences2, labels2);

            return(classifier);
        }
Example No. 6
        public void ConstructorTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

            Assert.AreEqual(target.Components[0], p1);
            Assert.AreEqual(target.Components[1], p2);

            Assert.AreEqual(2, target.Dimension);

            Assert.AreEqual(4.2, target.Mean[0]);
            Assert.AreEqual(7.0, target.Mean[1]);

            Assert.AreEqual(1, target.Variance[0]);
            Assert.AreEqual(4, target.Variance[1]);

            Assert.AreEqual(1, target.Covariance[0, 0]);
            Assert.AreEqual(4, target.Covariance[1, 1]);
            Assert.AreEqual(0, target.Covariance[0, 1]);
            Assert.AreEqual(0, target.Covariance[1, 0]);

            var text = target.ToString("N2", System.Globalization.CultureInfo.InvariantCulture);

            Assert.AreEqual("Independent(x0, x1; N(x0; μ = 4.20, σ² = 1.00) + N(x1; μ = 7.00, σ² = 4.00))", text);
        }
Example No. 7
        public void FitTest()
        {
            double[][] data =
            {
                new double[] { 1, 8 },
                new double[] { 2, 6 },
                new double[] { 5, 7 },
                new double[] { 3, 9 },
            };


            var p1 = new NormalDistribution(0, 1);
            var p2 = new NormalDistribution(0, 1);

            Independent <NormalDistribution> target = new Independent <NormalDistribution>(p1, p2);

            target.Fit(data);

            Assert.AreEqual(2.75, target.Mean[0]);
            Assert.AreEqual(7.50, target.Mean[1]);

            Assert.AreEqual(2.91666, target.Variance[0], 1e-5);
            Assert.AreEqual(1.66666, target.Variance[1], 1e-5);

            Assert.AreEqual(target.Variance[0], target.Covariance[0, 0]);
            Assert.AreEqual(target.Variance[1], target.Covariance[1, 1]);
            Assert.AreEqual(0, target.Covariance[0, 1]);
            Assert.AreEqual(0, target.Covariance[1, 0]);
        }
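The expected values can be verified by hand. For the first column, {1, 2, 5, 3}: the mean is (1 + 2 + 5 + 3) / 4 = 2.75, and the unbiased sample variance is (1.75² + 0.75² + 2.25² + 0.25²) / (4 - 1) = 8.75 / 3 ≈ 2.91666. For the second column, {8, 6, 7, 9}: the mean is 7.5 and the variance is 5 / 3 ≈ 1.66666, matching the assertions.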
Example No. 8
        public void ProbabilityFunctionTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

            double[] x;
            double actual, expected;

            x = new double[] { 4.2, 7.0 };
            actual = target.ProbabilityDensityFunction(x);
            expected = p1.ProbabilityDensityFunction(x[0]) * p2.ProbabilityDensityFunction(x[1]);
            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));

            x = new double[] { 0.0, 0.0 };
            actual = target.ProbabilityDensityFunction(x);
            expected = p1.ProbabilityDensityFunction(x[0]) * p2.ProbabilityDensityFunction(x[1]);
            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));

            x = new double[] { 7.0, 4.2 };
            actual = target.ProbabilityDensityFunction(x);
            expected = p1.ProbabilityDensityFunction(x[0]) * p2.ProbabilityDensityFunction(x[1]);
            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));
        }
        public void NumberOfFeaturesTest()
        {
            Independent initial = new Independent(
                new GeneralDiscreteDistribution(46),       // output,
                new NormalDistribution(0, 1),              // headAngle,
                new NormalDistribution(0, 1),              // handAngle,
                new NormalDistribution(0, 1),              // relHandX,
                new NormalDistribution(0, 1)               // relHandY,
                );



            var model = new HiddenMarkovClassifier <Independent>(
                classes: 13, topology: new Forward(5), initial: initial);

            var function = new MarkovMultivariateFunction(model);
            var field    = new HiddenConditionalRandomField <double[]>(function);



            int discreteDistributions   = 1;
            int continuousDistributions = 4;
            int symbols  = 46;
            int states   = 5;
            int classes  = 13;
            int expected = classes *
                           (1 + states + states * states +
                            states * discreteDistributions * symbols +
                            states * continuousDistributions * 3);

            Assert.AreEqual(expected, field.Function.Features.Length);
            Assert.AreEqual(expected, field.Function.Weights.Length);
            Assert.AreEqual(4173, expected);
        }
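The asserted total follows directly from the counts in the test: per class the formula gives 1 + 5 + 5² + 5 · 1 · 46 + 5 · 4 · 3 = 1 + 5 + 25 + 230 + 60 = 321 features, and 13 × 321 = 4173.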
Example No. 10
        public void IndependentModelExample()
        {
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };

            // Create a new 2D independent Normal distribution
            var independent = new Independent <NormalDistribution>(
                new NormalDistribution(), new NormalDistribution());

            // Estimate it!
            independent.Fit(samples);

            var a = independent.Components[0].ToString("N2"); // N(x; μ = 3.20, σ² = 7.96)
            var b = independent.Components[1].ToString("N2"); // N(x; μ = 2.20, σ² = 4.40)

            Assert.AreEqual("N(x; μ = 3.20, σ² = 7.96)", a);
            Assert.AreEqual("N(x; μ = 2.20, σ² = 4.40)", b);
        }
        private const double dist = 0.0005396; // constant that is approximately 60 m in geographic (degree) coordinates

        /// <summary>
        /// Initializes a new instance of the ClientCoordsGenerator class.
        /// </summary>
        /// <param name="nominatimClient">Nominatim client.</param>
        /// <param name="standardDeviation">Standard deviation for the two-dimensional distribution (note: unused in this implementation; the spread is fixed by the <c>dist</c> constant).</param>
        public ClientCoordsGenerator(INominatimClient nominatimClient, double standardDeviation)
        {
            _nominatimClient = nominatimClient;
            var uniform = new UniformContinuousDistribution(-dist, +dist);

            _distribution = new Independent <UniformContinuousDistribution>(uniform, uniform);
        }
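The dist constant can be sanity-checked: one degree of latitude spans roughly 111.32 km, so 0.0005396° × 111,320 m/° ≈ 60 m, consistent with its comment.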
        public static HiddenMarkovClassifier <Independent> CreateModel2(out double[][][] sequences, out int[] labels)
        {
            sequences = new double[][][]
            {
                new double[][]
                {
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1.1 },
                    new double[] { 1, 2.5 },
                    new double[] { 1, 3.4 },
                    new double[] { 1, 4.7 },
                    new double[] { 2, 5.8 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 1
                    new double[] { 2, 3.2 },
                    new double[] { 2, 2.6 },
                    new double[] { 1, 1.2 },
                    new double[] { 1, 0.8 },
                    new double[] { 0, 1.1 },
                }
            };

            labels = new[] { 0, 1 };

            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            var comp1   = new GeneralDiscreteDistribution(3);
            var comp2   = new NormalDistribution(1);
            var density = new Independent(comp1, comp2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate Independent distribution as the density.
            var classifier = new HiddenMarkovClassifier <Independent>(
                2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning <Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning <Independent>(
                    classifier.Models[modelIndex])
            {
                Tolerance  = 0.0001,
                Iterations = 0,
            }
                );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);

            Assert.AreEqual(double.NegativeInfinity, logLikelihood); // only one training sample per class

            return(classifier);
        }
Example No. 13
        public void FittingOptionsTest()
        {
            double[][] data1 =
            {
                new double[] { 0, 8 },
                new double[] { 0, 6 },
                new double[] { 0, 7 },
                new double[] { 0, 9 },
            };

            double[][] data2 =
            {
                new double[] { 8, 0 },
                new double[] { 6, 0 },
                new double[] { 7, 0 },
                new double[] { 9, 0 },
            };


            var p1 = new NormalDistribution(0, 1);
            var p2 = new NormalDistribution(0, 1);

            {
                Independent <NormalDistribution> target = new Independent <NormalDistribution>(p1, p2);

                target.Fit(data1, new IndependentOptions()
                {
                    InnerOption = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                });

                Assert.AreEqual(0.00, target.Mean[0]);
                Assert.AreEqual(7.50, target.Mean[1]);

                Assert.AreEqual(1e-5, target.Variance[0]);
                Assert.AreEqual(1.66666, target.Variance[1], 1e-5);
            }

            {
                Independent <NormalDistribution> target = new Independent <NormalDistribution>(p1, p2);

                target.Fit(data2, new IndependentOptions()
                {
                    InnerOption = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                });

                Assert.AreEqual(7.5, target.Mean[0]);
                Assert.AreEqual(0, target.Mean[1]);

                Assert.AreEqual(1.66666, target.Variance[0], 1e-5);
                Assert.AreEqual(1e-5, target.Variance[1]);
            }
        }
Example No. 14
 /// <summary>
 ///     Object hash code
 /// </summary>
 /// <returns></returns>
 public override int GetHashCode()
 {
     unchecked
     {
         var hashCode = Independent.GetHashCode();
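         // 397 is an odd prime used in the common ReSharper-style hash-combining
         // pattern; the multiplication is allowed to overflow inside `unchecked`,
         // spreading bits across the Int32 range before XOR-ing in the next field.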
         hashCode = (hashCode * 397) ^ (Enzymes != null ? Enzymes.GetHashCode() : 0);
         return(hashCode);
     }
 }
        public static HiddenMarkovClassifier<Independent> CreateModel1()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0 },
                    new double[] { 1 },
                    new double[] { 2 },
                    new double[] { 3 },
                    new double[] { 4 },
                }, 

                new double[][]
                {
                     // This is the second sequence with label = 1
                    new double[] { 4 },
                    new double[] { 3 },
                    new double[] { 2 },
                    new double[] { 1 },
                    new double[] { 0 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            NormalDistribution component = new NormalDistribution();
            Independent density = new Independent(component);
            var classifier = new HiddenMarkovClassifier<Independent>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);

            Assert.AreEqual(-13.271981026832929d, logLikelihood);

            return classifier;
        }
        public static HiddenMarkovClassifier <Independent> CreateModel1()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][]
                {
                    // This is the first  sequence with label = 0
                    new double[] { 0 },
                    new double[] { 1 },
                    new double[] { 2 },
                    new double[] { 3 },
                    new double[] { 4 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 1
                    new double[] { 4 },
                    new double[] { 3 },
                    new double[] { 2 },
                    new double[] { 1 },
                    new double[] { 0 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            NormalDistribution component = new NormalDistribution();
            Independent        density   = new Independent(component);
            var classifier = new HiddenMarkovClassifier <Independent>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                });

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);

            Assert.AreEqual(-13.271981026832929d, logLikelihood, 1e-10);

            return(classifier);
        }
Example No. 17
        public void FittingNoOptionsTest()
        {
            double[][] data1 =
            {
                new double[] { 0, 8 },
                new double[] { 0, 6 },
                new double[] { 0, 7 },
                new double[] { 0, 9 },
            };

            double[][] data2 =
            {
                new double[] { 8, 0 },
                new double[] { 6, 0 },
                new double[] { 7, 0 },
                new double[] { 9, 0 },
            };


            var p1 = new NormalDistribution(0, 1);
            var p2 = new NormalDistribution(0, 1);

            bool thrown1 = false;
            bool thrown2 = false;
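
            // Each data set has a constant column, so fitting would require a
            // Normal component with zero variance; without fitting options to
            // regularize it, Fit is expected to throw an ArgumentException.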

            try
            {
                var target = new Independent <NormalDistribution>(p1, p2);
                target.Fit(data1);
            }
            catch (ArgumentException)
            {
                thrown1 = true;
            }

            try
            {
                var target = new Independent <NormalDistribution>(p1, p2);
                target.Fit(data2);
            }
            catch (ArgumentException)
            {
                thrown2 = true;
            }

            Assert.IsTrue(thrown1);
            Assert.IsTrue(thrown2);
        }
Example No. 18
        public void IndependentIsAsSmallAsPossible()
        {
            GC.Collect();
            long start = GC.GetTotalMemory(true);
            Independent <int> newIndependent = new Independent <int>();

            newIndependent.Value = 42;
            long end = GC.GetTotalMemory(true);

            // Started at 92.
            // Making Precedent a base class: 80.
            // Removing Gain/LoseDependent events: 72.
            // Custom linked list implementation for dependents: 48.
            // Other optimizations: 40.
            // Removed WeakReferenceToSelf: 20.
            Assert.AreEqual(20 + IndependentPlatformOffset, end - start);

            int value = newIndependent;

            Assert.AreEqual(42, value);
        }
Example No. 19
        public void ConstructorTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent <NormalDistribution> target = new Independent <NormalDistribution>(p1, p2);

            Assert.AreEqual(target.Components[0], p1);
            Assert.AreEqual(target.Components[1], p2);

            Assert.AreEqual(2, target.Dimension);

            Assert.AreEqual(4.2, target.Mean[0]);
            Assert.AreEqual(7.0, target.Mean[1]);

            Assert.AreEqual(1, target.Variance[0]);
            Assert.AreEqual(4, target.Variance[1]);

            Assert.AreEqual(1, target.Covariance[0, 0]);
            Assert.AreEqual(4, target.Covariance[1, 1]);
            Assert.AreEqual(0, target.Covariance[0, 1]);
            Assert.AreEqual(0, target.Covariance[1, 0]);
        }
Example No. 20
        public void ConstructorTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

            Assert.AreEqual(target.Components[0], p1);
            Assert.AreEqual(target.Components[1], p2);

            Assert.AreEqual(2, target.Dimension);

            Assert.AreEqual(4.2, target.Mean[0]);
            Assert.AreEqual(7.0, target.Mean[1]);

            Assert.AreEqual(1, target.Variance[0]);
            Assert.AreEqual(4, target.Variance[1]);

            Assert.AreEqual(1, target.Covariance[0, 0]);
            Assert.AreEqual(4, target.Covariance[1, 1]);
            Assert.AreEqual(0, target.Covariance[0, 1]);
            Assert.AreEqual(0, target.Covariance[1, 0]);
        }
Example No. 21
        public void FittingOptionsTest()
        {
            double[][] data1 =
            {
                new double[] { 0, 8 },
                new double[] { 0, 6 },
                new double[] { 0, 7 },
                new double[] { 0, 9 },
            };

            double[][] data2 =
            {
                new double[] { 8, 0 },
                new double[] { 6, 0 },
                new double[] { 7, 0 },
                new double[] { 9, 0 },
            };


            var p1 = new NormalDistribution(0, 1);
            var p2 = new NormalDistribution(0, 1);

            {
                Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

                target.Fit(data1, new IndependentOptions()
                {
                    InnerOption = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                });

                Assert.AreEqual(0.00, target.Mean[0]);
                Assert.AreEqual(7.50, target.Mean[1]);

                Assert.AreEqual(1e-5, target.Variance[0]);
                Assert.AreEqual(1.66666, target.Variance[1], 1e-5);
            }

            {
                Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

                target.Fit(data2, new IndependentOptions()
                {
                    InnerOption = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                });

                Assert.AreEqual(7.5, target.Mean[0]);
                Assert.AreEqual(0, target.Mean[1]);

                Assert.AreEqual(1.66666, target.Variance[0], 1e-5);
                Assert.AreEqual(1e-5, target.Variance[1]);
            }
        }
        public static HiddenMarkovClassifier <Independent <NormalDistribution> > CreateModel4(out double[][][] words, out int[] labels, bool usePriors)
        {
            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            double[][] wardrobe2 =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.2, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.2 },
            };

            words = new double[][][] { hello, car, wardrobe, wardrobe2 };

            labels = new [] { 0, 1, 2, 2 };

            var initial = new Independent<NormalDistribution>(
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1));


            int numberOfWords  = 3;
            int numberOfStates = 5;

            var classifier = new HiddenMarkovClassifier<Independent<NormalDistribution>>(
                classes: numberOfWords,
                topology: new Forward(numberOfStates),
                initial: initial);

            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(classifier,

                modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,

                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                });

            if (usePriors)
            {
                teacher.Empirical = true;
            }

            double logLikelihood = teacher.Run(words, labels);

            Assert.AreEqual(208.38345600145777d, logLikelihood);

            return(classifier);
        }
        public static HiddenMarkovClassifier <Independent> CreateModel3(out double[][][] sequences2, out int[] labels2)
        {
            sequences2 = new double[][][]
            {
                new double[][]
                {
                    // This is the first  sequence with label = 0
                    new double[] { 1, 1.12, 2.41, 1.17, 9.3 },
                    new double[] { 1, 2.54, 1.45, 0.16, 4.5 },
                    new double[] { 1, 3.46, 2.63, 1.15, 9.2 },
                    new double[] { 1, 4.73, 0.41, 1.54, 5.5 },
                    new double[] { 2, 5.81, 2.42, 1.13, 9.1 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 0
                    new double[] { 0, 1.49, 2.48, 1.18, 9.37 },
                    new double[] { 1, 2.18, 1.44, 2.19, 1.56 },
                    new double[] { 1, 3.77, 2.62, 1.10, 9.25 },
                    new double[] { 2, 4.76, 5.44, 3.58, 5.54 },
                    new double[] { 2, 5.85, 2.46, 1.16, 5.13 },
                    new double[] { 2, 4.84, 5.44, 3.54, 5.52 },
                    new double[] { 2, 5.83, 3.41, 1.22, 5.11 },
                },

                new double[][]
                {
                    // This is the third sequence with label = 0
                    new double[] { 2, 1.11, 2.41, 1.12, 2.31 },
                    new double[] { 1, 2.52, 3.73, 0.12, 4.50 },
                    new double[] { 1, 3.43, 2.61, 1.24, 9.29 },
                    new double[] { 1, 4.74, 2.42, 2.55, 6.57 },
                    new double[] { 2, 5.85, 2.43, 1.16, 9.16 },
                },

                new double[][]
                {
                    // This is the fourth sequence with label = 1
                    new double[] { 0, 1.26, 5.44, 1.56, 9.55 },
                    new double[] { 2, 2.67, 5.45, 4.27, 1.54 },
                    new double[] { 1, 1.28, 3.46, 2.18, 4.13 },
                    new double[] { 1, 5.89, 2.57, 1.79, 5.02 },
                    new double[] { 0, 1.40, 2.48, 2.10, 6.41 },
                },

                new double[][]
                {
                    // This is the fifth sequence with label = 1
                    new double[] { 2, 3.21, 2.49, 1.54, 9.17 },
                    new double[] { 2, 2.62, 5.40, 4.25, 1.54 },
                    new double[] { 1, 1.53, 6.49, 2.17, 4.52 },
                    new double[] { 1, 2.84, 2.58, 1.73, 6.04 },
                    new double[] { 1, 1.45, 2.47, 2.28, 5.42 },
                    new double[] { 1, 1.46, 2.46, 2.35, 5.41 },
                },

                new double[][]
                {
                    // This is the sixth sequence with label = 1
                    new double[] { 1, 5.27, 5.45, 1.4, 9.5 },
                    new double[] { 2, 2.68, 2.54, 3.2, 2.2 },
                    new double[] { 1, 2.89, 3.83, 2.6, 4.1 },
                    new double[] { 1, 1.80, 1.32, 1.2, 4.2 },
                    new double[] { 0, 1.41, 2.41, 2.1, 6.4 },
                }
            };

            labels2 = new[] { 0, 0, 0, 1, 1, 1 };

            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            var comp1   = new GeneralDiscreteDistribution(3);
            var comp2   = new NormalDistribution(1);
            var comp3   = new NormalDistribution(2);
            var comp4   = new NormalDistribution(3);
            var comp5   = new NormalDistribution(4);
            var density = new Independent(comp1, comp2, comp3, comp4, comp5);

            // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
            // and an underlying multivariate Independent distribution as the density.
            var classifier = new HiddenMarkovClassifier <Independent>(
                2, new Forward(5), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning <Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning <Independent>(
                    classifier.Models[modelIndex])
            {
                Tolerance  = 0.0001,
                Iterations = 0,
            }
                );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences2, labels2);

            Assert.AreEqual(-3.0493028798326081d, logLikelihood, 1e-10);

            return(classifier);
        }
Example No. 24
        public void SimpleGestureRecognitionTest()
        {
            // Let's say we would like to build a very simple mechanism for
            // gesture recognition. In this example, we will be trying to
            // create a classifier that can distinguish between the words
            // "hello", "car", and "wardrobe".

            // Let's say we decided to acquire some data, and we asked some
            // people to perform those words in front of a Kinect camera, and,
            // using Microsoft's SDK, we were able to capture the x and y
            // coordinates of each hand while the word was being performed.

            // Let's say we decided to represent our frames as:
            //
            //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY };
            //
            // Since we captured words, this means we captured sequences of
            // frames as we described above. Let's write some of those as
            // rough examples to explain how gesture recognition can be done:

            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            // Here, please note that a real-world example would involve *lots*
            // of samples for each word. Here, we are considering just one from
            // each class which is clearly sub-optimal and should _never_ be done
            // in practice. For example purposes, however, please disregard this.

            // Those are the words we have in our vocabulary:
            //
            double[][][] words = { hello, car, wardrobe };

            // Now, let's associate integer labels with them. This is needed
            // for the case where there are multiple samples for each word.
            //
            int[] labels = { 0, 1, 2 };


            // We will create our classifiers assuming an independent
            // Gaussian distribution for each component in our feature
            // vectors (akin to the Naive Bayes assumption).

            Independent initial = new Independent(
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1));


            // Now, we can proceed and create our classifier.
            //
            int numberOfWords  = 3; // we are trying to distinguish between 3 words
            int numberOfStates = 5; // this value can be found by trial-and-error

            var classifier = new HiddenMarkovClassifier<Independent>(
                classes: numberOfWords,
                topology: new Forward(numberOfStates), // word classifiers should use a forward topology
                initial: initial);

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,

                    // This is necessary so the code doesn't blow up when it realizes
                    // there is only one sample per word class. But this could also be
                    // needed in normal situations as well.
                    //
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                });

            // Finally, we can run the learning algorithm!
            double logLikelihood = teacher.Run(words, labels);

            // At this point, the classifier should be successfully
            // able to distinguish between our three word classes:
            //
            int c1 = classifier.Compute(hello);
            int c2 = classifier.Compute(car);
            int c3 = classifier.Compute(wardrobe);

            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);
            Assert.AreEqual(2, c3);
        }
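Once trained, recognizing a new gesture is a single call. A minimal sketch, assuming the trained classifier from the test above; the unknownGesture frames are made-up data, loosely resembling the "hello" samples:

            double[][] unknownGesture =
            {
                new double[] { 1.0, 0.2, 0.0, 0.0 },
                new double[] { 0.0, 0.9, 0.1, 0.1 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
            };

            // Compute returns the index of the most likely word class; the out
            // parameter receives the corresponding class response (likelihood).
            double likelihood;
            int word = classifier.Compute(unknownGesture, out likelihood);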
Example No. 25
        public void CumulativeFunctionTest()
        {
            var p1 = new NormalDistribution(4.2, 1);
            var p2 = new NormalDistribution(7.0, 2);

            Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

            double[] x;
            double actual, expected;

            x = new double[] { 4.2, 7.0 };
            actual = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x = new double[] { 0.0, 0.0 };
            actual = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);

            x = new double[] { 7.0, 4.2 };
            actual = target.DistributionFunction(x);
            expected = p1.DistributionFunction(x[0]) * p2.DistributionFunction(x[1]);
            Assert.AreEqual(expected, actual);
        }
Example No. 26
        public void FitTest()
        {
            double[][] data =
            {
                new double[] { 1, 8 },
                new double[] { 2, 6 },
                new double[] { 5, 7 },
                new double[] { 3, 9 },
            };


            var p1 = new NormalDistribution(0, 1);
            var p2 = new NormalDistribution(0, 1);

            Independent<NormalDistribution> target = new Independent<NormalDistribution>(p1, p2);

            target.Fit(data);

            Assert.AreEqual(2.75, target.Mean[0]);
            Assert.AreEqual(7.50, target.Mean[1]);

            Assert.AreEqual(2.91666, target.Variance[0], 1e-5);
            Assert.AreEqual(1.66666, target.Variance[1], 1e-5);

            Assert.AreEqual(target.Variance[0], target.Covariance[0, 0]);
            Assert.AreEqual(target.Variance[1], target.Covariance[1, 1]);
            Assert.AreEqual(0, target.Covariance[0, 1]);
            Assert.AreEqual(0, target.Covariance[1, 0]);
        }
        public static HiddenMarkovClassifier<Independent> CreateModel2(out double[][][] sequences, out int[] labels)
        {
            sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1.1 },
                    new double[] { 1, 2.5 },
                    new double[] { 1, 3.4 },
                    new double[] { 1, 4.7 },
                    new double[] { 2, 5.8 },
                }, 

                new double[][]
                {
                    // This is the second sequence with label = 1
                    new double[] { 2,  3.2 },
                    new double[] { 2,  2.6 },
                    new double[] { 1,  1.2 },
                    new double[] { 1,  0.8 },
                    new double[] { 0,  1.1 },
                }
            };

            labels = new[] { 0, 1 };

            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            var comp1 = new GeneralDiscreteDistribution(3);
            var comp2 = new NormalDistribution(1);
            var density = new Independent(comp1, comp2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate Independent distribution as the density.
            var classifier = new HiddenMarkovClassifier<Independent>(
                2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(
                    classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);

            return classifier;
        }
Example No. 28
        public void LearnTest10_Independent()
        {
            // Let's say we have 2 meteorological sensors gathering data
            // from different time periods of the day. Those periods are
            // represented below:

            double[][][] data =
            {
                new double[][] // first sequence (we just repeated the measurements 
                {              //  once, so there is only one observation sequence)

                    new double[] { 1, 2 }, // Day 1, 15:00 pm
                    new double[] { 6, 7 }, // Day 1, 16:00 pm
                    new double[] { 2, 3 }, // Day 1, 17:00 pm
                    new double[] { 2, 2 }, // Day 1, 18:00 pm
                    new double[] { 9, 8 }, // Day 1, 19:00 pm
                    new double[] { 1, 0 }, // Day 1, 20:00 pm
                    new double[] { 1, 3 }, // Day 1, 21:00 pm
                    new double[] { 8, 9 }, // Day 1, 22:00 pm
                    new double[] { 3, 3 }, // Day 1, 23:00 pm
                    new double[] { 1, 3 }, // Day 2, 00:00 am
                    new double[] { 1, 1 }, // Day 2, 01:00 am
                }
            };

            // Let's assume those sensors are unrelated (for simplicity). As
            // such, let's assume the data gathered from the sensors may cluster
            // around circular centroids denoting each state the underlying system
            // might be in.
            NormalDistribution[] initial_components = 
            {
                new NormalDistribution(), // initial value for the first variable's distribution
                new NormalDistribution()  // initial value for the second variable's distribution
            };

            // Specify an initial independent Normal distribution for the samples.
            var density = new Independent<NormalDistribution>(initial_components);

            // Creates a continuous hidden Markov Model with two states organized in an Ergodic
            //  topology and an underlying independent Normal distribution as probability density.
            var model = new HiddenMarkovModel<Independent<NormalDistribution>>(new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier until the
            // difference in the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning<Independent<NormalDistribution>>(model)
            {
                Tolerance = 0.0001,
                Iterations = 0,
            };

            // Fit the model
            double error = teacher.Run(data);

            // Get the hidden state associated with each observation
            //
            double logLikelihood; // log-likelihood of the Viterbi path
            int[] hidden_states = model.Decode(data[0], out logLikelihood);

            Assert.AreEqual(-33.978800850637882, error);
            Assert.AreEqual(-33.9788008509802, logLikelihood);
            Assert.AreEqual(11, hidden_states.Length);
        }
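As a follow-up, a minimal sketch of scoring a new observation sequence against the trained model from the test above; the sequence itself is made up:

            double[][] newSequence =
            {
                new double[] { 1, 2 },
                new double[] { 7, 8 },
                new double[] { 2, 3 },
            };

            // Evaluate returns the log-likelihood of the sequence under the
            // model; sequences that match the learned dynamics score higher.
            double score = model.Evaluate(newSequence);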
Example No. 29
        public void SingleDependencyBeforeUpdateIsAsSmallAsPossible()
        {
            GC.Collect();
            long start = GC.GetTotalMemory(true);
            Independent<int> newIndependent = new Independent<int>();
            Dependent<int> newDependent = new Dependent<int>(() => newIndependent);
            newIndependent.Value = 42;
            long end = GC.GetTotalMemory(true);

            // Started at 336.
            // Making Precedent a base class: 312.
            // Removing Gain/LoseDependent events: 288.
            // Making IsUpToDate no longer a precedent: 248.
            // Custom linked list implementation for dependents: 200.
            // Custom linked list implementation for precedents: 160.
            // Other optimizations: 144.
            // Added WeakReferenceToSelf: 148.
            // Removed WeakReferenceToSelf: 124.
            Assert.AreEqual(124 + IndependentPlatformOffset, end - start);

            int value = newDependent;
            Assert.AreEqual(42, value);
        }
Example No. 30
 /// <summary>
 ///   Initializes a new instance of the <see cref="MetropolisHasting"/> algorithm.
 /// </summary>
 ///
 /// <param name="dimensions">The number of dimensions in each observation.</param>
 /// <param name="logDensity">A function specifying the log probability density of the distribution to be sampled.</param>
 /// <param name="proposal">The proposal distribution that is used to generate new parameter samples to be explored.</param>
 ///
 public MetropolisHasting(int dimensions, Func <double[], double> logDensity, Independent <NormalDistribution> proposal)
     : base(dimensions, logDensity, proposal)
 {
 }
Example No. 31
        public void FittingNoOptionsTest()
        {
            double[][] data1 =
            {
                new double[] { 0, 8 },
                new double[] { 0, 6 },
                new double[] { 0, 7 },
                new double[] { 0, 9 },
            };

            double[][] data2 =
            {
                new double[] { 8, 0 },
                new double[] { 6, 0 },
                new double[] { 7, 0 },
                new double[] { 9, 0 },
            };


            var p1 = new NormalDistribution(0, 1);
            var p2 = new NormalDistribution(0, 1);

            bool thrown1 = false;
            bool thrown2 = false;

            try
            {
                var target = new Independent<NormalDistribution>(p1, p2);
                target.Fit(data1);
            }
            catch (ArgumentException)
            {
                thrown1 = true;
            }

            try
            {
                var target = new Independent<NormalDistribution>(p1, p2);
                target.Fit(data2);
            }
            catch (ArgumentException)
            {
                thrown2 = true;
            }

            Assert.IsTrue(thrown1);
            Assert.IsTrue(thrown2);
        }
        public static HiddenMarkovClassifier<Independent> CreateModel3(out double[][][] sequences2, out int[] labels2)
        {
            sequences2 = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 1, 1.12, 2.41, 1.17, 9.3 },
                    new double[] { 1, 2.54, 1.45, 0.16, 4.5 },
                    new double[] { 1, 3.46, 2.63, 1.15, 9.2 },
                    new double[] { 1, 4.73, 0.41, 1.54, 5.5 },
                    new double[] { 2, 5.81, 2.42, 1.13, 9.1 },
                }, 

                new double[][] 
                { 
                    // This is the second sequence with label = 0
                    new double[] { 0, 1.49, 2.48, 1.18, 9.37 },
                    new double[] { 1, 2.18, 1.44, 2.19, 1.56 },
                    new double[] { 1, 3.77, 2.62, 1.10, 9.25 },
                    new double[] { 2, 4.76, 5.44, 3.58, 5.54 },
                    new double[] { 2, 5.85, 2.46, 1.16, 5.13 },
                    new double[] { 2, 4.84, 5.44, 3.54, 5.52 },
                    new double[] { 2, 5.83, 3.41, 1.22, 5.11 },
                }, 

                new double[][] 
                { 
                    // This is the third sequence with label = 0
                    new double[] { 2, 1.11, 2.41, 1.12, 2.31 },
                    new double[] { 1, 2.52, 3.73, 0.12, 4.50 },
                    new double[] { 1, 3.43, 2.61, 1.24, 9.29 },
                    new double[] { 1, 4.74, 2.42, 2.55, 6.57 },
                    new double[] { 2, 5.85, 2.43, 1.16, 9.16 },
                }, 

                new double[][]
                {
                    // This is the fourth sequence with label = 1
                    new double[] { 0,  1.26, 5.44, 1.56, 9.55 },
                    new double[] { 2,  2.67, 5.45, 4.27, 1.54 },
                    new double[] { 1,  1.28, 3.46, 2.18, 4.13 },
                    new double[] { 1,  5.89, 2.57, 1.79, 5.02 },
                    new double[] { 0,  1.40, 2.48, 2.10, 6.41 },
                },

                new double[][]
                {
                    // This is the fifth sequence with label = 1
                    new double[] { 2,  3.21, 2.49, 1.54, 9.17 },
                    new double[] { 2,  2.62, 5.40, 4.25, 1.54 },
                    new double[] { 1,  1.53, 6.49, 2.17, 4.52 },
                    new double[] { 1,  2.84, 2.58, 1.73, 6.04 },
                    new double[] { 1,  1.45, 2.47, 2.28, 5.42 },
                    new double[] { 1,  1.46, 2.46, 2.35, 5.41 },
                },

                new double[][]
                {
                    // This is the sixth sequence with label = 1
                    new double[] { 1,  5.27, 5.45, 1.4, 9.5 },
                    new double[] { 2,  2.68, 2.54, 3.2, 2.2 },
                    new double[] { 1,  2.89, 3.83, 2.6, 4.1 },
                    new double[] { 1,  1.80, 1.32, 1.2, 4.2 },
                    new double[] { 0,  1.41, 2.41, 2.1, 6.4 },
                }
            };

            labels2 = new[] { 0, 0, 0, 1, 1, 1 };

            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            var comp1 = new GeneralDiscreteDistribution(3);
            var comp2 = new NormalDistribution(1);
            var comp3 = new NormalDistribution(2);
            var comp4 = new NormalDistribution(3);
            var comp5 = new NormalDistribution(4);
            var density = new Independent(comp1, comp2, comp3, comp4, comp5);

            // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
            // and an underlying multivariate Independent distribution as the density.
            var classifier = new HiddenMarkovClassifier<Independent>(
                2, new Forward(5), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(
                    classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences2, labels2);

            return classifier;
        }
        public void SimpleGestureRecognitionTest()
        {
            // Let's say we would like to build a very simple mechanism for
            // gesture recognition. In this example, we will be trying to
            // create a classifier that can distinguish between the words
            // "hello", "car", and "wardrobe".

            // Let's say we decided to acquire some data, and we asked some
            // people to perform those words in front of a Kinect camera, and,
            // using Microsoft's SDK, we were able to capture the x and y
            // coordinates of each hand while the word was being performed.

            // Let's say we decided to represent our frames as:
            //
            //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY };
            //
            // Since we captured words, this means we captured sequences of
            // frames as we described above. Let's write some of those as
            // rough examples to explain how gesture recognition can be done:

            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            // Please note that a real-world example would involve *lots*
            // of samples for each word. Here, we are considering just one from
            // each class, which is clearly sub-optimal and should _never_ be done
            // in practice. For example purposes, however, please disregard this.

            // Those are the words we have in our vocabulary:
            //
            double[][][] words = { hello, car, wardrobe };

            // Now, let's associate integer labels with them. This is needed
            // for the case where there are multiple samples for each word.
            //
            int[] labels = { 0, 1, 2 };


            // We will create our classifiers assuming an independent
            // Gaussian distribution for each component in our feature
            // vectors (similar to the Naive Bayes assumption).

            var initial = new Independent<NormalDistribution>
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );


            // Now, we can proceed and create our classifier.
            //
            int numberOfWords  = 3; // we are trying to distinguish between 3 words
            int numberOfStates = 5; // this value can be found by trial-and-error

            var hmm = new HiddenMarkovClassifier<Independent<NormalDistribution>>
            (
                classes: numberOfWords,
                topology: new Forward(numberOfStates), // word classifiers should use a forward topology
                initial: initial
            );

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(hmm,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(hmm.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,

                    // This is necessary so the code doesn't blow up when it realizes
                    // there is only one sample per word class. But it can also be
                    // needed in normal situations.
                    //
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                }
            );

            // Finally, we can run the learning algorithm!
            double logLikelihood = teacher.Run(words, labels);

            // At this point, the classifier should be successfully
            // able to distinguish between our three word classes:
            //
            int tc1 = hmm.Compute(hello);
            int tc2 = hmm.Compute(car);
            int tc3 = hmm.Compute(wardrobe);

            Assert.AreEqual(0, tc1);
            Assert.AreEqual(1, tc2);
            Assert.AreEqual(2, tc3);

            // Now, we can use the Markov classifier to initialize a HCRF
            var function = new MarkovMultivariateFunction(hmm);
            var hcrf     = new HiddenConditionalRandomField<double[]>(function);


            // We can check that both are equivalent, although they have
            // formulations that can be learned with different methods
            //
            for (int i = 0; i < words.Length; i++)
            {
                // Should be the same
                int expected = hmm.Compute(words[i]);
                int actual   = hcrf.Compute(words[i]);

                // Should be the same
                double h0 = hmm.LogLikelihood(words[i], 0);
                double c0 = hcrf.LogLikelihood(words[i], 0);

                double h1 = hmm.LogLikelihood(words[i], 1);
                double c1 = hcrf.LogLikelihood(words[i], 1);

                double h2 = hmm.LogLikelihood(words[i], 2);
                double c2 = hcrf.LogLikelihood(words[i], 2);

                Assert.AreEqual(expected, actual);
                Assert.AreEqual(h0, c0, 1e-10);
                Assert.IsTrue(h1.IsRelativelyEqual(c1, 1e-10));
                Assert.IsTrue(h2.IsRelativelyEqual(c2, 1e-10));

                Assert.IsFalse(double.IsNaN(c0));
                Assert.IsFalse(double.IsNaN(c1));
                Assert.IsFalse(double.IsNaN(c2));
            }


            // Now we can learn the HCRF using one of the best learning
            // algorithms available, Resilient Backpropagation learning:

            // Create a learning algorithm
            var rprop = new HiddenResilientGradientLearning<double[]>(hcrf)
            {
                Iterations = 50,
                Tolerance  = 1e-5
            };

            // Run the algorithm and learn the models
            double error = rprop.Run(words, labels);

            // At this point, the HCRF should be successfully
            // able to distinguish between our three word classes:
            //
            int hc1 = hcrf.Compute(hello);
            int hc2 = hcrf.Compute(car);
            int hc3 = hcrf.Compute(wardrobe);

            Assert.AreEqual(0, hc1);
            Assert.AreEqual(1, hc2);
            Assert.AreEqual(2, hc3);
        }
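        // Editorial sketch (not from the original source): the comment above
        // notes that numberOfStates "can be found by trial-and-error". One simple
        // way to automate that search is to train one classifier per candidate
        // state count and keep the count with the highest training log-likelihood.
        // The method name and the 2..8 range are assumptions.
        public static int SelectNumberOfStatesSketch(double[][][] words, int[] labels)
        {
            int bestStates = 2;
            double bestLogLikelihood = double.NegativeInfinity;

            for (int states = 2; states <= 8; states++)
            {
                var candidate = new HiddenMarkovClassifier<Independent<NormalDistribution>>(
                    classes: 3, topology: new Forward(states),
                    initial: new Independent<NormalDistribution>(
                        new NormalDistribution(0, 1), new NormalDistribution(0, 1),
                        new NormalDistribution(0, 1), new NormalDistribution(0, 1)));

                var learner = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(
                    candidate,
                    i => new BaumWelchLearning<Independent<NormalDistribution>>(candidate.Models[i])
                    {
                        Tolerance = 0.001,
                        Iterations = 100,
                        FittingOptions = new IndependentOptions()
                        {
                            InnerOption = new NormalOptions() { Regularization = 1e-5 }
                        }
                    });

                // Higher training log-likelihood means a better fit (a proper
                // selection would score held-out validation data instead).
                double logLikelihood = learner.Run(words, labels);
                if (logLikelihood > bestLogLikelihood)
                {
                    bestLogLikelihood = logLikelihood;
                    bestStates = states;
                }
            }

            return bestStates;
        }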
        public static HiddenMarkovClassifier<Independent<NormalDistribution>> CreateModel4(out double[][][] words, out int[] labels, bool usePriors)
        {
            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            double[][] wardrobe2 =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.2, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.2 },
            };

            words = new double[][][] { hello, car, wardrobe, wardrobe2 };

            labels = new [] { 0, 1, 2, 2 };

            var initial = new Independent<NormalDistribution>
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );


            int numberOfWords = 3;  // still 3 classes: wardrobe now has two samples
            int numberOfStates = 5;

            var classifier = new HiddenMarkovClassifier<Independent<NormalDistribution>>
            (
                classes: numberOfWords,
                topology: new Forward(numberOfStates),
                initial: initial
            );

            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(classifier,

                modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,

                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions() { Regularization = 1e-5 }
                    }
                }
            );

            if (usePriors)
            {
                // Estimate the class priors from the label frequencies
                // (here 1/4, 1/4, 2/4) rather than assuming uniform priors.
                teacher.Empirical = true;
            }

            double logLikelihood = teacher.Run(words, labels);

            return classifier;
        }
        public void SaveLoadTest()
        {
            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            double[][][] words = { hello, car, wardrobe };

            int[] labels = { 0, 1, 2 };

            var initial = new Independent
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );

            int numberOfWords  = 3;
            int numberOfStates = 5;

            var classifier = new HiddenMarkovClassifier<Independent>
            (
                classes: numberOfWords,
                topology: new Forward(numberOfStates),
                initial: initial
            );

            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                }
            );

            double logLikelihood = teacher.Run(words, labels);

            var function = new MarkovMultivariateFunction(classifier);
            var hcrf     = new HiddenConditionalRandomField <double[]>(function);


            MemoryStream stream = new MemoryStream();

            hcrf.Save(stream);

            stream.Seek(0, SeekOrigin.Begin);

            var target = HiddenConditionalRandomField<double[]>.Load(stream);

            Assert.AreEqual(hcrf.Function.Factors.Length, target.Function.Factors.Length);
            for (int i = 0; i < hcrf.Function.Factors.Length; i++)
            {
                var e = hcrf.Function.Factors[i];
                var a = target.Function.Factors[i];
                Assert.AreEqual(e.Index, a.Index);
                Assert.AreEqual(e.States, a.States);

                Assert.AreEqual(e.EdgeParameters.Count, a.EdgeParameters.Count);
                Assert.AreEqual(e.EdgeParameters.Offset, a.EdgeParameters.Offset);
                Assert.AreEqual(e.FactorParameters.Count, a.FactorParameters.Count);
                Assert.AreEqual(e.FactorParameters.Offset, a.FactorParameters.Offset);

                Assert.AreEqual(e.OutputParameters.Count, a.OutputParameters.Count);
                Assert.AreEqual(e.OutputParameters.Offset, a.OutputParameters.Offset);
                Assert.AreEqual(e.StateParameters.Count, a.StateParameters.Count);
                Assert.AreEqual(e.StateParameters.Offset, a.StateParameters.Offset);

                Assert.AreEqual(target.Function, a.Owner);
                Assert.AreEqual(hcrf.Function, e.Owner);
            }

            Assert.AreEqual(hcrf.Function.Features.Length, target.Function.Features.Length);
            for (int i = 0; i < hcrf.Function.Features.Length; i++)
            {
                Assert.AreEqual(hcrf.Function.Features[i].GetType(), target.Function.Features[i].GetType());
            }

            Assert.AreEqual(hcrf.Function.Outputs, target.Function.Outputs);

            for (int i = 0; i < hcrf.Function.Weights.Length; i++)
            {
                Assert.AreEqual(hcrf.Function.Weights[i], target.Function.Weights[i]);
            }
        }
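        // Editorial aside: besides streams, Accord's serialization helpers also
        // accept a file path (treating the exact overload as an assumption
        // here), so the round-trip above could be written against disk:
        //
        //     hcrf.Save(@"hcrf.bin");
        //     var reloaded = HiddenConditionalRandomField<double[]>.Load(@"hcrf.bin");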
        public static HiddenMarkovClassifier<Independent> CreateModel3()
        {
            // Create a continuous-density hidden Markov model sequence classifier
            // to distinguish between the two classes of multivariate sequences.
            var comp1 = new GeneralDiscreteDistribution(3);
            var comp2 = new NormalDistribution(1);
            var comp3 = new NormalDistribution(2);
            var comp4 = new NormalDistribution(3);
            var comp5 = new NormalDistribution(4);
            var density = new Independent(comp1, comp2, comp3, comp4, comp5);

            // Creates a sequence classifier containing 2 hidden Markov models with 5 states
            // each and an underlying multivariate Independent density (one discrete
            // and four Normal components) as the emission distribution.
            var classifier = new HiddenMarkovClassifier<Independent>(
                2, new Forward(5), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(
                    classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences2, labels2);

            return classifier;
        }
        public void IndependentModelExample()
        {
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 }, 
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };

            // Create a new 2D independent Normal distribution
            var independent = new Independent<NormalDistribution>(
                new NormalDistribution(), new NormalDistribution());

            // Estimate it!
            independent.Fit(samples);

            var a = independent.Components[0].ToString("N2"); // N(x; μ = 3.20, σ² = 7.96)
            var b = independent.Components[1].ToString("N2"); // N(x; μ = 2.20, σ² = 4.40)

            Assert.AreEqual("N(x; μ = 3.20, σ² = 7.96)", a);
            Assert.AreEqual("N(x; μ = 2.20, σ² = 4.40)", b);
        }
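        // Editorial follow-up (an aside, not in the original): because the
        // components are independent, the fitted joint density factorizes into
        // the product of the marginals, so it can be evaluated at a point with:
        //
        //     double p = independent.ProbabilityDensityFunction(new double[] { 1, 2 });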
Example No. 38
        public void IndependentIsAsSmallAsPossible()
        {
            GC.Collect();
            long start = GC.GetTotalMemory(true);
            Independent<int> newIndependent = new Independent<int>();
            newIndependent.Value = 42;
            long end = GC.GetTotalMemory(true);

            // Started at 92.
            // Making Precedent a base class: 80.
            // Removing Gain/LoseDependent events: 72.
            // Custom linked list implementation for dependents: 48.
            // Other optimizations: 40.
            // Removed WeakReferenceToSelf: 20.
            Assert.AreEqual(20 + IndependentPlatformOffset, end - start);

            int value = newIndependent;
            Assert.AreEqual(42, value);
        }
        public void NumberOfFeaturesTest()
        {
            Independent initial = new Independent(
                      new GeneralDiscreteDistribution(46), // output,
                      new NormalDistribution(0, 1),        // headAngle, 
                      new NormalDistribution(0, 1),        // handAngle, 
                      new NormalDistribution(0, 1),        // relHandX, 
                      new NormalDistribution(0, 1)         // relHandY, 
            );



            var model = new HiddenMarkovClassifier<Independent>(
                classes: 13, topology: new Forward(5), initial: initial);

            var function = new MarkovMultivariateFunction(model);
            var field = new HiddenConditionalRandomField<double[]>(function);



            int discreteDistributions = 1;
            int continuousDistributions = 4;
            int symbols = 46;
            int states = 5;
            int classes = 13;

            // Per class: 1 class feature, 5 initial-state features, 5*5 = 25
            // transition features, 5*1*46 = 230 discrete emission features and
            // 5*4*3 = 60 Gaussian emission features (3 per state and component),
            // for a total of 13 * 321 = 4173 features.
            int expected = classes *
                (1 + states + states * states +
                states * discreteDistributions * symbols +
                states * continuousDistributions * 3);

            Assert.AreEqual(expected, field.Function.Features.Length);
            Assert.AreEqual(expected, field.Function.Weights.Length);
            Assert.AreEqual(4173, expected);
        }
Example No. 40
 /// <summary>
 /// Serves as a hash function for a particular type. <see cref="M:System.Object.GetHashCode"></see> is suitable for use in hashing algorithms and data structures like a hash table.
 /// </summary>
 /// <returns>
 /// A hash code for the current <see cref="T:System.Object"></see>.
 /// </returns>
 public override int GetHashCode()
 {
     return (Dependent == null ? 0 : Dependent.GetHashCode()) ^
            (Independent == null ? 0 : Independent.GetHashCode());
 }
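 // Editorial note: XOR-combining the two hash codes is simple but symmetric,
 // so a pair (a, b) hashes the same as (b, a); an order-sensitive combiner,
 // e.g. hash = hash * 31 + next, avoids those collisions when order matters.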
        public void SaveLoadTest()
        {
            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            double[][][] words = { hello, car, wardrobe };

            int[] labels = { 0, 1, 2 };

            var initial = new Independent
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );

            int numberOfWords = 3;
            int numberOfStates = 5;

            var classifier = new HiddenMarkovClassifier<Independent>
            (
               classes: numberOfWords,
               topology: new Forward(numberOfStates),
               initial: initial
            );

            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions() { Regularization = 1e-5 }
                    }
                }
            );

            double logLikelihood = teacher.Run(words, labels);

            var function = new MarkovMultivariateFunction(classifier);
            var hcrf = new HiddenConditionalRandomField<double[]>(function);


            MemoryStream stream = new MemoryStream();

            hcrf.Save(stream);

            stream.Seek(0, SeekOrigin.Begin);

            var target = HiddenConditionalRandomField<double[]>.Load(stream);

            Assert.AreEqual(hcrf.Function.Factors.Length, target.Function.Factors.Length);
            for (int i = 0; i < hcrf.Function.Factors.Length; i++)
            {
                var e = hcrf.Function.Factors[i];
                var a = target.Function.Factors[i];
                Assert.AreEqual(e.Index, a.Index);
                Assert.AreEqual(e.States, a.States);

                Assert.AreEqual(e.EdgeParameters.Count, a.EdgeParameters.Count);
                Assert.AreEqual(e.EdgeParameters.Offset, a.EdgeParameters.Offset);
                Assert.AreEqual(e.FactorParameters.Count, a.FactorParameters.Count);
                Assert.AreEqual(e.FactorParameters.Offset, a.FactorParameters.Offset);

                Assert.AreEqual(e.OutputParameters.Count, a.OutputParameters.Count);
                Assert.AreEqual(e.OutputParameters.Offset, a.OutputParameters.Offset);
                Assert.AreEqual(e.StateParameters.Count, a.StateParameters.Count);
                Assert.AreEqual(e.StateParameters.Offset, a.StateParameters.Offset);

                Assert.AreEqual(target.Function, a.Owner);
                Assert.AreEqual(hcrf.Function, e.Owner);    
            }
            
            Assert.AreEqual(hcrf.Function.Features.Length, target.Function.Features.Length);
            for (int i = 0; i < hcrf.Function.Features.Length; i++)
                Assert.AreEqual(hcrf.Function.Features[i].GetType(), target.Function.Features[i].GetType());

            Assert.AreEqual(hcrf.Function.Outputs, target.Function.Outputs);

            for (int i = 0; i < hcrf.Function.Weights.Length; i++)
                Assert.AreEqual(hcrf.Function.Weights[i], target.Function.Weights[i]);    
        }
        public void SimpleGestureRecognitionTest()
        {
            // Let's say we would like to do a very simple mechanism for
            // gesture recognition. In this example, we will be trying to
            // create a classifier that can distinguish between the words
            // "hello", "car", and "wardrobe". 
            
            // Let's say we decided to acquire some data, and we asked some
            // people to perform those words in front of a Kinect camera, and,
            // using Microsoft's SDK, we were able to capture the x and y
            // coordinates of each hand while the word was being performed.

            // Let's say we decided to represent our frames as:
            // 
            //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY };
            //
            // Since we captured words, this means we captured sequences of
            // frames as we described above. Let's write some of those as 
            // rough examples to explain how gesture recognition can be done:

            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            // Please note that a real-world example would involve *lots*
            // of samples for each word. Here, we are considering just one from
            // each class, which is clearly sub-optimal and should _never_ be done
            // in practice. For example purposes, however, please disregard this.

            // Those are the words we have in our vocabulary:
            //
            double[][][] words = { hello, car, wardrobe }; 

            // Now, let's associate integer labels with them. This is needed
            // for the case where there are multiple samples for each word.
            //
            int[] labels = { 0, 1, 2 };


            // We will create our classifiers assuming an independent
            // Gaussian distribution for each component in our feature
            // vectors (similar to the Naive Bayes assumption).

            Independent initial = new Independent
            (
                new NormalDistribution(0, 1), 
                new NormalDistribution(0, 1), 
                new NormalDistribution(0, 1), 
                new NormalDistribution(0, 1)  
            );


            // Now, we can proceed and create our classifier. 
            //
            int numberOfWords = 3;  // we are trying to distinguish between 3 words
            int numberOfStates = 5; // this value can be found by trial-and-error

            var classifier = new HiddenMarkovClassifier<Independent>
            (
               classes: numberOfWords, 
               topology: new Forward(numberOfStates), // word classifiers should use a forward topology
               initial: initial
            );

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,

                    // This is necessary so the code doesn't blow up when it realizes
                    // there is only one sample per word class. But it can also be
                    // needed in normal situations.
                    //
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions() { Regularization = 1e-5 }
                    }
                }
            );

            // Finally, we can run the learning algorithm!
            double logLikelihood = teacher.Run(words, labels);

            // At this point, the classifier should be successfully 
            // able to distinguish between our three word classes:
            //
            int c1 = classifier.Compute(hello);
            int c2 = classifier.Compute(car);
            int c3 = classifier.Compute(wardrobe);

            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);
            Assert.AreEqual(2, c3);
        }
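        // Editorial note: this variant uses the non-generic Independent, whose
        // components may be of mixed types (the earlier examples combine a
        // GeneralDiscreteDistribution with Normal distributions), whereas
        // Independent<NormalDistribution> fixes every component to be Gaussian.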
        // Stores the Independent on which this Dependent depends.
        public Dependent(Independent independent)
        {
            _Independent = independent;
        }
        public void SimpleGestureRecognitionTest()
        {
            // Let's say we would like to do a very simple mechanism for
            // gesture recognition. In this example, we will be trying to
            // create a classifier that can distinguish between the words
            // "hello", "car", and "wardrobe". 
            
            // Let's say we decided to acquire some data, and we asked some
            // people to perform those words in front of a Kinect camera, and,
            // using Microsoft's SDK, we were able to capture the x and y
            // coordinates of each hand while the word was being performed.

            // Let's say we decided to represent our frames as:
            // 
            //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY };
            //
            // Since we captured words, this means we captured sequences of
            // frames as we described above. Let's write some of those as 
            // rough examples to explain how gesture recognition can be done:

            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            // Please note that a real-world example would involve *lots*
            // of samples for each word. Here, we are considering just one from
            // each class, which is clearly sub-optimal and should _never_ be done
            // in practice. For example purposes, however, please disregard this.

            // Those are the words we have in our vocabulary:
            //
            double[][][] words = { hello, car, wardrobe }; 

            // Now, let's associate integer labels with them. This is needed
            // for the case where there are multiple samples for each word.
            //
            int[] labels = { 0, 1, 2 };


            // We will create our classifiers assuming an independent
            // Gaussian distribution for each component in our feature
            // vectors (similar to the Naive Bayes assumption).

            var initial = new Independent<NormalDistribution>
            (
                new NormalDistribution(0, 1), 
                new NormalDistribution(0, 1), 
                new NormalDistribution(0, 1), 
                new NormalDistribution(0, 1)  
            );


            // Now, we can proceed and create our classifier. 
            //
            int numberOfWords = 3;  // we are trying to distinguish between 3 words
            int numberOfStates = 5; // this value can be found by trial-and-error

            var hmm = new HiddenMarkovClassifier<Independent<NormalDistribution>>
            (
                classes: numberOfWords, 
                topology: new Forward(numberOfStates), // word classifiers should use a forward topology
                initial: initial
            );

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(hmm,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(hmm.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,

                    // This is necessary so the code doesn't blow up when it realizes
                    // there is only one sample per word class. But it can also be
                    // needed in normal situations.
                    //
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions() { Regularization = 1e-5 }
                    }
                }
            );

            // Finally, we can run the learning algorithm!
            double logLikelihood = teacher.Run(words, labels);

            // At this point, the classifier should be successfully 
            // able to distinguish between our three word classes:
            //
            int tc1 = hmm.Compute(hello);
            int tc2 = hmm.Compute(car);
            int tc3 = hmm.Compute(wardrobe);

            Assert.AreEqual(0, tc1);
            Assert.AreEqual(1, tc2);
            Assert.AreEqual(2, tc3);

            // Now, we can use the Markov classifier to initialize a HCRF
            var function = new MarkovMultivariateFunction(hmm);
            var hcrf = new HiddenConditionalRandomField<double[]>(function);


            // We can check that both are equivalent, although they have
            // formulations that can be learned with different methods
            //
            for (int i = 0; i < words.Length; i++)
            {
                // Should be the same
                int expected = hmm.Compute(words[i]);
                int actual = hcrf.Compute(words[i]);

                // Should be the same
                double h0 = hmm.LogLikelihood(words[i], 0);
                double c0 = hcrf.LogLikelihood(words[i], 0);

                double h1 = hmm.LogLikelihood(words[i], 1);
                double c1 = hcrf.LogLikelihood(words[i], 1);

                double h2 = hmm.LogLikelihood(words[i], 2);
                double c2 = hcrf.LogLikelihood(words[i], 2);

                Assert.AreEqual(expected, actual);
                Assert.AreEqual(h0, c0, 1e-10);
                Assert.IsTrue(h1.IsRelativelyEqual(c1, 1e-10));
                Assert.IsTrue(h2.IsRelativelyEqual(c2, 1e-10));

                Assert.IsFalse(double.IsNaN(c0));
                Assert.IsFalse(double.IsNaN(c1));
                Assert.IsFalse(double.IsNaN(c2));
            }


            // Now we can learn the HCRF using one of the best learning
            // algorithms available, Resilient Backpropagation learning:

            // Create a learning algorithm
            var rprop = new HiddenResilientGradientLearning<double[]>(hcrf)
            {
                Iterations = 50,
                Tolerance = 1e-5
            };

            // Run the algorithm and learn the models
            double error = rprop.Run(words, labels);

            // At this point, the HCRF should be successfully 
            // able to distinguish between our three word classes:
            //
            int hc1 = hcrf.Compute(hello);
            int hc2 = hcrf.Compute(car);
            int hc3 = hcrf.Compute(wardrobe);

            Assert.AreEqual(0, hc1);
            Assert.AreEqual(1, hc2);
            Assert.AreEqual(2, hc3);
        }
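        // Editorial sketch: if a confidence estimate is needed, Accord's
        // classifiers expose a Compute overload with an 'out' response holding
        // the likelihood of the winning class (treating the exact overload as
        // an assumption here), which allows rejecting low-confidence gestures;
        // 'threshold' below is a hypothetical application-chosen value:
        //
        //     double response;
        //     int label = hmm.Compute(hello, out response);
        //     if (response < threshold) { /* reject the gesture */ }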