Example #1
        public void LikelihoodTest()
        {
            HiddenMarkovModel hmm = DiscreteHiddenMarkovModelFunctionTest.CreateModel2();

            int states  = hmm.States;
            int symbols = hmm.Symbols;


            // CRF whose potential function is taken from the existing HMM's parameters.
            var function1 = new MarkovDiscreteFunction(hmm);
            var target1   = new ConditionalRandomField<int>(states, function1);

            // CRF with default (untrained) parameters, for comparison.
            var function2 = new MarkovDiscreteFunction(states, symbols);
            var target2   = new ConditionalRandomField<int>(states, function2);


            int[] observations;

            double a, b, la, lb;

            // Likelihood (probability) of the labeling under each model
            // (assumes the non-log Likelihood overload, so that Math.Log of
            // these values can be compared against LogLikelihood below).
            observations = new int[] { 0, 0, 1, 1, 1, 2 };
            a            = target1.Likelihood(observations, observations);
            b            = target2.Likelihood(observations, observations);
            Assert.IsTrue(a > b);

            // Log-likelihood of the same labeling.
            observations = new int[] { 0, 0, 1, 1, 1, 2 };
            la           = target1.LogLikelihood(observations, observations);
            lb           = target2.LogLikelihood(observations, observations);
            Assert.IsTrue(la > lb);

            // The log of the likelihood should agree with the log-likelihood.
            double lla = System.Math.Log(a);
            double llb = System.Math.Log(b);

            Assert.AreEqual(lla, la, 1e-6);
            Assert.AreEqual(llb, lb, 1e-6);
        }
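A minimal, self-contained usage sketch along the same lines. The 2-state/3-symbol model, the sequences, and the class name below are illustrative, not taken from the test:

        // Sketch only: builds a CRF from an HMM and scores one labeling.
        using Accord.Statistics.Models.Fields;
        using Accord.Statistics.Models.Fields.Functions;
        using Accord.Statistics.Models.Markov;

        class CrfLikelihoodSketch
        {
            static void Main()
            {
                var hmm = new HiddenMarkovModel(2, 3);

                // Wrap the HMM's parameters as the CRF's potential function.
                var function = new MarkovDiscreteFunction(hmm);
                var crf      = new ConditionalRandomField<int>(2, function);

                int[] x = { 0, 0, 1, 1, 1, 2 };
                int[] y = { 0, 0, 1, 1, 1, 2 };

                // Conditional log-likelihood of labeling y given observations x.
                double logLikelihood = crf.LogLikelihood(x, y);
            }
        }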
Example #2
        // Optimizes the model weights with L-BFGS and returns the resulting log-likelihood.
        private double run(T[][] observations, int[][] labels)
        {
            double f;      // objective value (negative log-likelihood)

            double[] g;    // gradient of the objective

            // Objective: the negative log-likelihood of the training data
            // under the candidate parameter vector.
            lbfgs.Function = parameters =>
            {
                model.Function.Weights = parameters;
                f = -model.LogLikelihood(observations, labels);
                return f;
            };

            // Gradient of the objective with respect to the weights.
            lbfgs.Gradient = parameters =>
            {
                model.Function.Weights = parameters;
                g = gradient(observations, labels);
                return g;
            };

            // Propagate the cancellation token to the optimizer.
            lbfgs.Token = Token;

            // Run L-BFGS; on success, adopt the optimized weights.
            if (lbfgs.Minimize(model.Function.Weights))
            {
                model.Function.Weights = lbfgs.Solution;
            }

            // Return the log-likelihood under the (possibly updated) weights.
            return model.LogLikelihood(observations, labels);
        }
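The run method above wires the negative log-likelihood and its gradient into the optimizer through two closures over the candidate parameter vector. The following self-contained sketch shows the same Function/Gradient pattern on a toy quadratic objective, assuming the lbfgs field is Accord.NET's BroydenFletcherGoldfarbShanno optimizer from Accord.Math.Optimization; the toy function, starting point, and class name are illustrative:

        using Accord.Math.Optimization;

        class LbfgsClosureSketch
        {
            static void Main()
            {
                var lbfgs = new BroydenFletcherGoldfarbShanno(2);

                // Objective to minimize: f(w) = (w0 - 3)^2 + (w1 + 1)^2.
                lbfgs.Function = w => (w[0] - 3) * (w[0] - 3) + (w[1] + 1) * (w[1] + 1);

                // Analytic gradient of the objective.
                lbfgs.Gradient = w => new[] { 2 * (w[0] - 3), 2 * (w[1] + 1) };

                // Optimize starting from the origin; the minimizer is read from Solution.
                lbfgs.Minimize(new[] { 0.0, 0.0 });
                double[] best = lbfgs.Solution;   // approaches { 3, -1 }
            }
        }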
Example #3
        /// <summary>
        ///   Runs the learning algorithm with the specified input
        ///   training observations and corresponding output labels.
        /// </summary>
        ///
        /// <param name="observations">The training observations.</param>
        /// <param name="labels">The corresponding labels for each observation sequence.</param>
        ///
        /// <returns>The log-likelihood of the model for the given data after learning.</returns>
        ///
        public double Run(T[][] observations, int[][] labels)
        {
            double f;      // objective value (negative log-likelihood)

            double[] g;    // gradient of the objective

            // Objective: the negative log-likelihood of the training data
            // under the candidate parameter vector.
            lbfgs.Function = parameters =>
            {
                model.Function.Weights = parameters;
                f = -model.LogLikelihood(observations, labels);
                return f;
            };

            // Gradient of the objective with respect to the weights.
            lbfgs.Gradient = parameters =>
            {
                model.Function.Weights = parameters;
                g = gradient(observations, labels);
                return g;
            };


            try
            {
                // Run L-BFGS starting from the current weights.
                double ll = lbfgs.Minimize(model.Function.Weights);
            }
            catch (LineSearchFailedException)
            {
                // The optimizer may still have made progress before the line
                // search failed, so keep the best solution found so far.
                // TODO: Restructure L-BFGS to avoid exceptions.
            }

            model.Function.Weights = lbfgs.Solution;

            return model.LogLikelihood(observations, labels);
        }
Example #4
        public void RunTest()
        {
            Accord.Math.Random.Generator.Seed = 0;

            int nstates = 3;
            int symbols = 3;

            int[][] sequences = new int[][]
            {
                new int[] { 0, 1, 1, 1, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 0, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 2, 2 },
                new int[] { 0, 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 0, 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 2, 2, 2 },
            };


            var function = new MarkovDiscreteFunction(nstates, symbols, new NormalDistribution());
            var model    = new ConditionalRandomField<int>(nstates, function);


            // Before training, the model should not reproduce the label sequences.
            for (int i = 0; i < sequences.Length; i++)
            {
                double p;
                int[]  s = sequences[i];
                int[]  r = model.Compute(s, out p);
                Assert.IsFalse(s.IsEqual(r));
            }

            var target = new QuasiNewtonLearning<int>(model);

            target.ParallelOptions.MaxDegreeOfParallelism = 1;

            // In this test each observation sequence doubles as its own label sequence.
            int[][] labels       = sequences;
            int[][] observations = sequences;

            double ll0 = model.LogLikelihood(observations, labels);

            double actual = target.Run(observations, labels);

            double ll1 = model.LogLikelihood(observations, labels);

            // Training should increase the log-likelihood of the data.
            Assert.IsTrue(ll1 > ll0);


            // Regression check on the final log-likelihood returned by Run.
            Assert.AreEqual(-0.0010766857305242183, actual, 1e-6);

            // After training, the model should reproduce each label sequence.
            for (int i = 0; i < sequences.Length; i++)
            {
                double p;
                int[]  s = sequences[i];
                int[]  r = model.Compute(s, out p);
                Assert.IsTrue(s.IsEqual(r));
            }
        }
Example #5
        public void RunTest()
        {
            int nstates = 3;
            int symbols = 3;

            int[][] sequences = new int[][]
            {
                new int[] { 0, 1, 1, 1, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 0, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 2, 2 },
                new int[] { 0, 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 0, 0, 1, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 2, 2, 2 },
            };


            var function = new MarkovDiscreteFunction(nstates, symbols);
            var model    = new ConditionalRandomField<int>(nstates, function);


            // Before training, the model should not reproduce the label sequences.
            for (int i = 0; i < sequences.Length; i++)
            {
                double p;
                int[]  s = sequences[i];
                int[]  r = model.Compute(s, out p);
                Assert.IsFalse(s.IsEqual(r));
            }

            var target = new QuasiNewtonLearning<int>(model);

            // In this test each observation sequence doubles as its own label sequence.
            int[][] labels       = sequences;
            int[][] observations = sequences;

            double ll0 = model.LogLikelihood(observations, labels);

            double actual = target.Run(observations, labels);

            double ll1 = model.LogLikelihood(observations, labels);

            // Training should increase the log-likelihood of the data.
            Assert.IsTrue(ll1 > ll0);


            // A final log-likelihood of (nearly) zero means the training data
            // is reproduced with probability close to one.
            Assert.AreEqual(0, actual, 1e-8);

            // After training, the model should reproduce each label sequence.
            for (int i = 0; i < sequences.Length; i++)
            {
                double p;
                int[]  s = sequences[i];
                int[]  r = model.Compute(s, out p);
                Assert.IsTrue(s.IsEqual(r));
            }
        }
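Both RunTest examples above use Compute to decode the most likely label sequence for an observation sequence. A minimal sketch of that call in isolation; the 3-state/3-symbol model, the sequence, and the class name are illustrative, and the out parameter is a score for the decoding that the tests above do not inspect:

        using Accord.Statistics.Models.Fields;
        using Accord.Statistics.Models.Fields.Functions;

        class DecodeSketch
        {
            static void Main()
            {
                var function = new MarkovDiscreteFunction(3, 3);
                var model    = new ConditionalRandomField<int>(3, function);

                int[] sequence = { 0, 1, 1, 1, 2 };

                // Most likely label sequence for the observations, plus its score.
                double p;
                int[] labels = model.Compute(sequence, out p);
            }
        }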
Example #6
        public void LikelihoodTest()
        {
            var hmm = DiscreteHiddenMarkovModelFunctionTest.CreateModel2();

            int states  = hmm.States;
            int symbols = hmm.Symbols;

            // CRF whose potential function is taken from the existing HMM.
            var hcrf = new ConditionalRandomField<int>(states,
                                                       new MarkovDiscreteFunction(hmm));

            // CRF built from a freshly initialized HMM, for comparison.
            var hmm0  = new HiddenMarkovModel(states, symbols);
            var hcrf0 = new ConditionalRandomField<int>(states,
                                                        new MarkovDiscreteFunction(hmm0));


            // Conditional log-likelihoods log p(y | x) under each CRF.
            int[]  observations = new int[] { 0, 0, 1, 1, 1, 2 };
            double la           = hcrf.LogLikelihood(observations, observations);
            double lb           = hcrf0.LogLikelihood(observations, observations);

            Assert.IsTrue(la > lb);

            // Joint log-likelihoods log p(x, y) under the corresponding HMMs.
            double lc = hmm.Evaluate(observations, observations);
            double ld = hmm0.Evaluate(observations, observations);

            Assert.IsTrue(lc > ld);

            // Adding the log-partition log Z(x) to the conditional value should
            // recover the joint value: log p(x, y) = log p(y | x) + log Z(x).
            double za = hcrf.LogPartition(observations);
            double zb = hcrf0.LogPartition(observations);

            la += za;
            lb += zb;

            Assert.AreEqual(la, lc, 1e-6);
            Assert.AreEqual(lb, ld, 1e-6);
        }
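The final assertions rely on the identity stated in the comments above: the HMM's joint log-likelihood equals the CRF's conditional log-likelihood plus its log-partition function. A short self-contained sketch stating that relationship explicitly; the 2-state/3-symbol model, the sequences, and the class name are illustrative:

        using Accord.Statistics.Models.Fields;
        using Accord.Statistics.Models.Fields.Functions;
        using Accord.Statistics.Models.Markov;

        class PartitionIdentitySketch
        {
            static void Main()
            {
                var hmm = new HiddenMarkovModel(2, 3);
                var crf = new ConditionalRandomField<int>(2, new MarkovDiscreteFunction(hmm));

                int[] x = { 0, 0, 1, 1, 1, 2 };
                int[] y = { 0, 0, 1, 1, 1, 2 };

                double conditional = crf.LogLikelihood(x, y);  // log p(y | x)
                double logZ        = crf.LogPartition(x);      // log Z(x)
                double joint       = hmm.Evaluate(x, y);       // log p(x, y)

                // conditional + logZ should equal joint up to numerical precision.
            }
        }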