Example #1
        public void DecodeTest()
        {
            #region doc_decode
            // In this example, we will reproduce the example on the Viterbi algorithm
            // available on Wikipedia: http://en.wikipedia.org/wiki/Viterbi_algorithm

            // Create the transition matrix A
            double[,] transition =
            {
                { 0.7, 0.3 },
                { 0.4, 0.6 }
            };

            // Create the emission matrix B
            double[,] emission =
            {
                { 0.1, 0.4, 0.5 },
                { 0.6, 0.3, 0.1 }
            };

            // Create the initial probabilities pi
            double[] initial = { 0.6, 0.4 };

            // Create a new hidden Markov model
            var hmm = new HiddenMarkovModel(transition, emission, initial);

            // After that, one could, for example, query the probability
            // of a sequence occurring. We will consider the sequence
            int[] sequence = new int[] { 0, 1, 2 };

            // And now we will evaluate its log-likelihood
            double logLikelihood = hmm.LogLikelihood(sequence);

            // At this point, the log-likelihood of the sequence
            // occurring within the model is -3.3928721329161653.

            // We can also get the Viterbi path of the sequence
            int[] path = hmm.Decode(sequence);

            // And the log-likelihood along the Viterbi path is
            double viterbi; hmm.Decode(sequence, out viterbi);

            // At this point, the state path will be 1-0-0 and the
            // log-likelihood will be -4.3095199438871337
            #endregion

            Assert.AreEqual(-3.3928721329161653, logLikelihood);
            Assert.AreEqual(-4.3095199438871337, viterbi);
            Assert.AreEqual(path[0], 1);
            Assert.AreEqual(path[1], 0);
            Assert.AreEqual(path[2], 0);
        }
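As a small follow-up sketch (not part of the original test), the two log-likelihoods above can be exponentiated to recover plain probabilities, which are easier to interpret for such a short sequence:

using System;

class DecodeInterpretation
{
    static void Main()
    {
        // Values taken from the test above.
        double forwardLogLikelihood = -3.3928721329161653; // log P({0, 1, 2} | model)
        double viterbiLogLikelihood = -4.3095199438871337; // log-likelihood along the path 1-0-0

        Console.WriteLine(Math.Exp(forwardLogLikelihood)); // ~0.0336
        Console.WriteLine(Math.Exp(viterbiLogLikelihood)); // ~0.0134
    }
}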
Example #2
 /// <summary>
 ///   Gets the probability mass function (pmf) for
 ///   this distribution evaluated at point <c>x</c>.
 /// </summary>
 ///
 /// <param name="x">A single point in the distribution range.</param>
 ///
 /// <returns>
 ///   The probability of <c>x</c> occurring
 ///   in the current distribution.
 /// </returns>
 ///
 /// <remarks>
 ///   The Probability Mass Function (PMF) describes the
 ///   probability that a given value <c>x</c> will occur.
 /// </remarks>
 ///
 public override double ProbabilityMassFunction(int[] x)
 {
     return Math.Exp(model.LogLikelihood(x));
 }
Example #3
 /// <summary>
 ///   Gets the probability mass function (pmf) for
 ///   this distribution evaluated at point <c>x</c>.
 /// </summary>
 ///
 /// <param name="x">A single point in the distribution range.</param>
 ///
 /// <returns>
 ///   The probability of <c>x</c> occurring
 ///   in the current distribution.
 /// </returns>
 ///
 /// <remarks>
 ///   The Probability Mass Function (PMF) describes the
 ///   probability that a given value <c>x</c> will occur.
 /// </remarks>
 ///
 protected internal override double InnerProbabilityMassFunction(int[] x)
 {
     return Math.Exp(model.LogLikelihood(x));
 }
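Both overrides above follow the same pattern: the probability mass of a discrete sequence is simply the exponential of the model's forward log-likelihood. Below is a minimal stand-alone sketch of that pattern; the DiscreteSequenceDistribution class name is hypothetical and not part of Accord.NET.

using System;
using Accord.Statistics.Models.Markov;

// Hypothetical wrapper illustrating the PMF-as-exp(log-likelihood) pattern above.
class DiscreteSequenceDistribution
{
    private readonly HiddenMarkovModel model;

    public DiscreteSequenceDistribution(HiddenMarkovModel model)
    {
        this.model = model;
    }

    // Log of the probability mass assigned to an integer sequence.
    public double LogProbabilityMassFunction(int[] x)
    {
        return model.LogLikelihood(x);
    }

    // The probability mass itself; exponentiating can underflow for long
    // sequences, which is why the log form is usually preferred.
    public double ProbabilityMassFunction(int[] x)
    {
        return Math.Exp(model.LogLikelihood(x));
    }
}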
Example #4
        private void Video1_Proccess1()
        {
            //if (_capture1 != null && _capture1.Ptr != IntPtr.Zero)
            //{

            int  war_at_frame = 0;
            bool warning      = false;

            while (camera_1.frameNum < total_frames1 - 10)
            {
                //Console.WriteLine(camera_1.frameNum);
                if (camera_1.frameNum % 20 == 0)
                {
                    count = 0;
                }

                abnormal_vote = 0;
                normal_vote   = 0;
                try
                {
                    double[] fe = F_E.extract(vid1, camera_1.frameNum);
                    // Fall back to default coordinates when the extractor returns missing (NaN) values
                    if (double.IsNaN(fe[0]) || double.IsNaN(fe[1]))
                    {
                        fe[0] = 240;
                        fe[1] = 170;
                    }
                    int[] fff = new int[] { (int)fe[0], (int)fe[1] };

                    //int knn_answer = knn.Decide(fe);
                    int  RF_answer = RF.Decide(fe);
                    bool LR_answer = LR.Decide(fe);
                    //bool SVM_answer = SVM.Decide(fe);
                    int    NB_answer = NB.Decide(fff);
                    double fl1       = HMM.LogLikelihood(fff);

                    if (chocking || lying)
                    {
                        Console.WriteLine(fl1);
                        if (fl1 > -8.3)
                        {
                            hmm_count++;
                        }
                    }
                    else if (violence)
                    {
                        if (RF_answer == 1)
                        {
                            abnormal_vote += 0.978546619845336;
                        }
                        else
                        {
                            normal_vote += 0.978546619845336;
                        }

                        if (LR_answer)
                        {
                            abnormal_vote += 0.8428031393318365;
                        }
                        else
                        {
                            normal_vote += 0.8428031393318365;
                        }

                        if (NB_answer == 1)
                        {
                            abnormal_vote += 0.8746569953754341;
                        }
                        else
                        {
                            normal_vote += 0.8746569953754341;
                        }

                        if (abnormal_vote > normal_vote)
                        {
                            count++;
                        }
                    }

                    if (hmm_count >= 2 || count >= 4)
                    {
                        if (count >= 4)
                        {
                            count = 0;
                        }
                        if (hmm_count >= 2)
                        {
                            hmm_count = 0;
                        }

                        this.pictureBox3.Invoke((MethodInvoker) delegate
                        {
                            // Running on the UI thread
                            pictureBox3.Image = Properties.Resources.warning;
                        });

                        if (alarm)
                        {
                            wplayer.URL = "D:\\2\\Real-Time Abnormal Event Detection And Tracking In Video\\Alarm.mp3";
                            wplayer.controls.play();
                        }



                        //pictureBox3.Image = Properties.Resources.warning;
                        warning      = true;
                        war_at_frame = camera_1.frameNum;

                        Media.Crop_video(vid1, (int)camera_1.frameNum / (fbs + 5), 30);
                        Media.thumbnail(vid1, (int)camera_1.frameNum / (fbs + 5));
                        Image image = Image.FromFile(@"D:\2\Real-Time Abnormal Event Detection And Tracking In Video\croped_videos\crop" + Media.num.ToString() + ".jpg");
                        dataGridView1.Rows.Add(image, @"D:\2\Real-Time Abnormal Event Detection And Tracking In Video\croped_videos\crop" + Media.num.ToString() + ".mpg");
                        Media.num++;
                    }

                    if (warning && camera_1.frameNum >= (war_at_frame + 10))
                    {
                        this.pictureBox3.Invoke((MethodInvoker) delegate
                        {
                            // Running on the UI thread
                            pictureBox3.Image = Properties.Resources._checked;
                        });
                        //pictureBox3.Image = Properties.Resources._checked;
                        warning = false;
                    }
                }
                catch (Exception e)
                {
                    Console.WriteLine("1--- ", e.Message);
                }
            }
        }
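The detection logic above ultimately reduces to comparing the HMM log-likelihood of a discretized feature pair against a fixed threshold (-8.3) and raising a warning after enough hits. The stripped-down sketch below isolates just that step; the untrained 2-state model and the sample feature values are placeholder assumptions, since the real model and feature extractor are not part of this example.

using System;
using Accord.Statistics.Models.Markov;

class LikelihoodThresholdSketch
{
    static void Main()
    {
        // Placeholder model: the original code uses a model trained offline
        // on discretized (x, y) position features.
        var hmm = new HiddenMarkovModel(states: 2, symbols: 300);

        int hmmCount = 0;
        int[][] frames = { new[] { 240, 170 }, new[] { 250, 180 } };

        foreach (int[] feature in frames)
        {
            double logLikelihood = hmm.LogLikelihood(feature);

            // Same rule as above: frames likelier than the threshold count as hits.
            if (logLikelihood > -8.3)
                hmmCount++;
        }

        // Two hits trigger the warning in the original code.
        Console.WriteLine(hmmCount >= 2 ? "warning" : "ok");
    }
}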
Example #5
        public void LearnTest3()
        {
            #region doc_learn
            // We will create a Hidden Markov Model to detect
            // whether a given sequence starts with a zero.
            int[][] sequences = new int[][]
            {
                new int[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Create a new Hidden Markov Model with 3 states for
            //  an output alphabet of two characters (zero and one)
            var hmm = new HiddenMarkovModel(states: 3, symbols: 2);

            // Create the learning algorithm
            var teacher = new BaumWelchLearning(hmm)
            {
                Tolerance  = 0.0001, // until log-likelihood changes less than 0.0001
                Iterations = 0       // and use as many iterations as needed
            };

            // Estimate the model
            teacher.Learn(sequences);


            // Now we can check how well the model explains each sequence.
            // First, we compute the log-likelihood along the most likely
            // (Viterbi) state path of each sequence:
            double vl1; hmm.Decode(new int[] { 0, 1 }, out vl1);        // -0.69317855
            double vl2; hmm.Decode(new int[] { 0, 1, 1, 1 }, out vl2);  // -2.16644878

            // Sequences which do not start with zero have much lower probability.
            double vl3; hmm.Decode(new int[] { 1, 1 }, out vl3);        // -11.3580034
            double vl4; hmm.Decode(new int[] { 1, 0, 0, 0 }, out vl4);  // -38.6759130

            // Sequences which contain a few errors still have higher probability
            //  than the ones which do not start with zero. This shows some of
            //  the temporal elasticity and error tolerance of HMMs.
            double vl5; hmm.Decode(new int[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }, out vl5); // -8.22665
            double vl6; hmm.Decode(new int[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }, out vl6); // -8.22665


            // Additionally, we can also compute the probability
            // of those sequences using the forward algorithm:
            double fl1 = hmm.LogLikelihood(new int[] { 0, 1 });        // -0.000031369
            double fl2 = hmm.LogLikelihood(new int[] { 0, 1, 1, 1 });  // -0.087005121

            // Sequences which do not start with zero have much lower probability.
            double fl3 = hmm.LogLikelihood(new int[] { 1, 1 });        // -10.66485629
            double fl4 = hmm.LogLikelihood(new int[] { 1, 0, 0, 0 });  // -36.61788687

            // Sequences which contain a few errors still have higher probability
            //  than the ones which do not start with zero. This shows some of
            //  the temporal elasticity and error tolerance of HMMs.
            double fl5 = hmm.LogLikelihood(new int[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }); // -3.3744416
            double fl6 = hmm.LogLikelihood(new int[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }); // -3.3744426
            #endregion


            Assert.AreEqual(-0.69317855044301457, vl1, 1e-4);
            Assert.AreEqual(-2.166448784882073, vl2, 1e-4);
            Assert.AreEqual(-11.358003471944887, vl3, 1e-4);
            Assert.AreEqual(-38.675913006221506, vl4, 1e-4);
            Assert.AreEqual(-8.22664996599565, vl5, 1e-4);
            Assert.AreEqual(-8.2266499659956516, vl6, 1e-4);

            Assert.IsTrue(vl1 > vl3 && vl1 > vl4);
            Assert.IsTrue(vl2 > vl3 && vl2 > vl4);

            Assert.AreEqual(-0.000031369883069287674, fl1, 1e-4);
            Assert.AreEqual(-0.087005121634496585, fl2, 1e-4);
            Assert.AreEqual(-10.664856291384941, fl3, 1e-4);
            Assert.AreEqual(-36.617886878165528, fl4, 1e-4);
            Assert.AreEqual(-3.3744415883604058, fl5, 1e-4);
            Assert.AreEqual(-3.3744426259067066, fl6, 1e-4);
        }
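As a short follow-up sketch, the trained model from this example can act as a simple detector for "starts with zero" by thresholding the forward log-likelihood. The -5.0 cut-off below is an illustrative assumption, not a value from the original test; it sits between the scores of the matching sequences (above roughly -3.4) and the non-matching ones (below -10).

using Accord.Statistics.Models.Markov;

static class StartsWithZeroDetector
{
    // `hmm` is assumed to be the 3-state, 2-symbol model trained above
    // with BaumWelchLearning.
    public static bool StartsWithZero(HiddenMarkovModel hmm, int[] sequence)
    {
        // Illustrative threshold: sequences that start with zero score
        // close to 0, the others score far more negative.
        return hmm.LogLikelihood(sequence) > -5.0;
    }

    // Usage, continuing from the example:
    //   StartsWithZero(hmm, new[] { 0, 1, 1 });   // expected: true
    //   StartsWithZero(hmm, new[] { 1, 1, 0 });   // expected: false
}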
Example #6
        public void learn_test()
        {
            #region doc_learn
            Accord.Math.Random.Generator.Seed = 0;

            // Create continuous sequences. In the sequences below, there
            //  seem to be two states: one generating values between 0 and 1
            //  and another generating values between 5 and 7. The states
            //  appear to alternate at every observation.
            double[][] sequences = new double[][]
            {
                new double[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 },
                new double[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 },
                new double[] { 0.1, 7.0, 0.1, 7.0, 0.2, 5.6 },
            };


            // Specify an initial Normal distribution
            var density = new NormalDistribution();

            // Create a continuous hidden Markov Model with two states organized in a forward
            //  topology and an underlying univariate Normal distribution as probability density.
            var model = new HiddenMarkovModel<NormalDistribution, double>(new Forward(2), density);

            // Configure the learning algorithm to train the model until the change
            // in the average log-likelihood between iterations is less than 0.0001
            var teacher = new ViterbiLearning<NormalDistribution, double>(model)
            {
                Tolerance  = 0.0001,
                Iterations = 0,
            };

            // Fit the model
            teacher.Learn(sequences);

            // See the probability of the sequences learned
            double a1 = model.LogLikelihood(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 }); // log(0.40)
            double a2 = model.LogLikelihood(new[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 }); // log(0.46)

            // See the probability of an unrelated sequence
            double a3 = model.LogLikelihood(new[] { 1.1, 2.2, 1.3, 3.2, 4.2, 1.0 }); // log(1.42e-48)
            #endregion

            a1 = Math.Exp(a1);
            a2 = Math.Exp(a2);
            a3 = Math.Exp(a3);

            Assert.AreEqual(0.4048936808991913, a1, 1e-10);
            Assert.AreEqual(0.4656014344844673, a2, 1e-10);
            Assert.AreEqual(1.4232710878429383E-48, a3, 1e-10);

            Assert.AreEqual(2, model.Emissions.Length);
            var state1 = (model.Emissions[0] as NormalDistribution);
            var state2 = (model.Emissions[1] as NormalDistribution);
            Assert.AreEqual(0.16666666666666, state1.Mean, 1e-10);
            Assert.AreEqual(6.11111111111111, state2.Mean, 1e-10);
            Assert.IsFalse(Double.IsNaN(state1.Mean));
            Assert.IsFalse(Double.IsNaN(state2.Mean));

            Assert.AreEqual(0.007499999999999, state1.Variance, 1e-10);
            Assert.AreEqual(0.538611111111111, state2.Variance, 1e-10);
            Assert.IsFalse(Double.IsNaN(state1.Variance));
            Assert.IsFalse(Double.IsNaN(state2.Variance));

            Assert.AreEqual(2, model.LogTransitions.GetLength(0));
            Assert.AreEqual(2, model.LogTransitions.Columns());

            var A = model.LogTransitions.Exp();
            Assert.AreEqual(0.090, A[0][0], 1e-3);
            Assert.AreEqual(0.909, A[0][1], 1e-3);
            Assert.AreEqual(0.875, A[1][0], 1e-3);
            Assert.AreEqual(0.125, A[1][1], 1e-3);

            Assert.IsFalse(A.HasNaN());
        }
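A possible follow-up sketch for this example: decoding the most likely state path of one of the training sequences makes the alternating low/high pattern visible. This assumes Decode is available on the generic HiddenMarkovModel<TDistribution, TObservation> in the same way it is on the discrete model in Example #1.

using System;
using Accord.Statistics.Distributions.Univariate;
using Accord.Statistics.Models.Markov;
using Accord.Statistics.Models.Markov.Learning;
using Accord.Statistics.Models.Markov.Topology;

class ContinuousDecodeSketch
{
    static void Main()
    {
        double[][] sequences =
        {
            new double[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 },
            new double[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 },
            new double[] { 0.1, 7.0, 0.1, 7.0, 0.2, 5.6 },
        };

        // Same setup as the example above: two forward states with
        // univariate Normal emissions, trained by Viterbi learning.
        var model = new HiddenMarkovModel<NormalDistribution, double>(
            new Forward(2), new NormalDistribution());

        var teacher = new ViterbiLearning<NormalDistribution, double>(model)
        {
            Tolerance = 0.0001,
            Iterations = 0
        };

        teacher.Learn(sequences);

        // Most likely hidden state path; the states should alternate,
        // matching the low/high pattern described in the comments above.
        int[] path = model.Decode(new double[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 });
        Console.WriteLine(string.Join("-", path)); // e.g. 0-1-0-1-0-1
    }
}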