Example #1
        public void TestKalman()
        {
            double[] measurements = new double[] { 5, 6, 7, 9, 10 };
            double[] motion       = new double[] { 1, 1, 2, 1, 1 };

            double mu    = 0;
            double sigma = 1000;

            double measureSigma = 4.0;
            double motionSig    = 2.0;

            double[] expectedUpdateMu     = new double[] { 4.98, 5.992, 6.996, 8.998, 9.999 };
            double[] expectedPredictMu    = new double[] { 5.98, 6.992, 8.996, 9.998, 10.999 };
            double[] expectedUpdateSigma  = new double[] { 3.984, 2.397, 2.095, 2.023, 2.005 };
            double[] expectedPredictSigma = new double[] { 5.984, 4.397, 4.095, 4.023, 4.006 };

            KalmanFilter k = new KalmanFilter(new Gaussian(mu, sigma));

            for (int i = 0; i < measurements.Length; i++)
            {
                k.Update(measurements[i], measureSigma);
                Assert.AreEqual(expectedUpdateMu[i], k.Mu, 0.001);
                Assert.AreEqual(expectedUpdateSigma[i], k.Sigma, 0.001);

                k.Predict(motion[i], motionSig);
                Assert.AreEqual(expectedPredictMu[i], k.Mu, 0.001);
                Assert.AreEqual(expectedPredictSigma[i], k.Sigma, 0.001);
            }
        }
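The KalmanFilter and Gaussian types exercised by this test are not part of the listing. The expected values are consistent with the classic 1-D Gaussian filter in which Sigma holds a variance (for instance, 1/(1/1000 + 1/4) ≈ 3.984 matches the first expected update sigma), so a minimal sketch, offered only as an illustration and not as the project's actual implementation, could look like this:

public class Gaussian
{
    public double Mu;
    public double Sigma;   // variance

    public Gaussian(double mu, double sigma)
    {
        Mu    = mu;
        Sigma = sigma;
    }
}

public class KalmanFilter
{
    private readonly Gaussian state;

    public KalmanFilter(Gaussian initialState)
    {
        state = initialState;
    }

    public double Mu    { get { return state.Mu; } }
    public double Sigma { get { return state.Sigma; } }

    // Measurement update: multiply the prior Gaussian by the measurement Gaussian.
    public void Update(double measurement, double measurementSigma)
    {
        state.Mu    = (measurementSigma * state.Mu + state.Sigma * measurement)
                      / (state.Sigma + measurementSigma);
        state.Sigma = 1.0 / (1.0 / state.Sigma + 1.0 / measurementSigma);
    }

    // Motion update (prediction): add the motion Gaussian to the state.
    public void Predict(double motion, double motionSigma)
    {
        state.Mu    += motion;
        state.Sigma += motionSigma;
    }
}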
Example #2
        public PointF[] filterPoints(PointF pt)
        {
            syntheticData.state[0, 0] = pt.X;
            syntheticData.state[1, 0] = pt.Y;

            // Prediction for this timestep.
            Mat prediction = kal.Predict();

            float[] data = GetMatData(prediction);
            PointF predictPoint = new PointF(data[0], data[1]);
            Matrix<float> measurement  = syntheticData.GetMeasurement();
            PointF        measurePoint = new PointF(measurement[0, 0], measurement[1, 0]);

            Mat estimated = kal.Correct(measurement.Mat);

            data = GetMatData(estimated);
            PointF estimatedPoint = new PointF(data[0], data[1]);

            syntheticData.GoToNextState();
            PointF[] results = new PointF[2];
            results[0] = predictPoint;
            results[1] = estimatedPoint;
            px         = predictPoint.X;
            py         = predictPoint.Y;
            cx         = estimatedPoint.X;
            cy         = estimatedPoint.Y;
            return(results);
        }
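The GetMatData helper above is not shown. A plausible implementation (an assumption), mirroring the Mat-to-float[] copy that Example #10 performs with kal.StatePre.CopyTo(pred), would be:

        private static float[] GetMatData(Mat mat)
        {
            // Copy the single-precision matrix contents into a flat array.
            float[] data = new float[mat.Rows * mat.Cols];
            mat.CopyTo(data);
            return data;
        }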
Example #3
        public override double Update(double newValue)
        {
            // Initialise the KF with the first data value we get.
            if (Window.Length == 0)
            {
                KF.State[0, 0] = newValue;
            }

            // Prediction step.
            KF.Predict();

            // Volatility calculation; update the observation covariance once the window is full.
            Window.Insert(newValue);
            ad[0, 0] = Multiplier * Window.AD();
            if (Window.Full)
            {
                KF.UpdateObservationCovariance(ad);
            }

            // Update step.
            z[0, 0] = newValue;
            KF.Correct(z);

            return(KF.State[0, 0]);
        }
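Window here is a rolling buffer exposing Length, Full, Insert and a dispersion measure AD() that scales the adaptive observation covariance. The real class is not shown; a rough sketch, assuming AD() is a mean absolute deviation (hypothetical class name RollingWindow), might be:

using System;
using System.Collections.Generic;
using System.Linq;

public class RollingWindow
{
    private readonly List<double> values = new List<double>();
    private readonly int size;

    public RollingWindow(int size)
    {
        this.size = size;
    }

    public int  Length { get { return values.Count; } }
    public bool Full   { get { return values.Count == size; } }

    public void Insert(double value)
    {
        values.Add(value);
        if (values.Count > size)
        {
            values.RemoveAt(0);   // drop the oldest sample
        }
    }

    // Mean absolute deviation of the buffered samples (assumed meaning of AD).
    public double AD()
    {
        if (values.Count == 0)
        {
            return 0.0;
        }
        double mean = values.Average();
        return values.Average(v => Math.Abs(v - mean));
    }
}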
Example #4
        /// <summary>
        /// Perform a single timestep of the simulation/measurement to remove most of the noise from position/velocity measurements.
        /// </summary>
        /// <param name="x">the measured (noisy) position</param>
        /// <param name="dt1">the current timestamp</param>
        /// <param name="x_better">a better position estimate, taking into account the last few readings and control inputs</param>
        /// <param name="v">a really smooth but not laggy velocity estimate, taking into account the last few readings and control inputs</param>
        /// <param name="a">best guess for the current acceleration that will be applied if we don't move the mouse</param>
        public void KalmanStep(double x, out double x_better, out double v, out double a)
        {
            // Given the last tick's input, project forward and see where we end up
            k.Predict(accelerationInput);

            // add the current (noisy) measurement of our position to the state
            positionMeasurement.SetIdentity(x);
            k.Correct(positionMeasurement);

            // retrieve the best-guess of our current position and velocity, which take into account the last few measurements
            x_better = k.StatePost.At <float>(0, 0);
            v        = k.StatePost.At <float>(0, 1);
            a        = k.StatePost.At <float>(0, 2);
        }
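The fields used above (k, positionMeasurement, accelerationInput) are declared elsewhere in the class. Hypothetical declarations, assuming the same layout as the static Kalman() demo in Example #9 below (three state variables, one position measurement, one control input):

        // Hypothetical field declarations consistent with the usage above.
        KalmanFilter k                   = new KalmanFilter(dynamParams: 3, measureParams: 1, controlParams: 1);
        Mat          positionMeasurement = new Mat(1, 1, MatType.CV_32F);   // 1x1 position measurement
        Mat          accelerationInput   = new Mat(1, 1, MatType.CV_32F);   // 1x1 control input for Predict()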
Example #5
    /// <summary>
    /// Update the state estimate with a new measurement.
    /// </summary>
    public void Update(Vector3? measurePosition, Vector3? measureVelocity)
    {
        // Kalman filter prediction phase.
        _kalmanFilter.Predict(_transitionMat, _processNoiseCovMat);

        var newPosition = measurePosition ?? GetPositionFromFilter();
        var newVelocity = measureVelocity ?? GetVelocityFromFilter();

        var measure = new Matrix(new double[, ] {
            { newPosition.x,
              newPosition.y,
              newPosition.z,
              newVelocity.x,
              newVelocity.y,
              newVelocity.z }
        }, false);

        // Update the Kalman filter with the new measurement.
        _kalmanFilter.Update(measure, _measurementMat, _measurementNoiseCovMat);
    }
Example #6
        private void timer_Tick(object sender, EventArgs e)
        {
            float accelNoise       = (float)numProcessNoise.Value;
            float measurementNoise = (float)numMeasurementNoise.Value;

            /**************************************** get data *******************************************/
            var processPosition = process.GetNoisyState(accelNoise).Position;

            bool measurementExist;
            var  measurement = process.TryGetNoisyMeasurement(measurementNoise, out measurementExist);

            kalman.Predict();
            if (measurementExist)
            {
                kalman.Correct(measurement);
            }
            var correctedPosition = kalman.State.Position;

            /**************************************** get data *******************************************/

            //plot process state (what we do not know)
            plotData(processPosition, 0);
            //try to plot the measurement (what we see)
            if (measurementExist)
            {
                plotData(measurement, 1);
            }
            //plot corrected state (Kalman)
            plotData(correctedPosition, 2, new DataPoint {
                BorderWidth = 5
            });

            bool doneFullCycle;

            process.GoToNextState(out doneFullCycle);

            if (doneFullCycle)
            {
                clearChart();
            }
        }
Example #7
        private void trackOneStep(Bgr <byte>[,] frame, out Gray <byte>[,] probabilityMap, out Box2D foundBox)
        {
            const float SEARCH_AREA_INFLATE_FACTOR = 0.05f;

            /**************************** KALMAN predict **************************/
            kalman.Predict();
            searchArea = createRect(kalman.State.Position, searchArea.Size, frame.Size());
            /**************************** KALMAN predict **************************/

            trackCamshift(frame, searchArea, out probabilityMap, out foundBox);

            if (!foundBox.IsEmpty)
            {
                /**************************** KALMAN correct **************************/
                kalman.Correct(new PointF(foundBox.Center.X, foundBox.Center.Y)); //correct predicted state by measurement
                /**************************** KALMAN correct **************************/

                var foundArea = Rectangle.Round(foundBox.GetMinArea());
                searchArea      = foundArea.Inflate(SEARCH_AREA_INFLATE_FACTOR, SEARCH_AREA_INFLATE_FACTOR, frame.Size()); //inflate found area for search (X factor)...
                nonVisibleCount = 0;
            }
            else
            {
                nonVisibleCount++;
                if (nonVisibleCount == 1)                                                                                                //for the first time
                {
                    searchArea = searchArea.Inflate(-SEARCH_AREA_INFLATE_FACTOR * 1.5, -SEARCH_AREA_INFLATE_FACTOR * 1.5, frame.Size()); //shrink (hysteresis)
                }

                searchArea = createRect(kalman.State.Position, searchArea.Size, frame.Size());
            }

            if (nonVisibleCount > 100) //if not visible for a longer time => reset tracking
            {
                nonVisibleCount = 0;
                isROISelected   = false;
            }
        }
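createRect is not included in the listing. One way to write it (an assumption) is to center a rectangle of the current search-window size on the Kalman-predicted position and clamp it to the frame:

        private static Rectangle createRect(PointF center, Size size, Size frameSize)
        {
            // Center the search window on the predicted position...
            var rect = new Rectangle((int)(center.X - size.Width / 2f),
                                     (int)(center.Y - size.Height / 2f),
                                     size.Width, size.Height);

            // ...and keep it inside the frame.
            return Rectangle.Intersect(rect, new Rectangle(Point.Empty, frameSize));
        }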
Example #8
        /// <summary>
        /// Kalman update routine (initializes the filter on the first call).
        /// </summary>
        public void kalman_update()
        {
            if (kalman_id == 0)
            {
                kalman_init();
                // Set the initial values.
                double errcov = 1.0; // provisional value
                kv_kalman.StatePost.Set(0, (float)az2_c);
                kv_kalman.StatePost.Set(1, (float)alt2_c);
                Cv2.SetIdentity(kv_kalman.ErrorCovPost, new Scalar(errcov));
            }
            kalman_id++;

            // Measurement (Kalman)
            //float[] m = { (float)(az2_c), (float)(alt2_c) };
            //measurement = Cv.Mat(2, 1, MatrixType.F32C1, m);
            measurement.Set(0, 0, (float)az2_c);
            measurement.Set(1, 0, (float)alt2_c);

            // Measurement error estimate: FWHM = 2.35σ
            double fwhm_az   = 0.005 * vaz2_kv / 2.0;
            double fwhm_alt  = 0.005 * valt2_kv / 2.0;
            double sigma_az  = (fwhm_az / 2.35);
            double sigma_alt = (fwhm_alt / 2.35);

            kv_kalman.MeasurementNoiseCov.Set(0, 0, (float)(sigma_az * sigma_az));   // the filter matrices are single-precision, so write floats
            kv_kalman.MeasurementNoiseCov.Set(1, 1, (float)(sigma_alt * sigma_alt));
            //Cv.SetIdentity(kv_kalman.MeasurementNoiseCov, Cv.RealScalar(0.001));

            // Correction phase (Kalman)
            correction = kv_kalman.Correct(measurement); // correction = cv.KalmanCorrect(kv_kalman, measurement);
            // Prediction phase (Kalman)
            prediction = kv_kalman.Predict();            // Cv.KalmanPredict(kv_kalman);
            kvaz       = prediction.At <float>(0);
            kvalt      = prediction.At <float>(1);
            //kvx = prediction.At<double>(2);// .DataArraySingle[2];
            //kvy = prediction.At<double>(3);// .DataArraySingle[3];
        }
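kalman_init() is not listed. A plausible version (an assumption), reusing the same OpenCvSharp calls as the surrounding code and a 4-state constant-velocity model in azimuth/altitude (which would explain the commented-out reads at indices 2 and 3), might be:

        private void kalman_init()
        {
            // 4 state variables (az, alt, az rate, alt rate), 2 measurements, no control input.
            kv_kalman = new KalmanFilter(dynamParams: 4, measureParams: 2, controlParams: 0);

            // Constant-velocity transition model: each angle is advanced by its rate.
            kv_kalman.TransitionMatrix.SetArray(row: 0, col: 0, data: new float[, ] {
                { 1, 0, 1, 0 },
                { 0, 1, 0, 1 },
                { 0, 0, 1, 0 },
                { 0, 0, 0, 1 }
            });

            Cv2.SetIdentity(kv_kalman.MeasurementMatrix);
            Cv2.SetIdentity(kv_kalman.ProcessNoiseCov, new Scalar(1e-5));
            Cv2.SetIdentity(kv_kalman.MeasurementNoiseCov, new Scalar(1e-1));

            measurement = new Mat(2, 1, MatType.CV_32F);
        }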
Example #9
        public static void Kalman()
        {
            // https://www.youtube.com/watch?v=FkCT_LV9Syk -- what is a Kalman filter
            // http://dsp.stackexchange.com/a/8869/25966 -- example of how good they can be
            // https://en.wikipedia.org/wiki/Kalman_filter -- technical description
            // http://www.morethantechnical.com/2011/06/17/simple-kalman-filter-for-tracking-using-opencv-2-2-w-code/ -- example implementation for a kinematic system
            // http://dsp.stackexchange.com/questions/3292/estimating-velocity-from-known-position-and-acceleration
            // https://www.researchgate.net/post/What_are_the_most_efficient_methods_for_tuning_Kalman_Filter_process_noise_covariance_matrix_Q
            // A kalman filter with three state variables (position, velocity, acceleration); one measurement (position); and one control input (acceleration input)
            KalmanFilter k         = new KalmanFilter(dynamParams: 3, measureParams: 1, controlParams: 1);
            float        timedelta = 0.03f;

            // this matrix is used to update our state estimate at each step
            k.TransitionMatrix.SetArray(row: 0, col: 0, data: new float[, ] {
                { 1f, timedelta, timedelta * timedelta / 2 }, // position = old position + v Δt + a Δt^2 /2
                { 0, 0.98f, timedelta },                      // velocity = old velocity + a Δt, with slight damping in FA off (MUCH MORE IN FA-ON)
                { 0, 0, 0.8f }
            });                                               // acceleration = old acceleration * 0.8 (natural decay due to relative mouse)

            // this matrix indicates how much each control parameter affects each state variable
            // 0.01*mouseinput = Δa at 0 or full throttle; 0.024*mouseinput = Δa at 50% throttle.
            k.ControlMatrix.SetArray(row: 0, col: 0, data: new float[, ] {
                { 0 }, { 0 }, { 40f }
            });

            // No idea what these are, messed around with values until they seemed good
            k.MeasurementMatrix.SetIdentity();
            k.ProcessNoiseCov.SetArray(row: 0, col: 0, data: new float[, ] {
                { 1f, 0, 0 },                               // position prediction is pretty rough (avg. 1 px error)
                { 0, (float)(Math.Pow(timedelta, -2)), 0 }, // velocity prediction is okay (avg. 1 px/(0.03s) error)
                { 0, 0, (float)(Math.Pow(timedelta, -3)) }
            });                                             // acceleration prediction is good (avg. 1 px/(0.03s)^2 error)
            k.MeasurementNoiseCov.SetIdentity(1);           // stddev^2 of measurement gaussian distribution. increase this for slower and smoother response
            k.ErrorCovPost.SetIdentity(1);                  // large values (100) make initial transients huge
            k.ErrorCovPre.SetTo(1);                         // large values make initial transients huge

            // get data {position measurement, accel control input}
            List <Tuple <double, double> > points = new List <Tuple <double, double> >();

            using (var file = new System.IO.StreamReader(@"C:\users\public\trajectory.txt"))
            {
                while (true)
                {
                    var line = file.ReadLine();
                    if (line == null)
                    {
                        break;
                    }
                    string[] parts = line.Split(new string[] { ", " }, StringSplitOptions.RemoveEmptyEntries);
                    if (parts.Count() < 4)
                    {
                        break;
                    }
                    float measure = float.Parse(parts[0]);
                    float control = float.Parse(parts[3]);
                    points.Add(new Tuple <double, double>(measure, control));
                }
            }

            Mat matMeasure = new Mat(new OpenCvSharp.Size(1, 1), MatType.CV_32F);
            Mat matControl = new Mat(new OpenCvSharp.Size(1, 1), MatType.CV_32F);

            Mat graph = new Mat(new OpenCvSharp.Size(1500, 500), MatType.CV_8UC3);

            graph.SetTo(new Scalar(0, 0, 0));

            List <PlotState> plots = new List <PlotState>();

            plots.Add(new PlotState {
                color = new Scalar(255, 0, 0)
            });                                                         // measured position -- BLUE
            plots.Add(new PlotState {
                color = new Scalar(255, 255, 0)
            });                                                           // predicted position -- BLUEGREEN
            plots.Add(new PlotState {
                color = new Scalar(255, 0, 255)
            });                                                           // control signal -- PURPLE
            plots.Add(new PlotState {
                color = new Scalar(0, 0, 255)
            });                                                         // predicted velocity -- RED
            plots.Add(new PlotState {
                color = new Scalar(0, 255, 255)
            });                                                           // predicted acceleration -- YELLOW
            plots.Add(new PlotState {
                color = new Scalar(255, 255, 255)
            });                                                             // gain (prediction accuracy)

            k.StatePre.SetArray(row: 0, col: 0, data: new float[] { 0, 0, 0 });
            graph.Line(0, 250, 5000, 250, new Scalar(255, 255, 255));

            for (int i = 10; i < points.Count && i < 1500; i++)
            {
                float control = (float)points[i].Item2;
                matControl.Set(0, 0, control);
                k.Predict(matControl);
                float position_predicted     = k.StatePost.At <float>(0, 0);
                float velocity_predicted     = k.StatePost.At <float>(0, 1);
                float acceleration_predicted = k.StatePost.At <float>(0, 2);
                float position_measured      = (float)points[i].Item1;

                if (true || (i / 50) % 2 == 0)
                {
                    matMeasure.Set(0, 0, position_measured);
                    k.Correct(matMeasure);
                }
                int xratio  = 3;
                int xoffset = 0 * xratio;
                foreach (var plot in plots)
                {
                    graph.Line(i * xratio + xoffset, 2 * plot.prevValue + 250, (i + 1) * xratio + xoffset, 2 * plot.currentValue + 250, plot.color);
                    plot.prevValue = plot.currentValue;
                }
                float measuredVelocity = (int)((position_measured - plots[0].prevValue) / timedelta);
                plots[0].currentValue = (int)(position_measured);
                plots[1].currentValue = (int)position_predicted;
                plots[2].currentValue = (int)control;
                plots[3].currentValue = (int)(velocity_predicted / 3);
                plots[4].currentValue = (int)(acceleration_predicted * .1f);
                plots[5].currentValue = (int)(measuredVelocity / 3);
                // this should go to 0
                //plots[5].currentValue = (int)(10*k.ErrorCovPost.Norm()); // typical 0.03 -> 30
                if (i == 100)
                {
                    for (int row = 0; row < 3; row++)
                    {
                        // if everything is right, these will go to 0
                        for (int col = 0; col < 3; col++)
                        {
                            Console.Write(k.ErrorCovPost.At <float>(row, col).ToString() + " ");
                        }
                        Console.WriteLine();
                    }
                }
            }
            Window w1 = new Window(graph);
        }
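PlotState is not part of the listing; a minimal version (an assumption) only needs the three members the plotting loop touches:

        class PlotState
        {
            public Scalar color;          // line colour passed to graph.Line(...)
            public int    prevValue;      // y value drawn on the previous iteration
            public int    currentValue;   // y value to draw on this iteration
        }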
Example #10
        public void Update(KinectSensor sensor, Skeleton skeleton, System.Windows.Point skel2DCenter)
        {
            var left_foot  = skeleton.Joints[JointType.AnkleLeft];
            var right_foot = skeleton.Joints[JointType.AnkleRight];

            if (left_foot.TrackingState != JointTrackingState.Tracked ||
                right_foot.TrackingState != JointTrackingState.Tracked)
            {
                Reset();
                return;
            }

            // Update filter
            var   left_foot_pos  = left_foot.Position;
            var   right_foot_pos = right_foot.Position;
            float pos_feet_x     = (left_foot_pos.X + right_foot_pos.X) / 2.0f;
            float pos_feet_y     = (left_foot_pos.Y + right_foot_pos.Y) / 2.0f;

            syntheticData.state[0, 0] = pos_feet_x;
            syntheticData.state[1, 0] = pos_feet_y;
            // Prediction from Kalman for this timestep.
            float[] pred = new float[4];
            kal.Predict();
            kal.StatePre.CopyTo(pred);
            PointF predictedPositionPoint = new PointF(pred[0], pred[1]);
            // Update the Kalman state with a noise-induced measurement.
            Matrix <float> measurement  = syntheticData.GetMeasurement();
            PointF         measurePoint = new PointF(measurement[0, 0], measurement[1, 0]);

            kal.Correct(measurement.Mat);
            // Get an adjusted internal state measurement.
            float[] estimated = new float[4];
            kal.StatePost.CopyTo(estimated);
            PointF estimatedPositionPoint = new PointF(estimated[0], estimated[1]);

            //
            syntheticData.GoToNextState();

            // Note: This is the data passed into the segment recognizer
            float[][] kal_results = new float[2][]
            {
                pred,
                estimated
            };

            GesturePartResult result = segments[currentSegmentIdx].Update(sensor, skeleton, kal_results);

            if (result == GesturePartResult.Succeeded)
            {
                if (currentSegmentIdx + 1 < segments.Length)
                {
                    currentSegmentIdx++;
                    frameCount = 0;
                }
                else
                {
                    if (OnRecognized != null)
                    {
                        OnRecognized(this, new GestureEventArgs(skeleton.TrackingId, skel2DCenter));
                        Reset();
                    }
                }
            }
            else if (result == GesturePartResult.Failed || frameCount == WINDOW_SIZE)
            {
                Reset();
            }
            else
            {
                frameCount++;
            }
        }
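The syntheticData object used here and in Example #2 exposes a 4x1 [x, y, vx, vy] state, a noisy 2x1 position measurement, and a GoToNextState() step. A compact sketch of just those members (an assumption, loosely modeled on the well-known Emgu CV Kalman sample):

        public class SyntheticData
        {
            public Matrix<float> state = new Matrix<float>(4, 1);   // x, y, vx, vy

            private readonly Matrix<float> transition = new Matrix<float>(new float[, ] {
                { 1, 0, 1, 0 },   // x += vx
                { 0, 1, 0, 1 },   // y += vy
                { 0, 0, 1, 0 },
                { 0, 0, 0, 1 }
            });

            private readonly Random rng = new Random();

            // 2x1 position measurement: the current position plus a little noise.
            public Matrix<float> GetMeasurement()
            {
                var m = new Matrix<float>(2, 1);
                m[0, 0] = state[0, 0] + (float)(rng.NextDouble() - 0.5) * 0.1f;
                m[1, 0] = state[1, 0] + (float)(rng.NextDouble() - 0.5) * 0.1f;
                return m;
            }

            // Advance the underlying "true" state by one step.
            public void GoToNextState()
            {
                state = transition * state;
            }
        }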
Example #11
        public void step(Rectangle[] yuzler)
        {
            prev = now;
            now  = timer.ElapsedMilliseconds;
            dT   = (now - prev) / 400.0;   // avoid integer division so short frame intervals do not collapse to zero
            if (found)
            {
                at(kf.TransitionMatrix, 2, (float)dT);
                at(kf.TransitionMatrix, 9, (float)dT);

                state = kf.Predict();

                sonuc.Width  = (int)at(state, 4);
                sonuc.Height = (int)at(state, 5);
                sonuc.X      = (int)at(state, 0) - sonuc.Width / 2;
                sonuc.Y      = (int)at(state, 1) - sonuc.Height / 2;
            }

            if (yuzler.Length == 0)
            {
                notfoundcount++;
                if (notfoundcount >= 25)
                {
                    found = false;
                }
                else
                {
                    for (int i = 0; i < 6; i++)
                    {
                        at(kf.StatePost, i, at(state, i));
                    }
                }
            }
            else
            {
                notfoundcount = 0;

                at(meas, 0, yuzler[0].X + yuzler[0].Width / 2);
                at(meas, 1, yuzler[0].Y + yuzler[0].Height / 2);
                at(meas, 2, (float)yuzler[0].Width);
                at(meas, 3, (float)yuzler[0].Height);   // measurement slot 3 feeds the height state

                if (!found) // first detection
                {
                    at(kf.ErrorCovPre, 0, 1f);
                    at(kf.ErrorCovPre, 7, 1f);
                    at(kf.ErrorCovPre, 14, 1f);
                    at(kf.ErrorCovPre, 21, 1f);
                    at(kf.ErrorCovPre, 28, 1f);
                    at(kf.ErrorCovPre, 35, 1f);

                    at(state, 0, at(meas, 0));
                    at(state, 1, at(meas, 1));
                    at(state, 2, 0);
                    at(state, 3, 0);
                    at(state, 4, at(meas, 2));
                    at(state, 5, at(meas, 3));

                    found = true;
                }
                else
                {
                    kf.Correct(meas);
                }
            }
        }
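The at(...) getter and setter helpers used throughout this example are not shown. One possible pair (an assumption), doing flat single-precision element access on a continuous Emgu CV Mat, with the read mirroring the Mat-to-array copy from Example #10:

        // Read the value at a flat float index of a continuous CV_32F Mat.
        private static float at(Mat m, int index)
        {
            float[] buffer = new float[m.Rows * m.Cols];
            m.CopyTo(buffer);
            return buffer[index];
        }

        // Write a single float at the given flat index.
        private static void at(Mat m, int index, float value)
        {
            System.Runtime.InteropServices.Marshal.Copy(
                new[] { value }, 0, m.DataPointer + index * sizeof(float), 1);
        }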