/// <summary>
/// Advance the Kalman filter one step: predict the state forward by dt,
/// then correct it with the new position/velocity measurement.
/// </summary>
/// <param name="mx">The measured position.</param>
/// <param name="mv">The measured velocity.</param>
/// <param name="dt">The time interval since the last measurement.</param>
/// <returns>The filtered (a posteriori) position estimate, m_x[0].</returns>
public float Update(float mx, float mv, float dt)
{
    // Predict to now, then update.
    // Predict:
    //   X = F*X + H*U
    //   P = F*P*F^T + Q
    // Update:
    //   Y = M - H*X      Innovation = measurement - state transformed by H.
    //   S = H*P*H^T + R  Residual covariance = covariance transformed by H, plus R.
    //   K = P*H^T*S^-1   Kalman gain = variance / residual covariance.
    //   X = X + K*Y      Update state with gained innovation.
    //   P = (I - K*H)*P  Update covariance to this time.
    //
    // Same as 1D but mv is used instead of delta m_x[0], and H = [1,1].

    // X = F*X + H*U -- constant-velocity transition matrix; the control
    // term vanishes because U = {0,0}.
    FxMatrixF f = new FxMatrixF(2, 2) { Data = new float[] { 1, dt, 0, 1 } };
    m_x = f.Multiply(m_x) as FxMatrixF;

    // P = F*P*F^T + Q  (Add mutates m_p in place)
    m_p = f.MultiplyABAT(m_p) as FxMatrixF;
    m_p.Add(m_q);

    // Y = M - H*X -- innovation on both position and velocity components.
    FxMatrixF y = new FxMatrixF(1, 2) { Data = new float[] { mx - m_x[0], mv - m_x[1] } };

    // S = H*P*H^T + R. Measurement noise is added to the diagonal only;
    // the velocity term uses m_r * 0.1, i.e. the velocity measurement is
    // presumably assumed 10x less noisy than the position — TODO confirm.
    FxMatrixF s = m_p.Copy();
    s[0] += m_r;
    s[3] += m_r * 0.1f;

    // K = P * H^T * S^-1. If S is singular (Inverse() yields null) the gain
    // stays zero, so this step degenerates to pure prediction.
    FxMatrixF sinv = s.Inverse() as FxMatrixF;
    FxMatrixF k = new FxMatrixF(2, 2, 0f); // inited to zero.
    // Cast to Object forces a reference null-check: FxMatrixF overloads the
    // comparison operators (see e.g. "m != -1" used elsewhere to build masks).
    if (sinv as Object != null)
    {
        k = m_p.Multiply(sinv) as FxMatrixF;
    }

    // X = X + K*Y -- fold the gained innovation into the state.
    m_x.Add(k.Multiply(y));

    // P = (I - K*H) * P. NOTE(review): here Subtract/Multiply appear to
    // mutate 'id' in place, unlike the value-returning Multiply above —
    // relies on FxMatrixF's in-place operator semantics.
    FxMatrixF id = new FxMatrixF(2, 2) { Data = new float[] { 1, 0, 0, 1 } };
    id.Subtract(k);
    id.Multiply(m_p);
    m_p = id;

    // return latest estimate
    return m_x[0];
}
/// <summary>
/// Starts Kinect capture: opens the default sensor, wires up a combined
/// depth+color frame reader, and allocates the buffers the frame callback
/// writes into. Shows an error box when no sensor is available.
/// </summary>
private void toolStripButton1_Click(object sender, EventArgs e)
{
    // Stopwatch used elsewhere for FPS calculation.
    this.stopwatch = new Stopwatch();

    // For Alpha, one sensor is supported.
    kinectSensor = KinectSensor.GetDefault();
    if (kinectSensor == null)
    {
        MessageBox.Show("Error: failed to open kinect sensor");
        return;
    }

    kinectSensor.Open();

    var depthFrameDesc = kinectSensor.DepthFrameSource.FrameDescription;

    // Get the coordinate mapper.
    coordinateMapper = kinectSensor.CoordinateMapper;

    // create a new particle depth infos
    #region Point Cloud
#if false
    List<FxVector3f> Points = new List<FxVector3f>();
    List<FxVector3f> Colors = new List<FxVector3f>();
    for (int x = 0; x < 512; x += 2)
    {
        for (int y = 0; y < 420; y += 2)
        {
            for (int z = 0; z < 1; z++)
            {
                FxVector3f p;
                p.x = x * 0.1f;
                p.z = y * 0.1f;
                p.y = (float)Math.Log(p.x * p.x * p.x + p.z * p.z * p.z - 3 * p.x - 3 * p.z);
                Points.Add(p);
                Colors.Add(rand.NextFxVector3f());
            }
        }
    }
    PointCloud pc = new PointCloud(Points, Colors);
    /// add the mesh to the engine mesh list
    Engine.g_MeshManager.AddMesh(pc);
#endif
    #endregion

    // Open the reader for combined depth + color frames.
    reader = kinectSensor.OpenMultiSourceFrameReader(FrameSourceTypes.Depth | FrameSourceTypes.Color);

    // Allocate space for the pixels being received and converted.
    int pixelCount = depthFrameDesc.Width * depthFrameDesc.Height;
    frameData = new ushort[pixelCount];
    depthImageMatrix = new FxMatrixF(depthFrameDesc.Width, depthFrameDesc.Height);
    depthImageMatrixAve = depthImageMatrix.Copy();
    colorPoints = new ColorSpacePoint[pixelCount];

    reader.MultiSourceFrameArrived += reader_MultiSourceFrameArrived;
}
/// <summary>
/// Initializes the blob tracker from the first captured frame, seeding the
/// background model and the downsampled foreground grid.
/// </summary>
/// <param name="firstFrame">The first frame; seeds the background model.</param>
public FxBlobTracker(FxMatrixF firstFrame)
{
    ListBlobs = new List<FxBlob>();
    numProcessingFrames = 0;

    // Background statistics: m starts as the first frame, s as a small
    // constant spread for every pixel.
    m = firstFrame.Copy();
    s = new FxMatrixF(firstFrame.Width, firstFrame.Height, 0.05f);

    // Comparing against -1 forces a mask with all 1.
    G = m != -1;

    // Update rate for the background model.
    a = 0.005f;

    // Downsampled mask: each small cell aggregates a step_w x step_h patch
    // of the full-resolution mask.
    G_small = new FxMatrixMask(G_small_width, G_small_height);
    step_w = (int)Math.Ceiling(G.Width / (float)G_small_width);
    step_h = (int)Math.Ceiling(G.Height / (float)G_small_height);

    // Presumably a cell counts as set when at least half its pixels are
    // set — TODO confirm against the cG usage.
    cG_thd = step_w * step_h / 2;
}