// Advances the pyramidal Lucas-Kanade tracker by one frame for the single
// tracked point (index 0) and, while tracking is valid, publishes the new
// location into imagePoint. All work happens under `mutex` because the
// tracking buffers are shared state.
// NOTE(review): assumes _curFrame was already populated by the caller and
// that _prev_grey/_prev_pyramid hold the previous frame — confirm call order.
private void ProcessLucasKanade()
{
    lock (mutex)
    {
        // LK operates on grayscale; convert the current BGR frame in place.
        CvImageWrapper.ConvertImageColor(_curFrame, _grey, ColorConversion.BGR2GRAY);

        // The previous estimate becomes the "last" point; cvCalcOpticalFlowPyrLK
        // writes the new estimate into _current_track_points.
        SwapPoints(ref _current_track_points[0], ref _last_track_points[0]);

        // Track 1 point from _prev_grey to _grey using 3 pyramid levels.
        // Per-point success/failure is reported through _status.
        cvCalcOpticalFlowPyrLK(_prev_grey._rawPtr, _grey._rawPtr, _prev_pyramid._rawPtr, _pyramid._rawPtr, _last_track_points, _current_track_points, 1, _pwinsz, 3, _status, null, _criteria, _flowflags);

        // Status 0 means the feature was lost; flag the tracker state once.
        if (!state.Equals(AHMTrackingState.NoFeature) && _status[0] == 0)
        {
            SetState(AHMTrackingState.NoFeature);
        }

        // Clamp the per-frame displacement of the tracked point.
        LimitTPDelta(ref _current_track_points[0], _last_track_points[0]);
        //CvPoint2D32f p = _current_track_points[0];

        // Current image/pyramid become the "previous" pair for the next frame.
        SwapImages(ref _grey, ref _prev_grey);
        SwapImages(ref _pyramid, ref _prev_pyramid);

        // Only publish the result while the tracker still has a feature.
        if (!state.Equals(AHMTrackingState.NoFeature))
        {
            imagePoint.X = _current_track_points[0].x;
            imagePoint.Y = _current_track_points[0].y;
        }
    }
}
// Advances the pyramidal Lucas-Kanade tracker by one frame for all
// NumberOfTrackingPoints points and, while tracking is valid, publishes the
// mouse point (index 0) into imagePoint and both eye points into
// eyeImagePoints. All work happens under `mutex` because the tracking
// buffers are shared state.
// NOTE(review): assumes _curFrame was already populated by the caller and
// that _prev_grey/_prev_pyramid hold the previous frame — confirm call order.
private void ProcessLucasKanade()
{
    lock (mutex)
    {
        // LK operates on grayscale; convert the current BGR frame in place.
        CvImageWrapper.ConvertImageColor(_curFrame, _grey, ColorConversion.BGR2GRAY);

        // Previous estimates become the "last" points; cvCalcOpticalFlowPyrLK
        // writes the new estimates into _current_track_points.
        for (int i = 0; i < NumberOfTrackingPoints; ++i)
        {
            SwapPoints(ref _current_track_points[i], ref _last_track_points[i]);
        }

        // Track all points from _prev_grey to _grey using 3 pyramid levels.
        // Per-point success/failure is reported through _status.
        cvCalcOpticalFlowPyrLK(_prev_grey._rawPtr, _grey._rawPtr, _prev_pyramid._rawPtr, _pyramid._rawPtr, _last_track_points, _current_track_points, NumberOfTrackingPoints, _pwinsz, 3, _status, null, _criteria, _flowflags);

        // If any point was lost, flag the tracker state once and stop scanning.
        // (break added for consistency with Process(), which also stops on the
        // first lost point — repeated SetState calls were redundant.)
        if (!state.Equals(AHMTrackingState.NoFeature))
        {
            for (int i = 0; i < NumberOfTrackingPoints; ++i)
            {
                if (_status[i] == 0)
                {
                    SetState(AHMTrackingState.NoFeature);
                    break;
                }
            }
        }

        // Clamp the per-frame displacement of every tracked point.
        for (int i = 0; i < NumberOfTrackingPoints; ++i)
        {
            LimitTPDelta(ref _current_track_points[i], _last_track_points[i]);
        }
        //CvPoint2D32f p = _current_track_points[0];

        // Current image/pyramid become the "previous" pair for the next frame.
        SwapImages(ref _grey, ref _prev_grey);
        SwapImages(ref _pyramid, ref _prev_pyramid);

        // Only publish results while the tracker still has its features.
        if (!state.Equals(AHMTrackingState.NoFeature))
        {
            imagePoint.X = _current_track_points[0].x;
            imagePoint.Y = _current_track_points[0].y;
            eyeImagePoints[0].X = _current_track_points[LeftEyePointIndex].x;
            eyeImagePoints[0].Y = _current_track_points[LeftEyePointIndex].y;
            eyeImagePoints[1].X = _current_track_points[RightEyePointIndex].x;
            eyeImagePoints[1].Y = _current_track_points[RightEyePointIndex].y;
        }
    }
}
// Processes one captured frame: (re)acquires the eye/mouse tracking points
// via eyeLocator when necessary, advances the pyramidal Lucas-Kanade tracker
// for all points, and publishes imagePoint, eyeImagePoints and
// extraTrackingInfo while tracking remains valid.
//
// frames: capture buffers; only frames[0] is consumed and it must match
//         imageSize exactly.
// Throws ArgumentNullException when frames is null or empty, and
// ArgumentException when the frame is null or the wrong size. (Both derive
// from Exception, so existing catch(Exception) callers are unaffected —
// the original threw bare Exception here.)
public override void Process(Bitmap[] frames)
{
    extraTrackingInfo = null;

    // Guard the array itself; the original would have surfaced this as a
    // NullReferenceException / IndexOutOfRangeException.
    if (frames == null || frames.Length == 0)
    {
        throw new ArgumentNullException(nameof(frames));
    }

    Bitmap frame = frames[0];
    if (frame == null)
    {
        throw new ArgumentException("Frame is null!", nameof(frames));
    }
    if (frame.Width != imageSize.Width || frame.Height != imageSize.Height)
    {
        throw new ArgumentException("Invalid frame sizes", nameof(frames));
    }

    // LK operates on grayscale; wrap and convert the incoming frame.
    _curFrame.setImage(frame);
    CvImageWrapper.ConvertImageColor(_curFrame, _grey, ColorConversion.BGR2GRAY);

    // No valid tracking points yet: feed the locator until it finds the
    // face/eye features, then seed the tracker and record the offsets from
    // each tracking point to the actual eye position.
    if (!validTrackPoints)
    {
        eyeLocator.AddImage(frame);
        if (eyeLocator.TrackingPointsFound)
        {
            _current_track_points[MousePointIndex] = eyeLocator.MouseTrackingPoint;
            _current_track_points[LeftEyePointIndex] = eyeLocator.LeftEyeTrackingPoint;
            _current_track_points[RightEyePointIndex] = eyeLocator.RightEyeTrackingPoint;
            leftEyeOffset.x = eyeLocator.LeftEyePoint.x - eyeLocator.LeftEyeTrackingPoint.x;
            leftEyeOffset.y = eyeLocator.LeftEyePoint.y - eyeLocator.LeftEyeTrackingPoint.y;
            rightEyeOffset.x = eyeLocator.RightEyePoint.x - eyeLocator.RightEyeTrackingPoint.x;
            rightEyeOffset.y = eyeLocator.RightEyePoint.y - eyeLocator.RightEyeTrackingPoint.y;
            validTrackPoints = true;
        }
        else
        {
            // Keep prompting the user through the setup UI.
            trackingSuiteAdapter.SendMessage(InitMessage);
        }
    }

    // Previous estimates become the "last" points; cvCalcOpticalFlowPyrLK
    // writes the new estimates into _current_track_points.
    for (int i = 0; i < NumberOfTrackingPoints; ++i)
    {
        SwapPoints(ref _current_track_points[i], ref _last_track_points[i]);
    }

    // Track all points from _prev_grey to _grey using 3 pyramid levels.
    // Per-point success/failure is reported through _status.
    cvCalcOpticalFlowPyrLK(_prev_grey._rawPtr, _grey._rawPtr, _prev_pyramid._rawPtr, _pyramid._rawPtr, _last_track_points, _current_track_points, NumberOfTrackingPoints, _pwinsz, 3, _status, null, _criteria, _flowflags);

    // Any lost point invalidates the whole set: drop back into setup mode
    // and reset the locator so the points can be re-acquired.
    if (validTrackPoints)
    {
        for (int i = 0; i < NumberOfTrackingPoints; ++i)
        {
            if (_status[i] == 0)
            {
                validTrackPoints = false;
                trackingSuiteAdapter.ToggleSetup(true);
                eyeLocator.Reset();
                break;
            }
        }
    }

    // Clamp the per-frame displacement of every tracked point.
    for (int i = 0; i < NumberOfTrackingPoints; ++i)
    {
        LimitTPDelta(ref _current_track_points[i], _last_track_points[i]);
    }

    // Current image/pyramid become the "previous" pair for the next frame.
    SwapImages(ref _grey, ref _prev_grey);
    SwapImages(ref _pyramid, ref _prev_pyramid);

    // Publish results only while tracking is valid. The eye positions handed
    // to the blink module are the tracking points plus the recorded offsets.
    if (validTrackPoints)
    {
        extraTrackingInfo = new BlinkLinkClickControlModule.BlinkLinkCMSExtraTrackingInfo(
            new CvPoint2D32f(_current_track_points[LeftEyePointIndex].x + leftEyeOffset.x, _current_track_points[LeftEyePointIndex].y + leftEyeOffset.y),
            new CvPoint2D32f(_current_track_points[RightEyePointIndex].x + rightEyeOffset.x, _current_track_points[RightEyePointIndex].y + rightEyeOffset.y));

        imagePoint.X = _current_track_points[MousePointIndex].x;
        imagePoint.Y = _current_track_points[MousePointIndex].y;
        eyeImagePoints[0].X = _current_track_points[LeftEyePointIndex].x;
        eyeImagePoints[0].Y = _current_track_points[LeftEyePointIndex].y;
        eyeImagePoints[1].X = _current_track_points[RightEyePointIndex].x;
        eyeImagePoints[1].Y = _current_track_points[RightEyePointIndex].y;
    }
}