Example #1
        private void ProcessLucasKanade()
        {
            lock (mutex)
            {
                CvImageWrapper.ConvertImageColor(_curFrame, _grey, ColorConversion.BGR2GRAY);

                SwapPoints(ref _current_track_points[0], ref _last_track_points[0]);

                cvCalcOpticalFlowPyrLK(_prev_grey._rawPtr, _grey._rawPtr, _prev_pyramid._rawPtr,
                                       _pyramid._rawPtr, _last_track_points, _current_track_points, 1, _pwinsz, 3,
                                       _status, null, _criteria, _flowflags);

                if (!state.Equals(AHMTrackingState.NoFeature) && _status[0] == 0)
                {
                    SetState(AHMTrackingState.NoFeature);
                }


                LimitTPDelta(ref _current_track_points[0], _last_track_points[0]);

                //CvPoint2D32f p = _current_track_points[0];


                SwapImages(ref _grey, ref _prev_grey);
                SwapImages(ref _pyramid, ref _prev_pyramid);

                if (!state.Equals(AHMTrackingState.NoFeature))
                {
                    imagePoint.X = _current_track_points[0].x;
                    imagePoint.Y = _current_track_points[0].y;
                }
            }
        }
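
The NoFeature transition above is driven by the status buffer that cvCalcOpticalFlowPyrLK fills in: a zero byte at index i means point i could not be tracked into the new frame. A minimal sketch of that check (FlowStatus / AnyFeatureLost are hypothetical names, not part of the wrapper):

        // Hypothetical helper: scan the LK status buffer for lost features.
        static class FlowStatus
        {
            public static bool AnyFeatureLost(byte[] status, int count)
            {
                for (int i = 0; i < count; i++)
                {
                    if (status[i] == 0)   // 0 = the feature was not found in the new frame
                    {
                        return true;
                    }
                }
                return false;
            }
        }
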
Example #2
        public override void Init(Size[] imageSizes)
        {
            Clean();

            _flowflags      |= CMSConstants.CV_LKFLOW_PYR_A_READY;
            validTrackPoints = false;
            _pwinsz          = new CvSize(10, 10);
            _status          = new byte[NumberOfTrackingPoints];
            imageSize.Width  = imageSizes[0].Width;
            imageSize.Height = imageSizes[0].Height;

            _last_track_points    = new CvPoint2D32f[NumberOfTrackingPoints];
            _current_track_points = new CvPoint2D32f[NumberOfTrackingPoints];

            _criteria = new CvTermCriteria(CMSConstants.CV_TERMCRIT_ITER | CMSConstants.CV_TERMCRIT_EPS, 20, 0.03);

            _curFrame = CvImageWrapper.CreateImage(imageSize, PixelDepth, PixelColorChannels);

            _grey = CvImageWrapper.CreateImage(imageSize, PixelDepth, PixelChannels);

            _prev_grey = CvImageWrapper.CreateImage(imageSize, PixelDepth, PixelChannels);

            _pyramid = CvImageWrapper.CreateImage(imageSize, PixelDepth, PixelChannels);

            _prev_pyramid = CvImageWrapper.CreateImage(imageSize, PixelDepth, PixelChannels);

            _swap_temp = CvImageWrapper.CreateImage(imageSize, PixelDepth, PixelChannels);

            eyeLocator = new EyeLocator(EyeLocationImageCount);
            eyeLocator.Reset();

            eyeImagePoints = new PointF[2];

            CMSTrackingSuiteAdapter.SendMessage(InitMessage);
        }
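
The CvTermCriteria above combines CV_TERMCRIT_ITER and CV_TERMCRIT_EPS, so the iterative search for each point stops after 20 iterations or once the per-iteration refinement drops below 0.03 pixels, whichever comes first. A sketch of that stopping rule (ShouldStop is illustrative only):

        static class TermCriteria
        {
            // Illustrative only: with ITER | EPS both limits apply and the first one hit wins.
            public static bool ShouldStop(int iteration, double lastShift,
                                          int maxIterations = 20, double epsilon = 0.03)
            {
                return iteration >= maxIterations || lastShift < epsilon;
            }
        }
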
Example #3
        private void CaptureImages(PointF [] imagePoints, Bitmap frame)
        {
            try
            {
                if (curFrame == null)
                {
                    curFrame = new CvImageWrapper(frame);
                }
                else
                {
                    curFrame.setImage(frame);
                }

                for (int i = 0; i < imagePoints.Length; i++)
                {
                    PointF imagePoint = imagePoints[i];

                    CvRect cropDimensions = new CvRect();
                    cropDimensions.x      = (int)imagePoint.X - obsSize / 2;
                    cropDimensions.y      = (int)imagePoint.Y - obsSize / 2;
                    cropDimensions.width  = obsSize;
                    cropDimensions.height = obsSize;

                    CvImageWrapper curObs = curFrame.cropSubImage(cropDimensions);

                    this.templatesList[i].Add(curObs);
                }
            }
            catch (Exception)
            {
                // Swallow per-frame capture errors so one bad frame does not abort template collection.
            }
        }
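
CaptureImages centres an obsSize x obsSize crop on each tracked point; it does no bounds check itself (the other template-capture examples below guard the borders before cropping). A hedged sketch of the same rectangle construction, using System.Drawing.Rectangle and clamping to the frame for safety (CenteredCrop is hypothetical):

        using System;
        using System.Drawing;

        static class Cropping
        {
            // Hypothetical helper: the obsSize x obsSize window centred on a tracked
            // point, clamped so it never leaves the frame.
            public static Rectangle CenteredCrop(PointF center, int obsSize, Size frame)
            {
                int x = (int)center.X - obsSize / 2;
                int y = (int)center.Y - obsSize / 2;
                x = Math.Max(0, Math.Min(x, frame.Width - obsSize));
                y = Math.Max(0, Math.Min(y, frame.Height - obsSize));
                return new Rectangle(x, y, obsSize, obsSize);
            }
        }
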
Example #4
        public override void Clean()
        {
            _flowflags = 0;
            if (this._curFrame != null)
            {
                CvImageWrapper.ReleaseImage(_curFrame);
            }
            if (_grey != null)
            {
                CvImageWrapper.ReleaseImage(_grey);
            }
            if (_prev_grey != null)
            {
                CvImageWrapper.ReleaseImage(_prev_grey);
            }
            if (_pyramid != null)
            {
                CvImageWrapper.ReleaseImage(_pyramid);
            }
            if (_prev_pyramid != null)
            {
                CvImageWrapper.ReleaseImage(_prev_pyramid);
            }
            if (_swap_temp != null)
            {
                CvImageWrapper.ReleaseImage(_swap_temp);
            }
            if (eyeLocator != null)
            {
                eyeLocator.Dispose();
            }
        }
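
The repeated null-check/ReleaseImage pairs in Clean() could be folded into one generic helper. A sketch, assuming release is a plain static call such as CvImageWrapper.ReleaseImage (ReleaseAndClear is hypothetical):

        using System;

        static class Native
        {
            // Hypothetical helper: release a native-backed resource and clear the field,
            // which also makes a second Clean() call a no-op.
            public static void ReleaseAndClear<T>(ref T resource, Action<T> release) where T : class
            {
                if (resource != null)
                {
                    release(resource);
                    resource = null;
                }
            }
        }

        // Usage sketch: Native.ReleaseAndClear(ref _grey, CvImageWrapper.ReleaseImage);
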
Example #5
        private void InitStandard(int imageWidth, int imageHeight)
        {
            lock (mutex)
            {
                _flowflags |= CMSConstants.CV_LKFLOW_PYR_A_READY;
                //validTrackPoint = false;
                _pwinsz          = new CvSize(10, 10);
                _status          = new byte[1];
                imageSize.Width  = imageWidth;
                imageSize.Height = imageHeight;

                _last_track_points    = new CvPoint2D32f[1];
                _current_track_points = new CvPoint2D32f[1];

                _criteria = new CvTermCriteria(CMSConstants.CV_TERMCRIT_ITER | CMSConstants.CV_TERMCRIT_EPS, 20, 0.03);

                _curFrame = CvImageWrapper.CreateImage(imageSize, PIXEL_DEPTH, PIXEL_COLOR_CHANNELS);

                _grey = CvImageWrapper.CreateImage(imageSize, PIXEL_DEPTH, PIXEL_CHANNELS);

                _prev_grey = CvImageWrapper.CreateImage(imageSize, PIXEL_DEPTH, PIXEL_CHANNELS);

                _pyramid = CvImageWrapper.CreateImage(imageSize, PIXEL_DEPTH, PIXEL_CHANNELS);

                _prev_pyramid = CvImageWrapper.CreateImage(imageSize, PIXEL_DEPTH, PIXEL_CHANNELS);

                _swap_temp = CvImageWrapper.CreateImage(imageSize, PIXEL_DEPTH, PIXEL_CHANNELS);
            }
        }
Example #6
        public override bool Process(PointF [] imagePoints, System.Drawing.Bitmap [] frames)
        {
            lock (mutex)
            {
                if (finished)
                {
                    return(false);
                }

                for (int i = 0; i < imagePoints.Length; i++)
                {
                    curPoints[i].X = imagePoints[i].X;
                    curPoints[i].Y = imagePoints[i].Y;
                }
                if (curFrame == null)
                {
                    curFrame = new CvImageWrapper(frames[0]);
                }
                else
                {
                    curFrame.setImage(frames[0]);
                }
                return(true);
            }
        }
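
Process above creates the CvImageWrapper once and only updates it on later frames. A generic sketch of that create-once-then-update pattern (Reuse.GetOrUpdate is hypothetical):

        using System;

        static class Reuse
        {
            // Hypothetical generic form of the curFrame handling above.
            public static T GetOrUpdate<T>(T existing, Func<T> create, Action<T> update) where T : class
            {
                if (existing == null)
                {
                    return create();
                }
                update(existing);
                return existing;
            }
        }

        // Usage sketch:
        // curFrame = Reuse.GetOrUpdate(curFrame, () => new CvImageWrapper(frames[0]), f => f.setImage(frames[0]));
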
Example #7
        private void CleanStandard()
        {
            lock (mutex)
            {
                _flowflags = 0;
                if (this._curFrame != null)
                {
                    CvImageWrapper.ReleaseImage(_curFrame);
                }
                if (_grey != null)
                {
                    CvImageWrapper.ReleaseImage(_grey);
                }
                if (_prev_grey != null)
                {
                    CvImageWrapper.ReleaseImage(_prev_grey);
                }
                if (_pyramid != null)
                {
                    CvImageWrapper.ReleaseImage(_pyramid);
                }
                if (_prev_pyramid != null)
                {
                    CvImageWrapper.ReleaseImage(_prev_pyramid);
                }
                if (_swap_temp != null)
                {
                    CvImageWrapper.ReleaseImage(_swap_temp);
                }
            }
        }
Example #8
        private void CleanAHM()
        {
            lock (mutex)
            {
                if (_AHMBackFeature != null)
                {
                    CvImageWrapper.ReleaseImage(_AHMBackFeature);
                    _AHMBackFeature = null;
                }

                if (_AHMCurFeature != null)
                {
                    CvImageWrapper.ReleaseImage(_AHMCurFeature);
                    _AHMCurFeature = null;
                }

                if (_AHMRealtimeObs != null)
                {
                    CvImageWrapper.ReleaseImage(_AHMRealtimeObs);
                    _AHMRealtimeObs = null;
                }

                if (cameraMouseAssist != null)
                {
                    cameraMouseAssist.Dispose();
                    cameraMouseAssist = null;
                }

                if (ahmSetup != null)
                {
                    ahmSetup.Clean();
                    ahmSetup = null;
                }
            }
        }
Example #9
        private void SwapImages(ref CvImageWrapper a, ref CvImageWrapper b)
        {
            CvImageWrapper temp;

            temp = a;
            a    = b;
            b    = temp;
        }
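
SwapImages and the SwapPoints calls in the optical-flow examples are the same three-assignment swap; a single generic helper would cover both (sketch only):

        static class Util
        {
            // Generic swap covering both SwapImages and SwapPoints (sketch only).
            public static void Swap<T>(ref T a, ref T b)
            {
                T temp = a;
                a = b;
                b = temp;
            }
        }
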
Example #10
        public override void ProcessKeys(Keys keys)
        {
            if (finished)
            {
                return;
            }

            lock (mutex)
            {
                if (keys.Equals(Keys.Shift) || keys.Equals(Keys.ShiftKey) ||
                    keys.Equals(Keys.LShiftKey) || keys.Equals(Keys.RShiftKey))
                {
                    for (int i = 0; i < curPoints.Length; i++)
                    {
                        PointF curPoint = curPoints[i];

                        // Check the frame before touching its size, then make sure the
                        // template window stays inside it.
                        if (curFrame == null)
                        {
                            return;
                        }

                        if (curPoint.X <= obsSize / 2 || curPoint.X >= curFrame.Size.Width - obsSize / 2)
                        {
                            return;
                        }
                        if (curPoint.Y <= obsSize / 2 || curPoint.Y >= curFrame.Size.Height - obsSize / 2)
                        {
                            return;
                        }


                        CvRect cropDimensions = new CvRect();
                        cropDimensions.x      = (int)curPoint.X - obsSize / 2;
                        cropDimensions.y      = (int)curPoint.Y - obsSize / 2;
                        cropDimensions.width  = obsSize;
                        cropDimensions.height = obsSize;

                        CvImageWrapper curObs = curFrame.cropSubImage(cropDimensions);

                        this.templatesList[i].Add(curObs);
                        numKeyPressed++;
                        if (numKeyPressed == this.numTemplates)
                        {
                            Thread.Sleep(300);
                            finished = true;
                        }
                        else
                        {
                            SendMessage();
                        }
                    }
                }
            }
        }
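
The guard in ProcessKeys requires the tracked point to sit more than obsSize / 2 pixels away from every frame edge, so the template crop stays inside the image. A compact sketch of that test (FitsInFrame is hypothetical):

        using System.Drawing;

        static class TemplateBounds
        {
            // Hypothetical mirror of the guard above: the obsSize window centred on p
            // must lie fully inside the frame.
            public static bool FitsInFrame(PointF p, Size frame, int obsSize)
            {
                int half = obsSize / 2;
                return p.X > half && p.X < frame.Width - half &&
                       p.Y > half && p.Y < frame.Height - half;
            }
        }
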
Example #11
        public override void Clean()
        {
            foreach (List <CvImageWrapper> templates in templatesList)
            {
                templates.Clear();
            }

            if (curFrame != null)
            {
                CvImageWrapper.ReleaseImage(curFrame);
            }
            curFrame = null;
        }
Example #12
        private void ProcessLucasKanade()
        {
            lock (mutex)
            {
                CvImageWrapper.ConvertImageColor(_curFrame, _grey, ColorConversion.BGR2GRAY);

                for (int i = 0; i < NumberOfTrackingPoints; ++i)
                {
                    SwapPoints(ref _current_track_points[i], ref _last_track_points[i]);
                }


                cvCalcOpticalFlowPyrLK(_prev_grey._rawPtr, _grey._rawPtr, _prev_pyramid._rawPtr,
                                       _pyramid._rawPtr, _last_track_points, _current_track_points, NumberOfTrackingPoints, _pwinsz, 3,
                                       _status, null, _criteria, _flowflags);

                if (!state.Equals(AHMTrackingState.NoFeature))
                {
                    for (int i = 0; i < NumberOfTrackingPoints; ++i)
                    {
                        if (_status[i] == 0)
                        {
                            SetState(AHMTrackingState.NoFeature);
                        }
                    }
                }

                for (int i = 0; i < NumberOfTrackingPoints; i++)
                {
                    LimitTPDelta(ref _current_track_points[i], _last_track_points[i]);
                }

                //CvPoint2D32f p = _current_track_points[0];


                SwapImages(ref _grey, ref _prev_grey);
                SwapImages(ref _pyramid, ref _prev_pyramid);

                if (!state.Equals(AHMTrackingState.NoFeature))
                {
                    imagePoint.X = _current_track_points[0].x;
                    imagePoint.Y = _current_track_points[0].y;

                    eyeImagePoints[0].X = _current_track_points[LeftEyePointIndex].x;
                    eyeImagePoints[0].Y = _current_track_points[LeftEyePointIndex].y;

                    eyeImagePoints[1].X = _current_track_points[RightEyePointIndex].x;
                    eyeImagePoints[1].Y = _current_track_points[RightEyePointIndex].y;
                }
            }
        }
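
LimitTPDelta is not shown in these examples; judging only by its name, it plausibly caps how far a tracking point may move between consecutive frames. The following clamp is purely a guess at that behaviour, not the project's implementation:

        using System;

        static class DeltaClamp
        {
            // Guess only: clamp a per-frame displacement to +/- maxDelta per axis.
            public static float Clamp(float current, float previous, float maxDelta)
            {
                float delta = Math.Max(-maxDelta, Math.Min(maxDelta, current - previous));
                return previous + delta;
            }
        }
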
Example #13
        private void CleanAHM()
        {
            lock (mutex)
            {
                if (_AHMBackFeature != null)
                {
                    CvImageWrapper.ReleaseImage(_AHMBackFeature);
                    _AHMBackFeature = null;
                }

                if (_AHMCurFeature != null)
                {
                    CvImageWrapper.ReleaseImage(_AHMCurFeature);
                    _AHMCurFeature = null;
                }

                if (_AHMRealtimeObs != null)
                {
                    CvImageWrapper.ReleaseImage(_AHMRealtimeObs);
                    _AHMRealtimeObs = null;
                }

                if (cameraMouseAssists != null)
                {
                    for (int i = 0; i < NumberOfTrackingPoints; i++)
                    {
                        if (cameraMouseAssists[i] != null)
                        {
                            cameraMouseAssists[i].Dispose();
                            cameraMouseAssists[i] = null;
                        }
                    }
                }

                if (ahmSetup != null)
                {
                    ahmSetup.Clean();
                    ahmSetup = null;
                }
            }
        }
Example #14
        public override void Clean()
        {
            lock (mutex)
            {
                if (curFrame != null)
                {
                    //foreach (CvImageWrapper img in templates)
                    //CvImageWrapper.ReleaseImage(img);
                    CvImageWrapper.ReleaseImage(curFrame);
                    curFrame = null;
                }

                foreach (List <CvImageWrapper> templates in templatesList)
                {
                    templates.Clear();
                }

                if (countdownTimer != null)
                {
                    this.countdownTimer.Dispose();
                }
            }
        }
Example #15
        public override void Process(Bitmap[] frames)
        {
            extraTrackingInfo = null;

            Bitmap frame = frames[0];

            if (frame == null)
            {
                throw new Exception("Frame is null!");
            }

            if (frame.Width != imageSize.Width || frame.Height != imageSize.Height)
            {
                throw new Exception("Invalid frame sizes");
            }


            _curFrame.setImage(frame);

            CvImageWrapper.ConvertImageColor(_curFrame, _grey, ColorConversion.BGR2GRAY);

            if (!validTrackPoints)
            {
                eyeLocator.AddImage(frame);

                if (eyeLocator.TrackingPointsFound)
                {
                    _current_track_points[MousePointIndex]    = eyeLocator.MouseTrackingPoint;
                    _current_track_points[LeftEyePointIndex]  = eyeLocator.LeftEyeTrackingPoint;
                    _current_track_points[RightEyePointIndex] = eyeLocator.RightEyeTrackingPoint;

                    leftEyeOffset.x = eyeLocator.LeftEyePoint.x - eyeLocator.LeftEyeTrackingPoint.x;
                    leftEyeOffset.y = eyeLocator.LeftEyePoint.y - eyeLocator.LeftEyeTrackingPoint.y;

                    rightEyeOffset.x = eyeLocator.RightEyePoint.x - eyeLocator.RightEyeTrackingPoint.x;
                    rightEyeOffset.y = eyeLocator.RightEyePoint.y - eyeLocator.RightEyeTrackingPoint.y;

                    validTrackPoints = true;
                }
                else
                {
                    trackingSuiteAdapter.SendMessage(InitMessage);
                }
            }

            for (int i = 0; i < NumberOfTrackingPoints; ++i)
            {
                SwapPoints(ref _current_track_points[i], ref _last_track_points[i]);
            }

            cvCalcOpticalFlowPyrLK(_prev_grey._rawPtr, _grey._rawPtr, _prev_pyramid._rawPtr,
                                   _pyramid._rawPtr, _last_track_points, _current_track_points, NumberOfTrackingPoints, _pwinsz, 3,
                                   _status, null, _criteria, _flowflags);

            if (validTrackPoints)
            {
                for (int i = 0; i < NumberOfTrackingPoints; ++i)
                {
                    if (_status[i] == 0)
                    {
                        validTrackPoints = false;
                        trackingSuiteAdapter.ToggleSetup(true);
                        eyeLocator.Reset();
                        break;
                    }
                }
            }

            for (int i = 0; i < NumberOfTrackingPoints; ++i)
            {
                LimitTPDelta(ref _current_track_points[i], _last_track_points[i]);
            }


            SwapImages(ref _grey, ref _prev_grey);
            SwapImages(ref _pyramid, ref _prev_pyramid);

            if (validTrackPoints)
            {
                extraTrackingInfo = new BlinkLinkClickControlModule.BlinkLinkCMSExtraTrackingInfo(
                    new CvPoint2D32f(_current_track_points[LeftEyePointIndex].x + leftEyeOffset.x,
                                     _current_track_points[LeftEyePointIndex].y + leftEyeOffset.y),
                    new CvPoint2D32f(_current_track_points[RightEyePointIndex].x + rightEyeOffset.x,
                                     _current_track_points[RightEyePointIndex].y + rightEyeOffset.y));

                imagePoint.X = _current_track_points[MousePointIndex].x;
                imagePoint.Y = _current_track_points[MousePointIndex].y;

                eyeImagePoints[0].X = _current_track_points[LeftEyePointIndex].x;
                eyeImagePoints[0].Y = _current_track_points[LeftEyePointIndex].y;

                eyeImagePoints[1].X = _current_track_points[RightEyePointIndex].x;
                eyeImagePoints[1].Y = _current_track_points[RightEyePointIndex].y;
            }
        }
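
Example #15 records, at lock-on, where each eye sits relative to its tracking point (leftEyeOffset, rightEyeOffset) and re-adds that offset every frame when building the BlinkLink info. A sketch of that bookkeeping using System.Drawing.PointF instead of CvPoint2D32f:

        using System.Drawing;

        static class EyeOffsets
        {
            // At lock-on: remember the eye position relative to its tracking point.
            public static PointF Capture(PointF eyePoint, PointF trackingPoint)
            {
                return new PointF(eyePoint.X - trackingPoint.X, eyePoint.Y - trackingPoint.Y);
            }

            // Every later frame: re-apply the stored offset to the tracked point.
            public static PointF Apply(PointF trackingPoint, PointF offset)
            {
                return new PointF(trackingPoint.X + offset.X, trackingPoint.Y + offset.Y);
            }
        }
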
Example #16
        private void ProcessAHM()
        {
            lock (mutex)
            {
                if (cameraMouseAssist == null)
                {
                    return;
                }

                double ccMinProjSqdDist = 0.0;
                double ccTanSqdDist     = 0.0;

                try
                {
                    if (cameraMouseAssist != null && cameraMouseAssist.isReady())
                    {
                        if (_AHMWeights == null || _AHMWeights.Length != this.numTemplates)
                        {
                            _AHMWeights = new double[numTemplates];
                        }

                        float relX = 0.0f;
                        float relY = 0.0f;

                        if ((imagePoint.X >= obsSize / 2 + 10) && (imagePoint.Y >= obsSize / 2 + 10) &&
                            ((imagePoint.X + obsSize / 2 + 10) < imageSize.Width) &&
                            ((imagePoint.Y + obsSize / 2 + 10) < imageSize.Height))
                        {
                            if (this._AHMRealtimeObs == null)
                            {
                                this._AHMRealtimeObs = CvImageWrapper.CreateImage(new CvSize(obsSize + 20, obsSize + 20), 8, 3);
                            }
                            if (this._AHMBackFeature == null)
                            {
                                this._AHMBackFeature = CvImageWrapper.CreateImage(new CvSize(obsSize, obsSize), 8, 3);
                            }
                            if (this._AHMCurFeature == null)
                            {
                                this._AHMCurFeature = CvImageWrapper.CreateImage(new CvSize(obsSize, obsSize), 8, 3);
                            }

                            _AHMRect.x      = (int)imagePoint.X - obsSize / 2 - 10;
                            _AHMRect.y      = (int)imagePoint.Y - obsSize / 2 - 10;
                            _AHMRect.height = obsSize + 20;
                            _AHMRect.width  = obsSize + 20;

                            _curFrame.cropSubImage(_AHMRect, this._AHMRealtimeObs);

                            unsafe
                            {
                                int dx = 0, dy = 0;

                                IntPtr pMinProjSqdDist = new IntPtr(&ccMinProjSqdDist);
                                IntPtr pTanSqdDist     = new IntPtr(&ccTanSqdDist);
                                IntPtr pDx             = new IntPtr(&dx);
                                IntPtr pDy             = new IntPtr(&dy);

                                long newTicks = Environment.TickCount;

                                if (newTicks - ticks > updateFrequency || updateFrequency == 0)
                                {
                                    cameraMouseAssist.computeRelativePos(pMinProjSqdDist, pTanSqdDist, this._AHMRealtimeObs._rawPtr, pDx, pDy, _AHMWeights);

                                    if (CMSLogger.CanCreateLogEvent(true, false, false, "AHMLogRealtimeEvent"))
                                    {
                                        AHMLogRealtimeEvent lEvent = new AHMLogRealtimeEvent();
                                        if (lEvent != null)
                                        {
                                            lEvent.ProjSqrdDist = (float)ccMinProjSqdDist;
                                            lEvent.TanSqrdDist  = (float)ccTanSqdDist;
                                            CMSLogger.SendLogEvent(lEvent);
                                        }
                                    }

                                    imagePoint.X = imagePoint.X + dx;
                                    imagePoint.Y = imagePoint.Y + dy;
                                    _current_track_points[0].x = imagePoint.X;
                                    _current_track_points[0].y = imagePoint.Y;

                                    ticks = newTicks;

                                    AHMLogRealtimeFeatureImagesEvent realtimeEvent = null;
                                    if (CMSLogger.CanCreateLogEvent(true, true, false, "AHMLogRealtimeFeatureImagesEvent"))
                                    {
                                        realtimeEvent = new AHMLogRealtimeFeatureImagesEvent();
                                    }
                                    if (this.extraDisplay || realtimeEvent != null)
                                    {
                                        prevExtraDisplay = true;
                                        cameraMouseAssist.retrieveBackFeature(this._AHMBackFeature._rawPtr);
                                        cameraMouseAssist.retrieveCurFeature(this._AHMCurFeature._rawPtr);

                                        extraImages[0] = _AHMBackFeature.GetBitMap().Clone() as Bitmap;
                                        extraImages[1] = _AHMCurFeature.GetBitMap().Clone() as Bitmap;

                                        extraImages[0].RotateFlip(RotateFlipType.RotateNoneFlipX);
                                        extraImages[1].RotateFlip(RotateFlipType.RotateNoneFlipX);


                                        if (realtimeEvent != null)
                                        {
                                            realtimeEvent.SetImages(extraImages[0], extraImages[1]);
                                            CMSLogger.SendLogEvent(realtimeEvent);
                                        }

                                        if (extraDisplay)
                                        {
                                            trackingSuiteAdapter.SendMessages(extraImages, new string[] { "Background", "Reconstruction" });
                                        }
                                        //trackingSuiteAdapter.SendMessages(new Bitmap[]{}, new string[] { "Current Feature", "Background Feature" });
                                    }
                                    else if (!extraDisplay && prevExtraDisplay)
                                    {
                                        prevExtraDisplay = false;
                                        trackingSuiteAdapter.SendMessages(null, null);
                                    }
                                }
                            }
                        }
                    }
                }
                catch (Exception)
                {
                    // Swallow per-frame AHM errors; processing resumes on the next frame.
                }
            }
        }
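
The updateFrequency gate above throttles how often the AHM correction runs. Environment.TickCount is a 32-bit millisecond counter that wraps to negative values after roughly 24.9 days of uptime; the sketch below keeps the tick subtraction in int (rather than widening to long as the example does) so the gate still behaves across that wrap. UpdateThrottle is a hypothetical name:

        using System;

        // Sketch of the update-frequency gate, with tick arithmetic kept in int so
        // it also behaves across the TickCount wrap to negative values.
        sealed class UpdateThrottle
        {
            private readonly int intervalMs;
            private int lastTicks = Environment.TickCount;

            public UpdateThrottle(int intervalMs)
            {
                this.intervalMs = intervalMs;
            }

            public bool ShouldUpdate()
            {
                int now = Environment.TickCount;
                if (intervalMs == 0 || unchecked(now - lastTicks) > intervalMs)
                {
                    lastTicks = now;
                    return true;
                }
                return false;
            }
        }
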
Example #17
        private void ProcessAHM()
        {
            lock (mutex)
            {
                if (cameraMouseAssists == null)
                {
                    return;
                }

                double ccMinProjSqdDist = 0.0;
                double ccTanSqdDist     = 0.0;

                try
                {
                    List <Bitmap> images = null;
                    if (showMessages)
                    {
                        images = new List <Bitmap>();
                    }

                    long newTicks = Environment.TickCount;

                    if (newTicks - ticks > updateFrequency || updateFrequency == 0)
                    {
                        ticks = newTicks;
                        for (int i = 0; i < NumberOfTrackingPoints; i++)
                        {
                            AHMCameraMouseAssist cameraMouseAssist = cameraMouseAssists[i];
                            if (cameraMouseAssist == null)
                            {
                                continue;
                            }

                            if (cameraMouseAssist != null && cameraMouseAssist.isReady())
                            {
                                if (_AHMWeights == null || _AHMWeights.Length != this.numTemplates)
                                {
                                    _AHMWeights = new double[numTemplates];
                                }

                                float relX = 0.0f;
                                float relY = 0.0f;

                                if ((this._current_track_points[i].x >= obsSize / 2 + 10) && (_current_track_points[i].y >= obsSize / 2 + 10) &&
                                    ((_current_track_points[i].x + obsSize / 2 + 10) < imageSize.Width) &&
                                    ((_current_track_points[i].y + obsSize / 2 + 10) < imageSize.Height))
                                {
                                    if (this._AHMRealtimeObs == null)
                                    {
                                        this._AHMRealtimeObs = CvImageWrapper.CreateImage(new CvSize(obsSize + 20, obsSize + 20), 8, 3);
                                    }
                                    if (this._AHMBackFeature == null)
                                    {
                                        this._AHMBackFeature = CvImageWrapper.CreateImage(new CvSize(obsSize, obsSize), 8, 3);
                                    }
                                    if (this._AHMCurFeature == null)
                                    {
                                        this._AHMCurFeature = CvImageWrapper.CreateImage(new CvSize(obsSize, obsSize), 8, 3);
                                    }

                                    _AHMRect.x      = (int)_current_track_points[i].x - obsSize / 2 - 10;
                                    _AHMRect.y      = (int)_current_track_points[i].y - obsSize / 2 - 10;
                                    _AHMRect.height = obsSize + 20;
                                    _AHMRect.width  = obsSize + 20;

                                    _curFrame.cropSubImage(_AHMRect, this._AHMRealtimeObs);

                                    unsafe
                                    {
                                        int dx = 0, dy = 0;

                                        IntPtr pMinProjSqdDist = new IntPtr(&ccMinProjSqdDist);
                                        IntPtr pTanSqdDist     = new IntPtr(&ccTanSqdDist);
                                        IntPtr pDx             = new IntPtr(&dx);
                                        IntPtr pDy             = new IntPtr(&dy);

                                        cameraMouseAssist.computeRelativePos(pMinProjSqdDist, pTanSqdDist, this._AHMRealtimeObs._rawPtr, pDx, pDy, _AHMWeights);

                                        /*
                                         * if (CMSLogger.CanCreateLogEvent(true, false, false, "AHMLogRealtimeEvent"))
                                         * {
                                         *  AHMLogRealtimeEvent lEvent = new AHMLogRealtimeEvent();
                                         *  if (lEvent != null)
                                         *  {
                                         *      lEvent.ProjSqrdDist = (float)ccMinProjSqdDist;
                                         *      lEvent.TanSqrdDist = (float)ccTanSqdDist;
                                         *      CMSLogger.SendLogEvent(lEvent);
                                         *  }
                                         * }*/


                                        _current_track_points[i].x = _current_track_points[i].x + dx;
                                        _current_track_points[i].y = _current_track_points[i].y + dy;

                                        if (i == 0)
                                        {
                                            imagePoint.X = _current_track_points[0].x;
                                            imagePoint.Y = _current_track_points[0].y;
                                        }
                                        else
                                        {
                                            eyeImagePoints[i - 1].X = (int)_current_track_points[i].x;
                                            eyeImagePoints[i - 1].Y = (int)_current_track_points[i].y;
                                        }



                                        if (showMessages)
                                        {
                                            cameraMouseAssist.retrieveBackFeature(this._AHMBackFeature._rawPtr);
                                            cameraMouseAssist.retrieveCurFeature(this._AHMCurFeature._rawPtr);

                                            images.Add(_AHMBackFeature.GetBitMap().Clone() as Bitmap);
                                            images.Add(_AHMCurFeature.GetBitMap().Clone() as Bitmap);
                                        }

                                        /*
                                         * AHMLogRealtimeFeatureImagesEvent realtimeEvent = null;
                                         * if (CMSLogger.CanCreateLogEvent(true, true, false, "AHMLogRealtimeFeatureImagesEvent"))
                                         * {
                                         *  realtimeEvent = new AHMLogRealtimeFeatureImagesEvent();
                                         * }
                                         * if (realtimeEvent != null)
                                         * {
                                         *  cameraMouseAssist.retrieveBackFeature(this._AHMBackFeature._rawPtr);
                                         *  cameraMouseAssist.retrieveCurFeature(this._AHMCurFeature._rawPtr);
                                         *
                                         *  extraImages[0] = _AHMBackFeature.GetBitMap().Clone() as Bitmap;
                                         *  extraImages[1] = _AHMCurFeature.GetBitMap().Clone() as Bitmap;
                                         *
                                         *  extraImages[0].RotateFlip(RotateFlipType.RotateNoneFlipX);
                                         *  extraImages[1].RotateFlip(RotateFlipType.RotateNoneFlipX);
                                         *
                                         *  realtimeEvent.SetImages(extraImages[0], extraImages[1]);
                                         *  CMSLogger.SendLogEvent(realtimeEvent);
                                         * }*/
                                    }
                                }
                            }
                        }
                    }
                    if (showMessages && images.Count > 0)
                    {
                        trackingSuiteAdapter.SendMessages(images.ToArray(), new string[] { "Left Back", "Left Cur", "Right Back", "Right Cur", "Mouth Back", "Mouth Cur" });
                    }
                }
                    catch (Exception)
                    {
                        // Swallow per-frame AHM errors; processing resumes on the next frame.
                    }
            }
        }
Example #18
        public override bool Process(PointF  [] imagePoints, System.Drawing.Bitmap[] frames)
        {
            lock (mutex)
            {
                if (finished)
                {
                    return(false);
                }

                if (centerPoint.IsEmpty)
                {
                    centerPoint.X = imagePoints[0].X;
                    centerPoint.Y = imagePoints[0].Y;
                    return(true);
                }

                if (curFrame == null)
                {
                    curFrame = new CvImageWrapper(frames[0]);
                }
                else
                {
                    curFrame.setImage(frames[0]);
                }


                if (setupFrame == null)
                {
                    setupFrame = new Bitmap(curFrame.Size.Width, curFrame.Size.Height);
                    InitSetupFrame();
                }

                relCurPoint = mouseControlStandard.ComputeRelCursorInWindow(imagePoints[0], centerPoint);
                int xRectIndex = (int)Math.Floor((double)rectangleLength * relCurPoint.X);
                int yRectIndex = (int)Math.Floor((double)rectangleLength * relCurPoint.Y);

                if (xRectIndex >= rectangleLength)
                {
                    xRectIndex = rectangleLength - 1;
                }
                if (yRectIndex >= rectangleLength)
                {
                    yRectIndex = rectangleLength - 1;
                }
                if (xRectIndex < 0)
                {
                    xRectIndex = 0;
                }
                if (yRectIndex < 0)
                {
                    yRectIndex = 0;
                }

                if (rectangles[xRectIndex, yRectIndex] && !finished)
                {
                    rectangles[xRectIndex, yRectIndex] = false;
                    curNumRectangles++;

                    for (int i = 0; i < imagePoints.Length; i++)
                    {
                        PointF imagePoint = imagePoints[i];

                        if (!(imagePoint.X <= obsSize / 2 || imagePoint.X >= curFrame.Size.Width - obsSize / 2) &&
                            !(imagePoint.Y <= obsSize / 2 || imagePoint.Y >= curFrame.Size.Height - obsSize / 2))
                        {
                            CvRect cropDimensions = new CvRect();
                            cropDimensions.x      = (int)imagePoint.X - obsSize / 2;
                            cropDimensions.y      = (int)imagePoint.Y - obsSize / 2;
                            cropDimensions.width  = obsSize;
                            cropDimensions.height = obsSize;

                            CvImageWrapper curObs = curFrame.cropSubImage(cropDimensions);
                            this.templatesList[i].Add(curObs);
                            UpdateSetupFrame();
                        }
                    }

                    if (curNumRectangles == this.numTemplates)
                    {
                        this.finished = true;
                        return(false);
                    }
                    else
                    {
                        SendMessage();
                    }
                }

                else if (shouldSendMessage)
                {
                    shouldSendMessage = false;
                    SendMessage();
                }
                return(true);
            }
        }
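
Example #18 maps the relative cursor position (nominally in [0, 1] per axis) to a cell of the rectangleLength x rectangleLength setup grid and clamps the index at the edges. A compact sketch of that mapping (ToCellIndex is hypothetical):

        using System;

        static class SetupGrid
        {
            // Hypothetical mirror of the index calculation above: map a relative
            // coordinate to a grid cell and clamp it to the valid range.
            public static int ToCellIndex(double relative, int gridLength)
            {
                int index = (int)Math.Floor(gridLength * relative);
                return Math.Max(0, Math.Min(index, gridLength - 1));
            }
        }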