Example #1
        public Form1()
        {
            InitializeComponent();

            // PictureBox original
            pbxOriginal.SizeMode             = PictureBoxSizeMode.Zoom;
            pbxOriginal.LoadCompleted       += pbxOriginal_LoadCompleted;
            pbxOriginal.LoadProgressChanged += pbxOriginal_LoadProgressChanged;

            // Foreground ImageBox
            forgroundImageBox          = new ImageBox();
            forgroundImageBox.Size     = pbxOriginal.Size;
            forgroundImageBox.SizeMode = PictureBoxSizeMode.Zoom;
            flowLayoutPanel.Controls.Add(forgroundImageBox);

            // ImageBox Edit
            ibxEdit = new ImageBox();
            //ibxEdit.Size = pbxOriginal.Size;
            ibxEdit.Size     = new Size(800, 600);
            ibxEdit.SizeMode = PictureBoxSizeMode.Zoom;
            flowLayoutPanel.Controls.Add(ibxEdit);

            // FGDetector
            Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE detectorType = FORGROUND_DETECTOR_TYPE.FGD;
            forgroundDetector = new FGDetector <Bgr>(detectorType);

            // Local timer
            timerLocal          = new Timer();
            timerLocal.Tick    += timer_Tick;
            timerLocal.Interval = 500;

            // Detection zone rectangle
            zoneDetection = new Rectangle(new Point(0, 0), pbxOriginal.Size);
        }
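The constructor only wires things up; the timer_Tick handler it registers is not shown. Below is a minimal sketch of what such a handler might do, assuming a Capture field named capture (not visible in the constructor) supplies the frames. It feeds the foreground detector and displays the mask restricted to zoneDetection.

        // Minimal sketch; the `capture` field is an assumption not shown above.
        private void timer_Tick(object sender, EventArgs e)
        {
            using (Image<Bgr, Byte> frame = capture.QueryFrame())
            {
                if (frame == null) return;

                forgroundDetector.Update(frame);   // feed the background model

                // Show the part of the foreground mask inside the detection zone.
                Image<Gray, Byte> mask = forgroundDetector.ForgroundMask;
                mask.ROI = zoneDetection;
                ibxEdit.Image = mask.Copy();
                mask.ROI = Rectangle.Empty;        // reset the ROI on the shared mask
            }
        }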
Example #2
        void Run()
        {
            try
            {
                _cameraCapture = new Capture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _detector = new FGDetector <Bgr>(FORGROUND_DETECTOR_TYPE.FGD);

            _tracker = new BlobTrackerAuto <Bgr>();

            Application.Idle += ProcessFrame;
        }
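Run() wires Application.Idle to a ProcessFrame handler that is not shown in this snippet. A sketch of the usual FGDetector + BlobTrackerAuto pairing, following the stock Emgu.CV VideoSurveillance sample (the imageBox display control is an assumption):

        void ProcessFrame(object sender, EventArgs e)
        {
            Image<Bgr, Byte> frame = _cameraCapture.QueryFrame();
            if (frame == null) return;

            frame._SmoothGaussian(3);                 // suppress sensor noise

            _detector.Update(frame);                  // update the background model
            Image<Gray, Byte> forgroundMask = _detector.ForgroundMask;

            _tracker.Process(frame, forgroundMask);   // track the foreground blobs

            foreach (MCvBlob blob in _tracker)
            {
                frame.Draw((Rectangle)blob, new Bgr(255.0, 255.0, 255.0), 2);
            }

            imageBox.Image = frame;                   // assumed display control
        }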
Example #3
        void Run()
        {
            try
            {
                _cameraCapture = new Capture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _detector = new FGDetector <Bgr>(ForgroundDetectorType.Fgd);

            _tracker = new BlobTrackerAuto <Bgr>();

            Application.Idle += ProcessFrame;
        }
Example #4
        void Run()
        {
            try
            {
                _cameraCapture = new Capture(@"C:\Users\Tom\Documents\Artemis\New Florence Wood Products\video_grab40.avi");
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _detector = new FGDetector <Bgr>(FORGROUND_DETECTOR_TYPE.FGD);

            _tracker = new BlobTrackerAuto <Bgr>();

            Application.Idle += ProcessFrame;
        }
Example #5
      void Run()
      {
         try
         {
            _cameraCapture = new Capture(@"C:\Users\Tom\Documents\Artemis\New Florence Wood Products\video_grab40.avi");
         }
         catch (Exception e)
         {
            MessageBox.Show(e.Message);
            return;
         }
         
         _detector = new FGDetector<Bgr>(FORGROUND_DETECTOR_TYPE.FGD);

         _tracker = new BlobTrackerAuto<Bgr>();

         Application.Idle += ProcessFrame;
      }
Example #6
        void Run()
        {
            try
            {
                _cameraCapture = new Capture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _detector = new FGDetector<Bgr>(FORGROUND_DETECTOR_TYPE.FGD);

            _tracker = new BlobTrackerAuto<Bgr>();

            Application.Idle += ProcessFrame;
        }
Example #7
 private void ProcessFrame(object sender, EventArgs e) // camera frame handler
 {
     try
     {
         using (Image <Bgr, Byte> img2 = cap.RetrieveBgrFrame())
             using (MemStorage storage = new MemStorage()) //create storage for motion components
             {
                 if (_forgroundDetector == null)
                 {
                     // Use a single background model; constructing two in a row
                     // would immediately discard (and leak) the first one.
                     //_forgroundDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD);
                     _forgroundDetector = new BGStatModel <Bgr>(img2, Emgu.CV.CvEnum.BG_STAT_TYPE.FGD_STAT_MODEL);
                 }
                 _forgroundDetector.Update(img2);
                 imageBox1.Image = img2;
                 storage.Clear();
             }
     }
     catch
     {
         // swallow frame-grab errors (e.g. dropped frames)
     }
 }
Example #8
        //motion detection processing
        private Image<Bgr, Byte> ProcessFrame(Image<Bgr, Byte> image)
        {
            // using (Image<Bgr, Byte> image = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC))
            using (MemStorage storage = new MemStorage()) //create storage for motion components
            {
                if (_forgroundDetector == null)
                {
                    //_forgroundDetector = new BGCodeBookModel<Bgr>();
                    // _forgroundDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD);

                    _forgroundDetector = new BGStatModel<Bgr>(image, Emgu.CV.CvEnum.BG_STAT_TYPE.FGD_STAT_MODEL);

                }

                _forgroundDetector.Update(image);

                //    imageBoxFrameGrabber.Image = image;

                //update the motion history
                _motionHistory.Update(_forgroundDetector.ForgroundMask);

                #region get a copy of the motion mask and enhance its color
                double[] minValues, maxValues;
                Point[] minLoc, maxLoc;
                _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
                Image<Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
                #endregion

                //create the motion image
                Image<Bgr, Byte> motionImage = new Image<Bgr, byte>(motionMask.Size);
                //display the motion pixels in blue (first channel)
                motionImage[0] = motionMask;

                //Threshold to define a motion area, reduce the value to detect smaller motion
                double minArea = 100;

                storage.Clear(); //clear the storage
                Seq<MCvConnectedComp> motionComponents = _motionHistory.GetMotionComponents(storage);

                if (showGridLines)
                {
                    LineSegment2D line = new LineSegment2D(new Point(0, 169), new Point(520, 169));
                    LineSegment2D line2 = new LineSegment2D(new Point(259, 0), new Point(259, 340));

                    image.Draw(line, new Bgr(Color.White), 2);
                    image.Draw(line2, new Bgr(Color.White), 2);
                }

                if (displayPosNum)
                {
                    for (int i = 0; i < dsPos.Tables[0].Rows.Count; i++)
                    {
                        if (showPos)
                            image.Draw("# " + dsPos.Tables[0].Rows[i][0].ToString(), ref font, new Point(int.Parse(dsPos.Tables[0].Rows[i][1].ToString()) - 120, int.Parse(dsPos.Tables[0].Rows[i][2].ToString()) - 50), new Bgr(Color.Yellow));
                        if (showNames)
                            image.Draw(dsPos.Tables[0].Rows[i][3].ToString(), ref font, new Point(int.Parse(dsPos.Tables[0].Rows[i][1].ToString()) - 120, int.Parse(dsPos.Tables[0].Rows[i][2].ToString()) - 70), new Bgr(Color.Yellow));

                    }
                }

                if (red1 && red1cnt < 100)
                {
                    red1cnt++;
                    image.Draw(new Rectangle(0, 0, 255, 165), new Bgr(Color.Red), 3);

                    if (red1cnt == 99)
                    {
                        red1 = false;
                        red1cnt = 0;
                    }
                }
                if (red2 && red2cnt < 100)
                {
                    red2cnt++;
                    image.Draw(new Rectangle(262, 0, 257, 167), new Bgr(Color.Red), 3);

                    if (red2cnt == 99)
                    {
                        red2 = false;
                        red2cnt = 0;
                    }
                }
                if (red3 && red3cnt < 100)
                {
                    red3cnt++;
                    image.Draw(new Rectangle(0, 170, 260, 170), new Bgr(Color.Red), 3);

                    if (red3cnt == 99)
                    {
                        red3 = false;
                        red3cnt = 0;
                    }
                }
                if (red4 && red4cnt < 100)
                {
                    red4cnt++;
                    image.Draw(new Rectangle(260, 170, 260, 170), new Bgr(Color.Red), 3);

                    if (red4cnt == 99)
                    {
                        red4 = false;
                        red4cnt = 0;
                    }
                }

                if (green1 && green1cnt < 200)
                {
                    green1cnt++;
                    image.Draw(new Rectangle(0, 0, 255, 165), new Bgr(Color.Green), 3);

                    if (green1cnt == 199)
                    {
                        green1 = false;
                        green1cnt = 0;
                    }
                }
                if (green2 && green2cnt < 200)
                {
                    green2cnt++;
                    image.Draw(new Rectangle(262, 0, 257, 167), new Bgr(Color.Green), 3);

                    if (green2cnt == 199)
                    {
                        green2 = false;
                        green2cnt = 0;
                    }
                }
                if (green3 && green3cnt < 200)
                {
                    green3cnt++;
                    image.Draw(new Rectangle(0, 170, 260, 170), new Bgr(Color.Green), 3);

                    if (green3cnt == 199)
                    {
                        green3 = false;
                        green3cnt = 0;
                    }
                }
                if (green4 && green4cnt < 200)
                {
                    green4cnt++;
                    image.Draw(new Rectangle(260, 170, 260, 170), new Bgr(Color.Green), 3);

                    if (green4cnt == 199)
                    {
                        green4 = false;
                        green4cnt = 0;
                    }
                }

                //iterate through each of the motion components
                foreach (MCvConnectedComp comp in motionComponents)
                {
                    //reject the components that have small area;
                    if (comp.area < minArea) continue;

                    // find the angle and motion pixel count of the specific area
                    double angle, motionPixelCount;

                    _motionHistory.MotionInfo(comp.rect, out angle, out motionPixelCount);

                    //if (motionPixelCount > 100000) { image.Draw(l5 , new Bgr(Color.Red), 10);  } else { image.Draw(l5 , new Bgr(Color.Green), 10);  }
                    //reject the area that contains too few motion
                    // if (motionPixelCount < comp.area * 0.8) continue;
                    if (motionPixelCount < comp.area * 0.05) continue;

                    int nearpos = nearestPosition(comp.rect.X, comp.rect.Y);
                    //if (1000 > comp.area) continue;

                    //Draw each individual motion in red
                    //  DrawMotion(motionImage, comp.rect, angle, new Bgr(Color.Red));
                    if (nearpos == 3 && comp.area < 500) continue;
                    if (nearpos == 4 && comp.area < 500) continue;

                    if (comp.rect.X > 60 && comp.rect.Y > 60)
                    {
                        if (motionQueue.Count == 100)
                        {
                            motionQueue.Dequeue();
                            motionQueue.Enqueue(nearpos);

                        }
                        else
                        {
                            motionQueue.Enqueue(nearpos);
                        }

                        // LineSegment2D l5 = new LineSegment2D(new Point(comp.rect.X, comp.rect.Y), new Point(comp.rect.X, comp.rect.Y));
                        // image.Draw(l5, new Bgr(Color.Red), 10);
                        //  image.Draw(comp.area.ToString(), ref font, new Point(comp.rect.X, comp.rect.Y), new Bgr(Color.LightGreen));
                        if (showMotion)
                            image.Draw(comp.rect, new Bgr(Color.Yellow), 2);

                    }

                }

                // find and draw the overall motion angle
                double overallAngle, overallMotionPixelCount;
                _motionHistory.MotionInfo(motionMask.ROI, out overallAngle, out overallMotionPixelCount);
                // DrawMotion(motionImage, motionMask.ROI, overallAngle, new Bgr(Color.Green));

                //Display the amount of motions found on the current image
                //   UpdateText(String.Format("Total Motions found: {0}; Motion Pixel count: {1}", motionComponents.Total, overallMotionPixelCount));

                //Display the image of the motion
                // imageBoxFrameGrabber.Image = motionImage;  ///motion image

                return image;
            }
        }
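The four red and four green highlight blocks above differ only in their rectangle and counter, so they can be collapsed into a loop over a small zone table. A hedged refactoring sketch follows; ZoneHighlight, zones, and DrawZoneHighlights are hypothetical names, and the counter reset is simplified to the same frame counts used above.

        // Hypothetical refactoring: each zone carries its rectangle and counters,
        // replacing the four copy-pasted red/green blocks.
        private class ZoneHighlight
        {
            public Rectangle Rect;
            public bool Red, Green;
            public int RedCnt, GreenCnt;
        }

        private readonly ZoneHighlight[] zones =
        {
            new ZoneHighlight { Rect = new Rectangle(0,   0,   255, 165) },
            new ZoneHighlight { Rect = new Rectangle(262, 0,   257, 167) },
            new ZoneHighlight { Rect = new Rectangle(0,   170, 260, 170) },
            new ZoneHighlight { Rect = new Rectangle(260, 170, 260, 170) },
        };

        private void DrawZoneHighlights(Image<Bgr, Byte> image)
        {
            foreach (ZoneHighlight z in zones)
            {
                if (z.Red && z.RedCnt < 100)
                {
                    image.Draw(z.Rect, new Bgr(Color.Red), 3);
                    if (++z.RedCnt >= 99) { z.Red = false; z.RedCnt = 0; }
                }
                if (z.Green && z.GreenCnt < 200)
                {
                    image.Draw(z.Rect, new Bgr(Color.Green), 3);
                    if (++z.GreenCnt >= 199) { z.Green = false; z.GreenCnt = 0; }
                }
            }
        }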
Example #9
        void FrameGrabber(object sender, EventArgs e)
        {
            try
            {
                //Get the current frame from the capture device
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            catch (NullReferenceException e1)
            {
                _motionHistory = new MotionHistory(2.0, 0.05, 0.5);
                _forgroundDetector = null;
                motionQueue.Clear(); helpQueue.Clear();
                grabber = new Capture(vidlist[excnt]);
                excnt++;
                if (excnt == 5) { excnt = 0; }
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                green1 = false; green2 = false; green3 = false; green4 = false;
                red1 = false; red2 = false; red3 = false; red4 = false;
            }

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {

                t = t + 1;
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //MessageBox.Show("wiidth " + f.rect.Width + " height " + f.rect.Height + " area " + f.rect.Width * f.rect.Height);
                if (f.rect.Width > 80) continue;

                //draw the face detected in the 0th (gray) channel with blue color
                if (showHand)
                    currentFrame.Draw(f.rect, new Bgr(Color.LightGreen), 2);

                int nearespos = nearestPosition(f.rect.X, f.rect.Y);

                if (helpQueue.ToArray().ToList().IndexOf(nearespos) == -1)
                {
                    //lbAlerts.Items.Add("Help request at #" + nearespos.ToString());

                    dgAlerts.Rows.Add("Help Request", nearespos.ToString());
                    DB_Connect.InsertQuery("INSERT INTO alert_tab(exam_id,position_id,alert_type,alert_time) VALUES(" + examid + "," + nearespos.ToString() + ",'H','" + DateTime.Now + "')");
                    dgAlerts.FirstDisplayedScrollingRowIndex = dgAlerts.RowCount - 1;

                    //GCM - help
                    //AndroidGCMPushNotification apnGCM = new AndroidGCMPushNotification();
                    //string strResponse = apnGCM.SendNotification(regID, nearespos.ToString() + " "+ DateTime.Now, "H");

                    if (nearespos == 1) green1 = true;
                    else if (nearespos == 2) green2 = true;
                    else if (nearespos == 3) green3 = true;
                    else if (nearespos == 4) green4 = true;

                    if (helpQueue.Count == 10)
                    {
                        helpQueue.Dequeue();
                        helpQueue.Enqueue(nearespos);
                    }
                    else
                    {
                        helpQueue.Enqueue(nearespos);
                    }
                }
            }

            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = ProcessFrame(currentFrame);

            if (captureOutput == null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                MessageBox.Show("reording start");
                captureOutput = new VideoWriter(@"video" + examid + ".avi", (int)grabber.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FOURCC), 15, 520, 340, true);
            }

            if (currentFrame != null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                captureOutput.WriteFrame<Bgr, Byte>(currentFrame);
            }
        }
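nearestPosition is called here and in ProcessFrame but is not shown. Given the grid lines drawn at x = 259 and y = 169 in ProcessFrame, a plausible sketch maps a detection's top-left corner to one of four quadrant positions; the exact numbering is an assumption:

        // Hypothetical sketch of nearestPosition: map a point to one of the four
        // quadrants separated by the grid lines at x = 259 and y = 169.
        private int nearestPosition(int x, int y)
        {
            if (y < 169)
                return (x < 259) ? 1 : 2;   // top-left, top-right
            return (x < 259) ? 3 : 4;       // bottom-left, bottom-right
        }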
Example #10
        public void ProcessFrame(ref Image <Bgr, Byte> image, bool SetBitmap, GameTime gameTime)
        {
            using (MemStorage storage = new MemStorage()) //create storage for motion components
            {
                if (_forgroundDetector == null)
                {
                    //What's the difference between these?
                    //_forgroundDetector = new BGCodeBookModel<Bgr>();
                    //_forgroundDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD);
                    //_forgroundDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD_SIMPLE);
                    //_forgroundDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.MOG);
                    //_forgroundDetector = new BGStatModel<Bgr>(image, Emgu.CV.CvEnum.BG_STAT_TYPE.FGD_STAT_MODEL);
                    _forgroundDetector = new BGStatModel <Bgr>(image, Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
                }

                _forgroundDetector.Update(image);

                //update the motion history
                _motionHistory.Update(_forgroundDetector.ForgroundMask);

                #region get a copy of the motion mask and enhance its color
                double[] minValues, maxValues;
                System.Drawing.Point[] minLoc, maxLoc;
                _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
                Image <Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
                #endregion

                //create the motion image
                Image <Bgr, Byte> motionImage = new Image <Bgr, byte>(motionMask.Size);
                //display the motion pixels in one of the three channels, cycling over time
                motionImage[gameTime.TotalGameTime.Milliseconds % 3] = motionMask;

                //Threshold to define a motion area, reduce the value to detect smaller motion
                //default 100;
                double minArea = 100;

                storage.Clear(); //clear the storage
                Seq <MCvConnectedComp> motionComponents = _motionHistory.GetMotionComponents(storage);

                Vector2 partVect, partDir;
                float   partRadius, partXDirection, partYDirection;

                //iterate through each of the motion components
                foreach (MCvConnectedComp comp in motionComponents)
                {
                    //reject the components that have small area;
                    if (comp.area < minArea)
                    {
                        continue;
                    }

                    // find the angle and motion pixel count of the specific area
                    double angle, motionPixelCount;
                    _motionHistory.MotionInfo(comp.rect, out angle, out motionPixelCount);

                    //Motion Particles
                    if (ParticleManager.Instance().Enabled)
                    {
                        partVect = new Vector2(comp.rect.X, comp.rect.Y);
                        partVect = FaceController.ScaleFromVideoResolution(partVect);

                        //Get the overall motion and set it to a vector2
                        partRadius     = (motionMask.ROI.Width + motionMask.ROI.Height) >> 2;
                        partXDirection = (float)(Math.Cos(angle * (Math.PI / 180.0)) * partRadius);
                        partYDirection = (float)(Math.Sin(angle * (Math.PI / 180.0)) * partRadius);
                        partDir        = new Vector2(partXDirection, partYDirection);
                        ParticleManager.Instance().ParticleSystems["motionparticles"].AddParticles(partVect, Vector2.Normalize(partDir));
                    }
                    //reject the area that contains too few motion
                    if (motionPixelCount < comp.area * 0.05)
                    {
                        continue;
                    }

                    //Draw each individual motion in red
                    if (SetBitmap)
                    {
                        DrawMotion(motionImage, comp.rect, angle, new Bgr(System.Drawing.Color.Red));
                    }
                }

                // find and draw the overall motion angle
                double overallAngle, overallMotionPixelCount;
                _motionHistory.MotionInfo(motionMask.ROI, out overallAngle, out overallMotionPixelCount);

                //Get the overall motion and set it to a vector2
                float circleRadius = (motionMask.ROI.Width + motionMask.ROI.Height) >> 2;
                float xDirection   = (float)(Math.Cos(overallAngle * (Math.PI / 180.0)) * circleRadius);
                float yDirection   = (float)(Math.Sin(overallAngle * (Math.PI / 180.0)) * circleRadius);

                MotionSum = new Vector2(xDirection, yDirection);
                OverallMotionPixelCount = (int)overallMotionPixelCount;
                TotalMotionsFound       = (int)motionComponents.Total;
                if (SetBitmap)
                {
                    DrawMotion(motionImage, motionMask.ROI, overallAngle, new Bgr(System.Drawing.Color.Green));
                    image = image.Add(motionImage);
                    gameConsole.DebugText = String.Format("Total Motions found: {0};\n Motion Pixel count: {1}\nMotionSum:\n{2}"
                                                          , motionComponents.Total, overallMotionPixelCount, MotionSum);
                    gameConsole.DebugText += String.Format("\nOverallAngle: {0};",
                                                           overallAngle);
                    gameConsole.DebugText += String.Format("\nMotionSumLength(): {0};",
                                                           MotionSum.Length());
                }
            }
        }
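The degrees-to-vector conversion appears twice in this method (per component and again for the overall motion). A small helper would make the intent explicit; this is a sketch, not part of the original class:

        // Convert a motion angle in degrees plus a radius into a direction vector.
        private static Vector2 MotionVector(double angleInDegrees, float radius)
        {
            float x = (float)(Math.Cos(angleInDegrees * (Math.PI / 180.0)) * radius);
            float y = (float)(Math.Sin(angleInDegrees * (Math.PI / 180.0)) * radius);
            return new Vector2(x, y);
        }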
Example #11
        void FrameGrabber(object sender, EventArgs e)
        {
            try
            {
                //Get the current frame from the capture device
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            catch (NullReferenceException e1)
            {
                _motionHistory     = new MotionHistory(2.0, 0.05, 0.5);
                _forgroundDetector = null;
                motionQueue.Clear(); helpQueue.Clear();
                grabber = new Capture(vidlist[excnt]);
                excnt++;
                if (excnt == 5)
                {
                    excnt = 0;
                }
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                green1       = false; green2 = false; green3 = false; green4 = false;
                red1         = false; red2 = false; red3 = false; red4 = false;
            }

            //Convert it to Grayscale
            gray = currentFrame.Convert <Gray, Byte>();

            //Face Detector
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
                face,
                1.2,
                10,
                Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
                new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {
                t      = t + 1;
                result = currentFrame.Copy(f.rect).Convert <Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //MessageBox.Show("wiidth " + f.rect.Width + " height " + f.rect.Height + " area " + f.rect.Width * f.rect.Height);
                if (f.rect.Width > 80)
                {
                    continue;
                }

                //draw the face detected in the 0th (gray) channel with blue color
                if (showHand)
                {
                    currentFrame.Draw(f.rect, new Bgr(Color.LightGreen), 2);
                }

                int nearespos = nearestPosition(f.rect.X, f.rect.Y);

                if (helpQueue.ToArray().ToList().IndexOf(nearespos) == -1)
                {
                    //lbAlerts.Items.Add("Help request at #" + nearespos.ToString());

                    dgAlerts.Rows.Add("Help Request", nearespos.ToString());
                    DB_Connect.InsertQuery("INSERT INTO alert_tab(exam_id,position_id,alert_type,alert_time) VALUES(" + examid + "," + nearespos.ToString() + ",'H','" + DateTime.Now + "')");
                    dgAlerts.FirstDisplayedScrollingRowIndex = dgAlerts.RowCount - 1;

                    //GCM - help
                    //AndroidGCMPushNotification apnGCM = new AndroidGCMPushNotification();
                    //string strResponse = apnGCM.SendNotification(regID, nearespos.ToString() + " "+ DateTime.Now, "H");

                    if (nearespos == 1)
                    {
                        green1 = true;
                    }
                    else if (nearespos == 2)
                    {
                        green2 = true;
                    }
                    else if (nearespos == 3)
                    {
                        green3 = true;
                    }
                    else if (nearespos == 4)
                    {
                        green4 = true;
                    }

                    if (helpQueue.Count == 10)
                    {
                        helpQueue.Dequeue();
                        helpQueue.Enqueue(nearespos);
                    }
                    else
                    {
                        helpQueue.Enqueue(nearespos);
                    }
                }
            }


            //Show the faces processed and recognized
            imageBoxFrameGrabber.Image = ProcessFrame(currentFrame);

            if (captureOutput == null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                MessageBox.Show("reording start");
                captureOutput = new VideoWriter(@"video" + examid + ".avi", (int)grabber.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FOURCC), 15, 520, 340, true);
            }

            if (currentFrame != null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                captureOutput.WriteFrame <Bgr, Byte>(currentFrame);
            }
        }
Example #12
      private void ProcessFrame(object sender, EventArgs e)
      {
         using (Image<Bgr, Byte> image = _capture.QueryFrame())
         using (MemStorage storage = new MemStorage()) //create storage for motion components
         {
            if (_forgroundDetector == null)
            {
               //_forgroundDetector = new BGCodeBookModel<Bgr>();
               //_forgroundDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD);
               _forgroundDetector = new BGStatModel<Bgr>(image, Emgu.CV.CvEnum.BG_STAT_TYPE.FGD_STAT_MODEL);
            }

            _forgroundDetector.Update(image);

            capturedImageBox.Image = image;

            //update the motion history
            _motionHistory.Update(_forgroundDetector.ForgroundMask);

            #region get a copy of the motion mask and enhance its color
            double[] minValues, maxValues;
            Point[] minLoc, maxLoc;
            _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
            Image<Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
            #endregion

            //create the motion image 
            Image<Bgr, Byte> motionImage = new Image<Bgr, byte>(motionMask.Size);
            //display the motion pixels in blue (first channel)
            motionImage[0] = motionMask;

            //Threshold to define a motion area, reduce the value to detect smaller motion
            double minArea = 100;

            storage.Clear(); //clear the storage
            Seq<MCvConnectedComp> motionComponents = _motionHistory.GetMotionComponents(storage);

            //iterate through each of the motion components
            foreach (MCvConnectedComp comp in motionComponents)
            {
               //reject the components that have small area;
               if (comp.area < minArea) continue;

               // find the angle and motion pixel count of the specific area
               double angle, motionPixelCount;
               _motionHistory.MotionInfo(comp.rect, out angle, out motionPixelCount);

               //reject the area that contains too few motion
               if (motionPixelCount < comp.area * 0.05) continue;

               //Draw each individual motion in red
               DrawMotion(motionImage, comp.rect, angle, new Bgr(Color.Red));
            }

            // find and draw the overall motion angle
            double overallAngle, overallMotionPixelCount;
            _motionHistory.MotionInfo(motionMask.ROI, out overallAngle, out overallMotionPixelCount);
            DrawMotion(motionImage, motionMask.ROI, overallAngle, new Bgr(Color.Green));

            //Display the amount of motions found on the current image
            UpdateText(String.Format("Total Motions found: {0}; Motion Pixel count: {1}", motionComponents.Total, overallMotionPixelCount));

            //Display the image of the motion
            motionImageBox.Image = motionImage;
         }
      }
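This handler matches the stock Emgu.CV MotionDetection sample. A sketch of the fields and setup it assumes (the form name and constructor values are illustrative; Example #9 on this page uses new MotionHistory(2.0, 0.05, 0.5)):

      private Capture _capture;
      private MotionHistory _motionHistory;
      private IBGFGDetector<Bgr> _forgroundDetector;

      public MotionDetectionForm()   // hypothetical form name
      {
         InitializeComponent();
         _capture = new Capture();                             // default camera
         _motionHistory = new MotionHistory(1.0, 0.05, 0.5);   // mhi duration, max/min time delta
         Application.Idle += ProcessFrame;
      }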
Example #13
        public IEnumerator <ITask> ProcessImageFrame()
        {
            DateTime started = DateTime.Now;

            // default RGB image size is 640 x 480
            // setting other value (i.e. 320x240) in ...\TrackRoamer\TrackRoamerServices\Config\TrackRoamer.TrackRoamerBot.Kinect.Config.xml does not seem to work (causes NUI initialization failure).

            byte[] srcImageBits          = this.RawFrames.RawColorFrameData;
            int    srcImageBytesPerPixel = this.RawFrames.RawColorFrameInfo.BytesPerPixel;
            int    srcImageWidth         = this.RawFrames.RawColorFrameInfo.Width;
            int    srcImageHeight        = this.RawFrames.RawColorFrameInfo.Height;

            //if (ProcessedImageWidth != this.RawFrames.RawImageFrameData.Image.Width || ProcessedImageHeight != this.RawFrames.RawImageFrameData.Image.Height)
            //{
            //    ProcessedImageWidth = this.RawFrames.RawImageFrameData.Image.Width;
            //    ProcessedImageHeight = this.RawFrames.RawImageFrameData.Image.Height;

            //    ImageBitsProcessed = new byte[ProcessedImageWidth * ProcessedImageHeight * 4];
            //}

            // we need to convert Kinect/MRDS service Image to OpenCV Image - that takes converting first to a BitmapSource and then to System.Drawing.Bitmap:
            BitmapSource srcBitmapSource = BitmapSource.Create(srcImageWidth, srcImageHeight, 96, 96, PixelFormats.Bgr32, null, srcImageBits, srcImageWidth * srcImageBytesPerPixel);

            if (doSaveOneImage)
            {
                doSaveOneImage = false;

                SaveBitmapSource(srcBitmapSource);
            }

            Image <Bgr, byte>  img      = new Image <Bgr, byte>(BitmapSourceToBitmap(srcBitmapSource));
            Image <Gray, byte> gimg     = null;
            Image <Bgr, byte>  filtered = null;

            img._SmoothGaussian(11); // filter out noise

            // from here we can operate OpenCV / Emgu Image, at the end converting Image to BitmapProcessed:

            if (videoSurveillanceDecider == null)
            {
                videoSurveillanceDecider = new VideoSurveillanceDecider(img.Width, img.Height);
            }

            videoSurveillanceDecider.Init();

            if (doColorRecognition)
            {
                // color detection (T-shirt, cone...):

                //lock (videoSurveillanceDecider)
                //{
                //    videoSurveillanceDecider.purgeColorBlobs();
                //}

                filtered = img.Clone().SmoothBlur(13, 13);       //.SmoothGaussian(9);

                byte[, ,] data = filtered.Data;
                int    nRows = filtered.Rows;
                int    nCols = filtered.Cols;
                double averageBrightnessTmp = 0.0d;

                colorTresholdMain = averageBrightness / 2.0d;
                double colorFactorMain = 256.0d * colorFactor / averageBrightness;

                /*
                 */
                // leave only pixels with distinct red color in the "filtered":
                for (int i = nRows - 1; i >= 0; i--)
                {
                    for (int j = nCols - 1; j >= 0; j--)
                    {
                        // R component (2) must be greater than B (0) and G (1) by the colorFactor; dark areas are excluded:
                        double compR = data[i, j, 2];
                        double compG = data[i, j, 1];
                        double compB = data[i, j, 0];

                        double compSum = compR + compG + compB;         // brightness
                        averageBrightnessTmp += compSum;

                        if (compR > colorTresholdMain)                   //&& compG > colorTreshold && compB > colorTreshold)
                        {
                            compR = (compR / compSum) / colorFactorMain; // adjusted for brightness
                            compG = compG / compSum;
                            compB = compB / compSum;
                            if (compR > compG && compR > compB)
                            {
                                data[i, j, 0] = data[i, j, 1] = 0;    // B, G
                                data[i, j, 2] = 255;                  // R
                            }
                            else
                            {
                                data[i, j, 0] = data[i, j, 1] = data[i, j, 2] = 0;
                            }
                        }
                        else
                        {
                            // too dark.
                            data[i, j, 0] = data[i, j, 1] = data[i, j, 2] = 0;
                        }
                    }
                }

                averageBrightness = averageBrightnessTmp / (nRows * nCols * 3.0d); // save it for the next cycle

                gimg = filtered.Split()[2];                                        // make a grey image out of the Red channel, supposedly containing all red objects.

                // contour detection:

                int areaTreshold = 300;     // do not consider red contours with area in pixels less than areaTreshold.

                Contour <System.Drawing.Point> contours;
                MemStorage store = new MemStorage();

                // make a linked list of contours from the red spots on the screen:
                contours = gimg.FindContours(Emgu.CV.CvEnum.CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, Emgu.CV.CvEnum.RETR_TYPE.CV_RETR_LIST, store);
                CvInvoke.cvZero(gimg.Ptr);

                if (contours != null)
                {
                    CvInvoke.cvDrawContours(img.Ptr, contours.Ptr, new MCvScalar(255, 0, 0), new MCvScalar(255, 255, 255), 2, 2, Emgu.CV.CvEnum.LINE_TYPE.CV_AA, System.Drawing.Point.Empty);

                    List <ContourContainer> contourContainers = new List <ContourContainer>();

                    for (; contours != null; contours = contours.HNext)
                    {
                        contours.ApproxPoly(contours.Perimeter * 0.02, 0, contours.Storage);
                        if (contours.Area > areaTreshold)
                        {
                            contourContainers.Add(new ContourContainer()
                            {
                                contour = contours
                            });

                            //int centerX = contours.BoundingRectangle.X + contours.BoundingRectangle.Width / 2;
                            //int centerY = contours.BoundingRectangle.Y + contours.BoundingRectangle.Height / 2;
                            //img.Draw(contours.BoundingRectangle, new Bgr(64.0, 64.0, 255.0), 2);
                            //img.Draw(Math.Round((double)(((int)contours.Area) / 100) * 100).ToString(), ref _font, new System.Drawing.Point(centerX, centerY), new Bgr(64.0, 64.0, 255.0));
                        }
                    }

                    // for the VideoSurveillanceDecider to work, we need to supply blobs IDs - generate them as numbers in a size-ordered list, offset by 1000:
                    var ccs = from cc in contourContainers
                              orderby cc.contour.Area descending
                              select cc;

                    int ccId        = 0;
                    int goodCounter = 0;
                    lock (videoSurveillanceDecider)
                    {
                        videoSurveillanceDecider.purgeColorBlobs();

                        foreach (ContourContainer cc in ccs)
                        {
                            cc.ID = 1000 + ccId;      // offset not to overlap with VideoSurveillance-generated blobs
                            VideoSurveillanceTarget target = videoSurveillanceDecider.Update(cc, currentPanKinect, currentTiltKinect);
                            ccId++;
                            if (target != null && target.Rank > 1.0d)
                            {
                                goodCounter++;
                                if (goodCounter > 10000)  // safety cap on the number of good targets
                                {
                                    break;
                                }
                            }
                        }

                        if (!doSurveillance)
                        {
                            videoSurveillanceDecider.Commit();
                            videoSurveillanceDecider.ComputeMainColorTarget();
                            videoSurveillanceDecider.Draw(img);             // must run under lock
                        }
                    }
                }
            }

            if (doSurveillance)
            {
                // blob detection by Emgu.CV.VideoSurveillance:

                if (_tracker == null)
                {
                    _tracker  = new BlobTrackerAuto <Bgr>();
                    _detector = new FGDetector <Bgr>(FORGROUND_DETECTOR_TYPE.FGD);
                }

                Image <Bgr, byte> imgSmall = img.Resize(0.5d, INTER.CV_INTER_NN);      // for the full image - _tracker.Process() fails to allocate 91Mb of memory

                #region use the BG/FG detector to find the forground mask
                _detector.Update(imgSmall);
                Image <Gray, byte> forgroundMask = _detector.ForgroundMask;
                #endregion

                _tracker.Process(imgSmall, forgroundMask);

                lock (videoSurveillanceDecider)
                {
                    videoSurveillanceDecider.PurgeAndCommit();      // make sure that obsolete Surveillance targets are removed

                    foreach (MCvBlob blob in _tracker)
                    {
                        // keep in mind that we were working on the scaled down (to 1/2 size) image. So all points should be multiplied by two.
                        VideoSurveillanceTarget target = videoSurveillanceDecider.Update(blob, currentPanKinect, currentTiltKinect);
                    }

                    videoSurveillanceDecider.ComputeMainColorTarget();
                    videoSurveillanceDecider.Draw(img);             // must run under lock
                }
            }

            Bgr color = new Bgr(0.0, 128.0, 128.0);

            // draw center vertical line:
            System.Drawing.Point[] pts = new System.Drawing.Point[2];
            pts[0] = new System.Drawing.Point(img.Width / 2, 0);
            pts[1] = new System.Drawing.Point(img.Width / 2, img.Height);

            img.DrawPolyline(pts, false, color, 1);

            // draw center horizontal line:
            pts[0] = new System.Drawing.Point(0, img.Height / 2);
            pts[1] = new System.Drawing.Point(img.Width, img.Height / 2);

            img.DrawPolyline(pts, false, color, 1);

            // draw a sighting frame for precise alignment:
            // Horizontally the frame spans 16.56 degrees on every side and 12.75 degrees either up or down (at 74" the size it covers is 44"W by 33.5"H, i.e. 33.12 degrees by 25.5 degrees)
            System.Drawing.Point[] pts1 = new System.Drawing.Point[5];
            pts1[0] = new System.Drawing.Point(img.Width / 4, img.Height / 4);
            pts1[1] = new System.Drawing.Point(img.Width * 3 / 4, img.Height / 4);
            pts1[2] = new System.Drawing.Point(img.Width * 3 / 4, img.Height * 3 / 4);
            pts1[3] = new System.Drawing.Point(img.Width / 4, img.Height * 3 / 4);
            pts1[4] = new System.Drawing.Point(img.Width / 4, img.Height / 4);

            img.DrawPolyline(pts1, false, color, 1);

            // end of OpenCV / Emgu Image processing, converting the Image to BitmapProcessed:

            BitmapProcessed = img.ToBitmap();     // image with all processing marked
            //BitmapProcessed = filtered.ToBitmap();  // red image out of the Red channel
            //BitmapProcessed = gimg.ToBitmap();      // grey image; is CvZero'ed by this point
            //BitmapProcessed = forgroundMask.ToBitmap();

            //Tracer.Trace("Video processed in " + (DateTime.Now - started).TotalMilliseconds + " ms");       // usually 40...70ms

            yield break;
        }
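BitmapSourceToBitmap is referenced above but not shown. A sketch of the usual WPF-to-GDI conversion through an in-memory BMP stream (an assumption, not necessarily the original helper):

        // Hypothetical sketch: encode the BitmapSource to an in-memory BMP, then
        // read it back as a GDI+ Bitmap (cloned so it does not depend on the stream).
        private static System.Drawing.Bitmap BitmapSourceToBitmap(BitmapSource source)
        {
            using (System.IO.MemoryStream stream = new System.IO.MemoryStream())
            {
                BitmapEncoder encoder = new BmpBitmapEncoder();
                encoder.Frames.Add(BitmapFrame.Create(source));
                encoder.Save(stream);

                using (System.Drawing.Bitmap temp = new System.Drawing.Bitmap(stream))
                {
                    return new System.Drawing.Bitmap(temp);   // detach from the stream
                }
            }
        }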
Example #14
        /// <summary>
        /// PROCESS EACH FRAME
        /// </summary>
        /// <param name="sender"></param>
        /// <param name="arg"></param>
        private void ProcessFrame(object sender, EventArgs arg)
        {
            try
            {
                frame    = _capture1.QueryFrame();
                Framesno = _capture1.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_POS_FRAMES);


                framecopy = frame.Rotate(180, new Bgr());


                #region framming

                if (frame != null)
                {
                    if (_forgroundDetector == null)
                    {
                        _forgroundDetector = new FGDetector <Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD_SIMPLE);
                        //_forgroundDetector = new BGStatModel<Bgr>(frame, Emgu.CV.CvEnum.BG_STAT_TYPE.FGD_STAT_MODEL);
                        _tracker = new BlobTrackerAuto <Bgr>();
                    }


                    _forgroundDetector.Update(frame);
                    Image <Gray, Byte> todetectF = FillHoles(_forgroundDetector.ForegroundMask);



                    Image <Gray, Byte> todetect = todetectF.Rotate(180, new Gray(255));


                    _tracker.Process(todetect.Convert <Bgr, Byte>(), todetect);


                    {
                        Contour <Point> contours = todetect.FindContours(CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE, RETR_TYPE.CV_RETR_LIST, stor);


                        #region contours
                        while (contours != null)
                        {
                            if (contours.Area > 200)// && (contours.BoundingRectangle.Width > 50 && contours.BoundingRectangle.Width < 100) && contours.BoundingRectangle.Height > 200)
                            {
                                Rectangle rect = contours.BoundingRectangle;
                                rect.X      = rect.X - 5;
                                rect.Y      = rect.Y - 5;
                                rect.Height = (rect.Height + 10);
                                rect.Width  = (rect.Width + 10);
                                Bitmap frame22 = framecopy.Bitmap;

                                framecopy.Draw(rect, new Bgr(Color.Red), 2);

                                label1.Text = "Vehicle Count:" + (_tracker.Count);

                                foreach (MCvBlob blob in _tracker)
                                {
                                    int blob_id = (blob.ID + 1);
                                    // framecopy.Draw(blob_id.ToString(), ref _font, Point.Round(blob.Center), new Bgr(Color.Red));

                                    if (Point.Round(blob.Center).X > rectangleShape2.Location.X)
                                    {
                                        if (!rectCnt.Contains(blob_id))
                                        {
                                            rectCnt.Add(blob_id);
                                        }
                                    }
                                }
                                label1.Text = "Vehicle Count:" + rectCnt.Count;
                                SqlCommand cmd = new SqlCommand("", Connection);   // query text left empty in this snippet
                                cmd.CommandType = CommandType.Text;
                                SqlDataAdapter dap = new SqlDataAdapter(cmd);
                                DataTable      dt  = new DataTable();
                                dap.Fill(dt);
                                //MessageBox.Show(contours.Area.ToString());
                            }
                            contours = contours.HNext;
                        }
                        #endregion contours



                        pictureBox3.Image = framecopy.ToBitmap();
                    }
                    if (cam == 0)
                    {
                        double time_index = _capture1.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_POS_MSEC);
                        //Time_Label.Text = "Time: " + TimeSpan.FromMilliseconds(time_index).ToString().Substring(0, 8);

                        double framenumber = _capture1.GetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_POS_FRAMES);
                        // Frame_lbl.Text = "Frame: " + framenumber.ToString();
                    }

                    if (cam == 1)
                    {
                        // Frame_lbl.Text = "Frame: " + (webcam_frm_cnt++).ToString();
                    }
                }
                #endregion framming
            }
            catch (Exception ex)
            {
                //MessageBox.Show(ex.StackTrace);
            }
        }
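FillHoles is called above but not shown. One common Emgu 2.x approach redraws the external contours of the mask filled, which closes interior holes; this sketch is an assumption, not the original helper:

        // Hypothetical sketch of FillHoles: redraw the external contours of the
        // mask filled, closing any interior holes in the foreground blobs.
        private static Image<Gray, Byte> FillHoles(Image<Gray, Byte> mask)
        {
            Image<Gray, Byte> filled = mask.CopyBlank();
            using (MemStorage store = new MemStorage())
            {
                for (Contour<Point> contour = mask.FindContours(
                         CHAIN_APPROX_METHOD.CV_CHAIN_APPROX_SIMPLE,
                         RETR_TYPE.CV_RETR_EXTERNAL, store);
                     contour != null; contour = contour.HNext)
                {
                    filled.Draw(contour, new Gray(255), -1);   // thickness -1 fills
                }
            }
            return filled;
        }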
Example #15
        private void ProcessFrame(object sender, EventArgs e)
        {
            using (Image <Bgr, Byte> image = _capture.RetrieveBgrFrame())
                using (MemStorage storage = new MemStorage()) //create storage for motion components
                {
                    if (_forgroundDetector == null)
                    {
                        //_forgroundDetector = new BGCodeBookModel<Bgr>();
                        _forgroundDetector = new FGDetector <Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD);
                        //_forgroundDetector = new BGStatModel<Bgr>(image, Emgu.CV.CvEnum.BG_STAT_TYPE.FGD_STAT_MODEL);
                    }

                    _forgroundDetector.Update(image);

                    capturedImageBox.Image = image;

                    //update the motion history
                    _motionHistory.Update(_forgroundDetector.ForegroundMask);

                    forgroundImageBox.Image = _forgroundDetector.ForegroundMask;

                    #region get a copy of the motion mask and enhance its color
                    double[] minValues, maxValues;
                    Point[]  minLoc, maxLoc;
                    _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
                    Image <Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
                    #endregion

                    //create the motion image
                    Image <Bgr, Byte> motionImage = new Image <Bgr, byte>(motionMask.Size);
                    //display the motion pixels in blue (first channel)
                    motionImage[0] = motionMask;

                    //Threshold to define a motion area, reduce the value to detect smaller motion
                    double minArea = 100;

                    storage.Clear(); //clear the storage
                    Seq <MCvConnectedComp> motionComponents = _motionHistory.GetMotionComponents(storage);

                    //iterate through each of the motion components
                    foreach (MCvConnectedComp comp in motionComponents)
                    {
                        //reject the components that have small area;
                        if (comp.area < minArea)
                        {
                            continue;
                        }

                        // find the angle and motion pixel count of the specific area
                        double angle, motionPixelCount;
                        _motionHistory.MotionInfo(comp.rect, out angle, out motionPixelCount);

                        //reject the area that contains too few motion
                        if (motionPixelCount < comp.area * 0.05)
                        {
                            continue;
                        }

                        //Draw each individual motion in red
                        DrawMotion(motionImage, comp.rect, angle, new Bgr(Color.Red));
                    }

                    // find and draw the overall motion angle
                    double overallAngle, overallMotionPixelCount;
                    _motionHistory.MotionInfo(motionMask.ROI, out overallAngle, out overallMotionPixelCount);
                    DrawMotion(motionImage, motionMask.ROI, overallAngle, new Bgr(Color.Green));

                    //Display the amount of motions found on the current image
                    UpdateText(String.Format("Total Motions found: {0}; Motion Pixel count: {1}", motionComponents.Total, overallMotionPixelCount));

                    //Display the image of the motion
                    motionImageBox.Image = motionImage;
                }
        }
Example #16
        private void Pulse()
        {
            using (ColorImageFrame imageFrame = _kinectSensor.ColorStream.OpenNextFrame(200))
            {
                if (imageFrame == null)
                {
                    return;
                }

                using (Image <Bgr, byte> image = imageFrame.ToOpenCVImage <Bgr, byte>())
                    using (MemStorage storage = new MemStorage()) //create storage for motion components
                    {
                        if (_forgroundDetector == null)
                        {
                            _forgroundDetector = new BGStatModel <Bgr>(image
                                                                       , Emgu.CV.CvEnum.BG_STAT_TYPE.GAUSSIAN_BG_MODEL);
                        }

                        _forgroundDetector.Update(image);

                        //update the motion history
                        _motionHistory.Update(_forgroundDetector.ForgroundMask);

                        //get a copy of the motion mask and enhance its color
                        double[] minValues, maxValues;
                        System.Drawing.Point[] minLoc, maxLoc;
                        _motionHistory.Mask.MinMax(out minValues, out maxValues
                                                   , out minLoc, out maxLoc);
                        Image <Gray, Byte> motionMask = _motionHistory.Mask
                                                        .Mul(255.0 / maxValues[0]);

                        //create the motion image
                        Image <Bgr, Byte> motionImage = new Image <Bgr, byte>(motionMask.Size);
                        motionImage[0] = motionMask;

                        //Threshold to define a motion area
                        //reduce the value to detect smaller motion
                        double minArea = 100;

                        storage.Clear(); //clear the storage
                        Seq <MCvConnectedComp> motionComponents = _motionHistory.GetMotionComponents(storage);
                        bool isMotionDetected = false;
                        //iterate through each of the motion component
                        for (int c = 0; c < motionComponents.Count(); c++)
                        {
                            MCvConnectedComp comp = motionComponents[c];
                            //reject the components that have small area;
                            if (comp.area < minArea)
                            {
                                continue;
                            }

                            OnDetection();
                            isMotionDetected = true;
                            break;
                        }
                        if (isMotionDetected == false)
                        {
                            OnDetectionStopped();
                            this.Dispatcher.Invoke(new Action(() => rgbImage.Source = null));
                            StopRecording();
                            return;
                        }

                        this.Dispatcher.Invoke(
                            new Action(() => rgbImage.Source = imageFrame.ToBitmapSource())
                            );
                        Record(imageFrame);
                    }
            }
        }
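imageFrame.ToOpenCVImage<Bgr, byte>() is an extension method not shown here. A sketch assuming the Kinect color stream delivers 32-bit BGRA pixels (it would live in a static class):

        // Hypothetical sketch of the ToOpenCVImage extension used above.
        public static Image<TColor, TDepth> ToOpenCVImage<TColor, TDepth>(this ColorImageFrame frame)
            where TColor : struct, IColor
            where TDepth : new()
        {
            byte[] pixels = new byte[frame.PixelDataLength];
            frame.CopyPixelDataTo(pixels);

            // Wrap the raw BGRA buffer, then convert to the requested color/depth.
            using (Image<Bgra, byte> bgra = new Image<Bgra, byte>(frame.Width, frame.Height))
            {
                bgra.Bytes = pixels;
                return bgra.Convert<TColor, TDepth>();
            }
        }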
Example #17
        //motion detection processing
        private Image <Bgr, Byte> ProcessFrame(Image <Bgr, Byte> image)
        {
            // using (Image<Bgr, Byte> image = grabber.QueryFrame().Resize(320, 240, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC))
            using (MemStorage storage = new MemStorage()) //create storage for motion components
            {
                if (_forgroundDetector == null)
                {
                    //_forgroundDetector = new BGCodeBookModel<Bgr>();
                    // _forgroundDetector = new FGDetector<Bgr>(Emgu.CV.CvEnum.FORGROUND_DETECTOR_TYPE.FGD);

                    _forgroundDetector = new BGStatModel <Bgr>(image, Emgu.CV.CvEnum.BG_STAT_TYPE.FGD_STAT_MODEL);
                }

                _forgroundDetector.Update(image);

                //    imageBoxFrameGrabber.Image = image;

                //update the motion history
                _motionHistory.Update(_forgroundDetector.ForgroundMask);

                #region get a copy of the motion mask and enhance its color
                double[] minValues, maxValues;
                Point[]  minLoc, maxLoc;
                _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
                Image <Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
                #endregion

                //create the motion image
                Image <Bgr, Byte> motionImage = new Image <Bgr, byte>(motionMask.Size);
                //display the motion pixels in blue (first channel)
                motionImage[0] = motionMask;

                //Threshold to define a motion area, reduce the value to detect smaller motion
                double minArea = 100;

                storage.Clear(); //clear the storage
                Seq <MCvConnectedComp> motionComponents = _motionHistory.GetMotionComponents(storage);

                if (showGridLines)
                {
                    LineSegment2D line  = new LineSegment2D(new Point(0, 169), new Point(520, 169));
                    LineSegment2D line2 = new LineSegment2D(new Point(259, 0), new Point(259, 340));

                    image.Draw(line, new Bgr(Color.White), 2);
                    image.Draw(line2, new Bgr(Color.White), 2);
                }


                if (displayPosNum)
                {
                    for (int i = 0; i < dsPos.Tables[0].Rows.Count; i++)
                    {
                        if (showPos)
                        {
                            image.Draw("# " + dsPos.Tables[0].Rows[i][0].ToString(), ref font, new Point(int.Parse(dsPos.Tables[0].Rows[i][1].ToString()) - 120, int.Parse(dsPos.Tables[0].Rows[i][2].ToString()) - 50), new Bgr(Color.Yellow));
                        }
                        if (showNames)
                        {
                            image.Draw(dsPos.Tables[0].Rows[i][3].ToString(), ref font, new Point(int.Parse(dsPos.Tables[0].Rows[i][1].ToString()) - 120, int.Parse(dsPos.Tables[0].Rows[i][2].ToString()) - 70), new Bgr(Color.Yellow));
                        }
                    }
                }


                if (red1 && red1cnt < 100)
                {
                    red1cnt++;
                    image.Draw(new Rectangle(0, 0, 255, 165), new Bgr(Color.Red), 3);

                    if (red1cnt == 99)
                    {
                        red1    = false;
                        red1cnt = 0;
                    }
                }
                if (red2 && red2cnt < 100)
                {
                    red2cnt++;
                    image.Draw(new Rectangle(262, 0, 257, 167), new Bgr(Color.Red), 3);


                    if (red2cnt == 99)
                    {
                        red2    = false;
                        red2cnt = 0;
                    }
                }
                if (red3 && red3cnt < 100)
                {
                    red3cnt++;
                    image.Draw(new Rectangle(0, 170, 260, 170), new Bgr(Color.Red), 3);


                    if (red3cnt == 99)
                    {
                        red3    = false;
                        red3cnt = 0;
                    }
                }
                if (red4 && red4cnt < 100)
                {
                    red4cnt++;
                    image.Draw(new Rectangle(260, 170, 260, 170), new Bgr(Color.Red), 3);


                    if (red4cnt == 99)
                    {
                        red4    = false;
                        red4cnt = 0;
                    }
                }


                if (green1 && green1cnt < 200)
                {
                    green1cnt++;
                    image.Draw(new Rectangle(0, 0, 255, 165), new Bgr(Color.Green), 3);

                    if (green1cnt == 199)
                    {
                        green1    = false;
                        green1cnt = 0;
                    }
                }
                if (green2 && green2cnt < 200)
                {
                    green2cnt++;
                    image.Draw(new Rectangle(262, 0, 257, 167), new Bgr(Color.Green), 3);


                    if (green2cnt == 199)
                    {
                        green2    = false;
                        green2cnt = 0;
                    }
                }
                if (green3 && green3cnt < 200)
                {
                    green3cnt++;
                    image.Draw(new Rectangle(0, 170, 260, 170), new Bgr(Color.Green), 3);


                    if (green3cnt == 199)
                    {
                        green3    = false;
                        green3cnt = 0;
                    }
                }
                if (green4 && green4cnt < 200)
                {
                    green4cnt++;
                    image.Draw(new Rectangle(260, 170, 260, 170), new Bgr(Color.Green), 3);


                    if (green4cnt == 199)
                    {
                        green4    = false;
                        green4cnt = 0;
                    }
                }

                //iterate through each of the motion components
                foreach (MCvConnectedComp comp in motionComponents)
                {
                    //reject the components that have small area;
                    if (comp.area < minArea)
                    {
                        continue;
                    }


                    // find the angle and motion pixel count of the specific area
                    double angle, motionPixelCount;

                    _motionHistory.MotionInfo(comp.rect, out angle, out motionPixelCount);


                    //if (motionPixelCount > 100000) { image.Draw(l5 , new Bgr(Color.Red), 10);  } else { image.Draw(l5 , new Bgr(Color.Green), 10);  }
                    //reject the area that contains too few motion
                    // if (motionPixelCount < comp.area * 0.8) continue;
                    if (motionPixelCount < comp.area * 0.05)
                    {
                        continue;
                    }

                    int nearpos = nearestPosition(comp.rect.X, comp.rect.Y);
                    //if (1000 > comp.area) continue;

                    //Draw each individual motion in red
                    //  DrawMotion(motionImage, comp.rect, angle, new Bgr(Color.Red));
                    if (nearpos == 3 && comp.area < 500)
                    {
                        continue;
                    }
                    if (nearpos == 4 && comp.area < 500)
                    {
                        continue;
                    }

                    if (comp.rect.X > 60 && comp.rect.Y > 60)
                    {
                        if (motionQueue.Count == 100)
                        {
                            motionQueue.Dequeue();
                            motionQueue.Enqueue(nearpos);
                        }
                        else
                        {
                            motionQueue.Enqueue(nearpos);
                        }



                        // LineSegment2D l5 = new LineSegment2D(new Point(comp.rect.X, comp.rect.Y), new Point(comp.rect.X, comp.rect.Y));
                        // image.Draw(l5, new Bgr(Color.Red), 10);
                        //  image.Draw(comp.area.ToString(), ref font, new Point(comp.rect.X, comp.rect.Y), new Bgr(Color.LightGreen));
                        if (showMotion)
                        {
                            image.Draw(comp.rect, new Bgr(Color.Yellow), 2);
                        }
                    }
                }

                // find and draw the overall motion angle
                double overallAngle, overallMotionPixelCount;
                _motionHistory.MotionInfo(motionMask.ROI, out overallAngle, out overallMotionPixelCount);
                // DrawMotion(motionImage, motionMask.ROI, overallAngle, new Bgr(Color.Green));

                //Display the amount of motions found on the current image
                //   UpdateText(String.Format("Total Motions found: {0}; Motion Pixel count: {1}", motionComponents.Total, overallMotionPixelCount));

                //Display the image of the motion
                // imageBoxFrameGrabber.Image = motionImage;  ///motion image

                return image;
            }
        }