The MotionHistory class.
For help on using this class, take a look at the Motion Detection example.
Inheritance: DisposableObject
Example #1
0
      public Form1()
      {
         InitializeComponent();

         // Lazily create the camera capture the first time the form is built.
         if (_capture == null)
         {
            try
            {
               _capture = new Capture();
            }
            catch (NullReferenceException excpt)
            {
               // Surface the capture-creation failure to the user.
               MessageBox.Show(excpt.Message);
            }
         }

         // Without a capture device there is nothing to process.
         if (_capture == null)
            return;

         _motionHistory = new MotionHistory(
             1.0,  // seconds of motion history to keep
             0.05, // max delta (seconds) for cvCalcMotionGradient
             0.5); // min delta (seconds) for cvCalcMotionGradient

         // Process a frame whenever the UI thread goes idle.
         Application.Idle += ProcessFrame;
      }
Example #2
0
        /// <summary>
        /// Builds the form, acquires the default camera, and configures motion
        /// detection driven by <see cref="Application.Idle"/>.
        /// </summary>
        public Form1()
        {
            InitializeComponent();

            // Try to create the camera capture; report any failure to the user.
            if (_capture == null)
            {
                try
                {
                    _capture = new Capture();
                }
                catch (NullReferenceException excpt)
                {
                    // Show errors if there are any.
                    MessageBox.Show(excpt.Message);
                }
            }

            if (_capture != null) // camera capture was successfully created
            {
                _motionHistory = new MotionHistory(
                    6,    // number of images buffered; tune to the camera's frame rate
                    20,   // 0-255, pixel intensity change required to count as motion
                    1.0,  // seconds of motion history to keep
                    0.05, // max delta (seconds) for cvCalcMotionGradient
                    0.5); // min delta (seconds) for cvCalcMotionGradient

                // Method-group conversion is equivalent to, and cleaner than,
                // the explicit `new EventHandler(ProcessFrame)` wrapper.
                Application.Idle += ProcessFrame;
            }
        }
Example #3
0
        /// <summary>
        /// Per-frame handler (wired to Application.Idle elsewhere): grabs a frame,
        /// runs a Haar-cascade detector, raises "Help Request" alerts for new
        /// detections, forwards the frame to motion processing, and optionally
        /// records the output video.
        /// </summary>
        void FrameGrabber(object sender, EventArgs e)
        {
            try
            {
                //Get the current frame form capture device
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            }
            catch (NullReferenceException e1)
            {
                // QueryFrame() returned null (end of the current video): reset the
                // motion state and advance to the next file, cycling through the
                // five entries of vidlist via excnt.
                _motionHistory = new MotionHistory(2.0, 0.05, 0.5);
                _forgroundDetector = null;
                motionQueue.Clear(); helpQueue.Clear();
                grabber = new Capture(vidlist[excnt]);
                excnt++;
                if (excnt == 5) { excnt = 0; }
                currentFrame = grabber.QueryFrame().Resize(520, 340, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
                // Clear all per-position indicator flags for the new video.
                green1 = false; green2 = false; green3 = false; green4 = false;
                red1 = false; red2 = false; red3 = false; red4 = false;
            }

            //Convert it to Grayscale
            gray = currentFrame.Convert<Gray, Byte>();

            // Haar-cascade detection using the `face` classifier.
            // NOTE(review): despite the name, downstream code treats each hit as a
            // raised-hand "Help Request" — confirm which cascade `face` is loaded with.
            MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(
              face,
              1.2,
              10,
              Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING,
              new Size(20, 20));

            //Action for each element detected
            foreach (MCvAvgComp f in facesDetected[0])
            {

                t = t + 1;
                // Crop, grayscale, and normalize the detection to 100x100.
                result = currentFrame.Copy(f.rect).Convert<Gray, byte>().Resize(100, 100, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);

                //MessageBox.Show("wiidth " + f.rect.Width + " height " + f.rect.Height + " area " + f.rect.Width * f.rect.Height);
                // Reject detections wider than 80px (presumably too close / false
                // positives — TODO confirm threshold rationale).
                if (f.rect.Width > 80) continue;

                //draw the face detected in the 0th (gray) channel with blue color
                if (showHand)
                    currentFrame.Draw(f.rect, new Bgr(Color.LightGreen), 2);

                // Map the detection rectangle to the nearest seating position.
                int nearespos = nearestPosition(f.rect.X, f.rect.Y);

                // Only raise an alert if this position is not already queued.
                if (helpQueue.ToArray().ToList().IndexOf(nearespos) == -1)
                {
                    //lbAlerts.Items.Add("Help request at #" + nearespos.ToString());

                    dgAlerts.Rows.Add("Help Request", nearespos.ToString());
                    // NOTE(review): SQL built by string concatenation — examid and
                    // DateTime.Now are interpolated directly. Parameterize this query
                    // in DB_Connect to avoid injection/format issues.
                    DB_Connect.InsertQuery("INSERT INTO alert_tab(exam_id,position_id,alert_type,alert_time) VALUES(" + examid + "," + nearespos.ToString() + ",'H','" + DateTime.Now + "')");
                    dgAlerts.FirstDisplayedScrollingRowIndex = dgAlerts.RowCount - 1;

                    //GCM - help
                    //AndroidGCMPushNotification apnGCM = new AndroidGCMPushNotification();
                    //string strResponse = apnGCM.SendNotification(regID, nearespos.ToString() + " "+ DateTime.Now, "H");

                    // Light the green indicator for the alerted position.
                    if (nearespos == 1) green1 = true;
                    else if (nearespos == 2) green2 = true;
                    else if (nearespos == 3) green3 = true;
                    else if (nearespos == 4) green4 = true;

                    // Bounded queue of the last 10 alerted positions.
                    if (helpQueue.Count == 10)
                    {
                        helpQueue.Dequeue();
                        helpQueue.Enqueue(nearespos);
                    }
                    else
                    {
                        helpQueue.Enqueue(nearespos);
                    }
                }
            }

            //Show the faces procesed and recognized
            imageBoxFrameGrabber.Image = ProcessFrame(currentFrame);

            // Lazily open the recording sink when RecordVideo is enabled in config.
            if (captureOutput == null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                MessageBox.Show("reording start");
                captureOutput = new VideoWriter(@"video" + examid + ".avi", (int)grabber.GetCaptureProperty(CAP_PROP.CV_CAP_PROP_FOURCC), 15, 520, 340, true);
            }

            // NOTE(review): if the VideoWriter constructor above threw, captureOutput
            // is still null here and this write would raise NullReferenceException.
            if (currentFrame != null && xdoc.Descendants("RecordVideo").First().Value == "1")
            {
                captureOutput.WriteFrame<Bgr, Byte>(currentFrame);
            }
        }
Example #4
0
        /// <summary>
        /// Start button: loads configuration, opens the exam video as the frame
        /// source, wires per-frame processing, and initializes motion history.
        /// </summary>
        private void button1_Click(object sender, EventArgs e)
        {
            isStarted = true;
            lblRemianingTime.Visible = true;
            xdoc = XDocument.Load("AEISConfig.xml");

            // Open the pre-recorded exam video as the frame source.
            grabber = new Capture(path + "vids\\MVI_3649_1_1.avi");

            // grabber.QueryFrame();
            //Initialize the FrameGraber event
            Application.Idle += new EventHandler(FrameGrabber);

            // Toggle start/stop buttons (the original set these twice).
            button1.Enabled = false;
            button2.Enabled = true;

            // Motion detection setup.
            // NOTE(review): the original guarded this with `if (grabber == null)`
            // plus a try/catch fallback, but `new` never yields null in C# (failure
            // throws instead), so that branch was dead code and has been removed.
            _motionHistory = new MotionHistory(
                2.0,  // seconds of motion history to keep
                0.05, // max delta (seconds) for cvCalcMotionGradient
                0.5); // min delta (seconds) for cvCalcMotionGradient

            //Application.Idle += ProcessFrame;
        }
Example #5
0
        /// <summary>
        /// Configures face capture: stores the cascade file paths and detection
        /// tuning values, then picks a CUDA or CPU face detector.
        /// </summary>
        /// <param name="faceTrainingFile">Path to the face cascade file.</param>
        /// <param name="eyeTrainingFile">Path to the eye cascade file.</param>
        /// <param name="scale">Detector scale factor.</param>
        /// <param name="neighbors">Detector min-neighbors value.</param>
        /// <param name="minSize">Minimum face size in pixels.</param>
        public FaceCapture(string faceTrainingFile, string eyeTrainingFile, double scale, int neighbors, int minSize)
        {
            loadConfig();

            FaceTrainingFile = faceTrainingFile;
            EyeTrainingFile = eyeTrainingFile;
            Scale = scale;
            Neighbors = neighbors;
            FaceMinSize = minSize;
            FaceMaxSize = 200;

            // Prefer the CUDA-accelerated detector when available; any failure while
            // probing CUDA falls back to the CPU implementation.
            try
            {
                if (HasCuda && CudaInvoke.HasCuda)
                {
                    FaceDetector = new FaceDetectCuda();
                }
                else
                {
                    FaceDetector = new FaceDetect();
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine("ERROR - FaceCapture CudaInvoke.HasCuda errCuda: " + ex);
                FaceDetector = new FaceDetect();
            }

            _motionHistory = new MotionHistory(
                motionHistoryDuration, //in second, the duration of motion history you wants to keep
                maxDelta, //in second, maxDelta for cvCalcMotionGradient
                minDelta); //in second, minDelta for cvCalcMotionGradient
            //capture = new Capture();
        }
Example #6
0
 public void StartCapture()
 {
     // Begin continuous capture from the default camera with motion tracking.
     Console.WriteLine("StartCapture");

     Faces = new List<Face>();
     capture = new Capture();

     // 1.0s history window, 0.05s max / 0.5s min gradient deltas.
     _motionHistory = new MotionHistory(1.0, 0.05, 0.5);

     // Process a frame each time the UI thread goes idle.
     Application.Idle += new EventHandler(ProcessFrame);
 }
Example #7
0
        /// <summary>
        /// Captures frames from the default camera until <paramref name="numFaces"/>
        /// faces scoring above <paramref name="minScore"/> have been collected.
        /// </summary>
        /// <param name="numFaces">Number of qualifying faces to collect.</param>
        /// <param name="minScore">Minimum FaceScore for a face to count.</param>
        /// <returns>The collected faces (fewer if the stream ends early).</returns>
        public List<Face> GetFaces(int numFaces, int minScore)
        {
            int frameCount = 0;
            capture = new Capture();
            _motionHistory = new MotionHistory(1.0, 0.05, 0.5);
            List<Face> foundfaces = new List<Face>();

            try
            {
                // Count property instead of the LINQ Count() call per iteration.
                while (foundfaces.Count < numFaces)
                {
                    Mat mat = capture.QueryFrame();
                    if (mat == null)
                    {
                        // Stream ended / device lost: return what we have rather than
                        // dereferencing null (the original threw NullReferenceException
                        // here and never disposed the capture).
                        break;
                    }

                    Image<Bgr, Byte> ImageFrame = mat.ToImage<Bgr, Byte>();

                    frameCount = frameCount + 1;
                    MotionInfo motion = this.GetMotionInfo(mat);
                    List<Face> detectedFaces = FaceDetector.FindFaces(ImageFrame, this.FaceTrainingFile, this.EyeTrainingFile, this.Scale, this.Neighbors, this.FaceMinSize);

                    // Ignore the first two frames — presumably to let the motion
                    // history accumulate / camera settle; TODO confirm.
                    if (frameCount > 2)
                    {
                        foreach (Face face in detectedFaces)
                        {
                            face.MotionObjects = motion.MotionObjects;
                            face.MotionPixels = motion.MotionPixels;

                            if (face.FaceScore > minScore)
                            {
                                foundfaces.Add(face);
                            }
                        }
                    }
                }
            }
            finally
            {
                // Release the camera even if detection throws mid-loop.
                capture.Dispose();
                capture = null;
            }

            return foundfaces;
        }
Example #8
0
        /// <summary>
        /// Creates the capture source selected in the UI (camera or video file),
        /// configures motion history, and starts grabbing frames.
        /// </summary>
        private void StartCapture()
        {
            if (_capture == null)
            {
                try
                {
                    switch (SourceComboBox.SelectedIndex)
                    {
                        case 0:
                            _capture = new Capture();            // default camera
                            break;
                        case 1:
                            _capture = new Capture(videoSource); // configured video source
                            break;
                    }
                }
                catch (NullReferenceException excpt)
                {
                    // Surface the capture-creation failure to the user.
                    MessageBox.Show(excpt.Message);
                }
            }

            // Nothing more to do if no capture source could be created.
            if (_capture == null)
                return;

            _motionHistory = new MotionHistory(
                1.0,  // seconds of motion history to keep
                0.05, // max delta (seconds) for cvCalcMotionGradient
                0.5); // min delta (seconds) for cvCalcMotionGradient

            _capture.ImageGrabbed += ProcessFrame;
            _capture.Start();
        }
Example #9
0
        /// <summary>
        /// Motion Detector Constructor. Stores the shared imaging data, applies
        /// default thresholds, and builds the motion-tracking pipeline objects.
        /// </summary>
        /// <param name="imagingData">Common Image Processing Imaging Data</param>
        public MotionDetector(IImageData imagingData )
        {
            ImagingData = imagingData;

            //Set values for properties (tunable thresholds start at their defaults)
            MinMotionAreaThreshold = DEFAULT_MIN_MOTION_AREA_THRESHOLD;
            MinMotionPixelFactor = DEFAULT_MIN_MOTION_PIXEL_FACTOR;
            GrayThreshold = DEFAULT_GRAY_THRESHOLD;

            //Instantiate private members: motion history buffer, background
            //subtractor, and the Mats reused for segmentation mask / foreground.
            _motionHistory = new MotionHistory(MOTION_HISTORY_DURATION, MOTION_HISTORY_MAX_DELTA, MOTION_HISTORY_MIN_DELTA);
            _forgroundDetector = new BackgroundSubtractorMOG2();
            _segMask = new Mat();
            _foreground = new Mat();

            //Empty registry; monitors are presumably added later — confirm with callers.
            ComputerVisionMonitors = new Dictionary<eComputerVisionMonitor, IComputerVisionMonitor>();
        }