Example #1
        public mouse()
        {
            InitializeComponent();
            //CvInvoke.UseOpenCL = false;
            // busy-wait until the shared capture object has been created elsewhere
            while (global.capture == null)
            {
            }
            try
            {
                capture = global.capture;    //new Capture();
                // capture = new Capture();
                //if (capture != null) capture.FlipHorizontal = !capture.FlipHorizontal;
                capture.ImageGrabbed += ProcessFrame;
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }

            // MessageBox.Show("" + Screen.PrimaryScreen.Bounds + "         __        " + capture.Width + " " + capture.Height);

            fgDetector       = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            blobDetector     = new Emgu.CV.Cvb.CvBlobDetector();
            cursor_history_x = new Queue <int>();
            cursor_history_y = new Queue <int>();

            //initialize the cursor history queues with the screen-center position
            for (int i = 0; i < queue_cursor_length; i++)
            {
                cursor_history_x.Enqueue(Screen.PrimaryScreen.Bounds.Width / 2);
                cursor_history_y.Enqueue(Screen.PrimaryScreen.Bounds.Height / 2);
            }
        }
Example #2
 public gesture()
 {
     InitializeComponent();
     fow_prop = new gesture_recog();
     fow_prop.Show();
     //CvInvoke.UseOpenCL = false;
     try
     {
         grabber = global.capture;
     }
     catch (NullReferenceException excpt)
     {
         MessageBox.Show(excpt.Message);
     }
     grabber.QueryFrame(); // grab an initial frame before reading the capture dimensions
     frameWidth  = grabber.Width;
     frameHeight = grabber.Height;
     //   detector = new AdaptiveSkinDetector(1, AdaptiveSkinDetector.MorphingMethod.NONE);
     hsv_min = new Hsv(0, 45, 0);
     hsv_max = new Hsv(20, 254, 254);
     // YCrCb_min = new Ycc(0, 131, 80);
     //YCrCb_max = new Ycc(255, 185, 135);
     YCrCb_min = new Ycc(0, 130, 80);
     YCrCb_max = new Ycc(255, 185, 135);
     index     = 0;
     for (int i = 0; i < 10; i++)
     {
         na[i] = 1;
     }
     fgDetector        = new BackgroundSubtractorMOG2();
     blobDetector      = new Emgu.CV.Cvb.CvBlobDetector();
     Application.Idle += new EventHandler(FrameGrabber);
 }
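
Note: Example 2 sets up HSV and YCrCb skin-color bounds, but the segmentation step that uses them is not part of this listing. The sketch below shows how such bounds are typically applied with Emgu CV's InRange; the helper name SkinMask and the erode/dilate cleanup are illustrative, not taken from the original source.

 // Sketch: build a binary skin mask from the HSV bounds above (hsv_min / hsv_max).
 Image <Gray, byte> SkinMask(Image <Bgr, byte> frame)
 {
     using (Image <Hsv, byte> hsv = frame.Convert <Hsv, byte>())
     {
         // pixels inside [hsv_min, hsv_max] become 255, everything else 0
         Image <Gray, byte> mask = hsv.InRange(new Hsv(0, 45, 0), new Hsv(20, 254, 254));
         // one erode/dilate pass removes speckle noise
         return(mask.Erode(1).Dilate(1));
     }
 }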
Example #3
 public JavsMotion()
 {
     AdjustableParameters = new Dictionary <string, ParameterProfile>();
     AdjustableParameters["MinMotionArea"] = new ParameterProfile
     {
         Description  = "Minimum Motion Size Threshold",
         MaxValue     = 1000,
         MinValue     = 5,
         CurrentValue = 100,
         Interval     = 1
     };
     AdjustableParameters["MinMotionDistance"] = new ParameterProfile
     {
         Description  = "Minimum Motion Distance Threshold",
         MaxValue     = 1,
         MinValue     = 0.005,
         CurrentValue = 0.05,
         Interval     = 0.005
     };
     //Try out various background subtractors
     _backgroundSubtractor = new BackgroundSubtractorMOG2();
     //Can the parameters taken by this constructor be adjusted during capture?
     _motionHistory = new MotionHistory(
         1.0,  //in seconds, the duration of motion history you want to keep
         0.05, //in seconds, maxDelta for cvCalcMotionGradient
         0.5); //in seconds, minDelta for cvCalcMotionGradient
 }
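
Note: the constructor above asks whether the parameters taken by the MotionHistory constructor can be adjusted during capture. They appear to be settable only at construction, so the usual workaround is the same dispose-and-recreate pattern Example 15 applies to its background subtractor. A minimal sketch, assuming MotionHistory is IDisposable (Emgu CV's unmanaged wrappers generally are):

 // Sketch: rebuild the motion history when a tuning value changes mid-capture.
 void RebuildMotionHistory(double durationSec, double maxDeltaSec, double minDeltaSec)
 {
     if (_motionHistory != null)
     {
         _motionHistory.Dispose();
     }
     _motionHistory = new MotionHistory(durationSec, maxDeltaSec, minDeltaSec);
 }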
Example #4
        public CameraTracking(int subtractionHistory, int subtractionThreshold, int frameBlurStrength, int largestDetectionHeightSizeDivisor, int largestDetectionWidthSizeDivisor, int smallestDetectionHeightSizeDivisor, int smallestDetectionWidthSizeDivisor)
        {
            FrameBlurStrength = frameBlurStrength;
            LargestDetectionHeightSizeDivisor  = largestDetectionHeightSizeDivisor;
            LargestDetectionWidthSizeDivisor   = largestDetectionWidthSizeDivisor;
            SmallestDetectionHeightSizeDivisor = smallestDetectionHeightSizeDivisor;
            SmallestDetectionWidthSizeDivisor  = smallestDetectionWidthSizeDivisor;

            try
            {
                _cameraCapture = new VideoCapture();

                // I had to set these by hand to match our camera; OpenCV doesn't always pull these properties correctly and sometimes shows corrupted frames or nothing at all
                // _cameraCapture.SetCaptureProperty(CapProp.FrameWidth, 1600);
                // _cameraCapture.SetCaptureProperty(CapProp.FrameHeight, 1200);
                // _cameraCapture.SetCaptureProperty(CapProp.FourCC, Emgu.CV.VideoWriter.Fourcc('Y', 'U', 'Y', '2'));

                _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2(subtractionHistory, subtractionThreshold);
                _blobDetector = new CvBlobDetector();
                _tracker      = new CvTracks();
                Ready         = true;
            }
            catch (Exception)
            {
                // initialization failed (no camera, bad driver, ...); report not ready
                Ready = false;
            }
        }
Example #5
        public Detector()
        {
            cascade       = new CascadeClassifier(@"../cascade/cars.xml");
            dScaleFactor  = 1.1;
            uMinNeighbors = 2;
            minSize       = new Size(100, 100);


            this.bckSub  = BackgroundSubtractorKNN.Create();
            this.fgMask  = new Mat();
            this.kernel1 = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(5, 5));
            this.kernel2 = Cv2.GetStructuringElement(MorphShapes.Rect, new Size(5, 5));
        }
Example #6
        /// <summary>
        /// Motion Detector Constructor
        /// </summary>
        /// <param name="imagingData">Common Image Processing Imaging Data</param>
        public MotionDetector(IImageData imagingData)
        {
            ImagingData = imagingData;

            //Set values for properties
            MinMotionAreaThreshold = DEFAULT_MIN_MOTION_AREA_THRESHOLD;
            MinMotionPixelFactor   = DEFAULT_MIN_MOTION_PIXEL_FACTOR;
            GrayThreshold          = DEFAULT_GRAY_THRESHOLD;

            //Instantiate private members
            _motionHistory     = new MotionHistory(MOTION_HISTORY_DURATION, MOTION_HISTORY_MAX_DELTA, MOTION_HISTORY_MIN_DELTA);
            _forgroundDetector = new BackgroundSubtractorMOG2();
            _segMask           = new Mat();
            _foreground        = new Mat();

            ComputerVisionMonitors = new Dictionary <eComputerVisionMonitor, IComputerVisionMonitor>();
        }
Example #7
        void Run()
        {
            try
            {
                _cameraCapture = new VideoCapture(1);


                _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
                _blobDetector = new CvBlobDetector();
                _tracker      = new CvTracks();

                Application.Idle += ProcessFrame;
            }
            catch (Exception)
            {
                // startup failures (e.g. no camera at index 1) are silently ignored here
            }
        }
Example #8
        public camera()
        {
            InitializeComponent();
            CvInvoke.UseOpenCL = false;
            try
            {
                capture = global.capture;
                capture.ImageGrabbed += ProcessFrame;
            }
            catch (NullReferenceException excpt)
            {
                MessageBox.Show(excpt.Message);
            }

            fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            blobDetector = new Emgu.CV.Cvb.CvBlobDetector();
        }
Example #9
        void Run()
        {
            try
            {
                _cameraCapture = new Capture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            //_tracker = new BlobTrackerAuto<Bgr>();

            Application.Idle += ProcessFrame;
        }
Example #10
        void Run()
        {
            try
            {
                _cameraCapture = new VideoCapture();
            }
            catch (Exception e)
            {
                MessageBox.Show(e.Message);
                return;
            }

            _fgDetector   = new BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            _tracker      = new CvTracks();

            Application.Idle += ProcessFrame;
        }
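
Note: several Run() examples above subscribe a ProcessFrame handler that is not included in this listing. Below is a minimal sketch of what such a handler typically looks like, modeled on Emgu CV's VideoSurveillance sample; the blur size, blob area filter, and tracker thresholds are illustrative values, not the original authors'.

        void ProcessFrame(object sender, EventArgs e)
        {
            Mat frame = new Mat();
            _cameraCapture.Retrieve(frame);

            // a light blur suppresses sensor noise before background subtraction
            Mat smoothedFrame = new Mat();
            CvInvoke.GaussianBlur(frame, smoothedFrame, new Size(3, 3), 1);

            // foreground mask from the MOG2 background model
            Mat foregroundMask = new Mat();
            _fgDetector.Apply(smoothedFrame, foregroundMask);

            // group foreground pixels into blobs and track them across frames
            CvBlobs blobs = new CvBlobs();
            _blobDetector.Detect(foregroundMask.ToImage <Gray, byte>(), blobs);
            blobs.FilterByArea(100, int.MaxValue); // drop tiny blobs
            _tracker.Update(blobs, 20.0, 10, 5);   // distance / inactive / active thresholds

            // draw the tracked bounding boxes onto the frame
            foreach (KeyValuePair <uint, CvTrack> pair in _tracker)
            {
                CvInvoke.Rectangle(frame, pair.Value.BoundingBox, new MCvScalar(255, 0, 0), 2);
            }
        }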
Example #11
        static void Main(string[] args)
        {
            if (args.Length < 2)
            {
                Console.WriteLine("Please pass in camera name and address");
                return;
            }

            var codeFiles = @"C:\Users\jakka\Documents\code.txt";

            _code = File.ReadAllText(codeFiles);


            AppDomain.CurrentDomain.ProcessExit += CurrentDomain_ProcessExit;

            _fgDetector   = new BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            _tracker      = new CvTracks();

            _name = args[0];
            var address = args[1];

            var fn = Path.Combine(Path.GetTempPath(), "survel");

            if (!Directory.Exists(fn))
            {
                Directory.CreateDirectory(fn);
            }
            else
            {
                //foreach (var f in Directory.GetFiles(fn))
                //{
                //   File.Delete(f);
                //}
            }

            Task.Run(async() =>
            {
                await _processor(address, fn);
            });

            _watcher(_name, fn).GetAwaiter().GetResult();
        }
Example #12
        LineSegment2DF Line2 = new LineSegment2DF();                                  //crosshair cursor
        public Form1()
        {
            InitializeComponent();

            _motionDetect = new BackgroundSubtractorMOG2();//instantiate with default parameters.
            //_motionhistory = new MotionHistory(0.01, 0.05, 0.5);
            OpenFileDialog op   = new OpenFileDialog();
            string         path = "";

            _capture = new Capture("1.avi"); //open 1.avi from the working directory.
            if (_capture != null)            //if camera capture has been successfully created
            {
                _motionhistory = new MotionHistory(
                    0.1,                        //duration of the motion history, in seconds.
                    0.5,                        //maxDelta for the motion gradient, in seconds.
                    0.05);                      //minDelta for the motion gradient, in seconds.
                _capture.ImageGrabbed += frame; //event raised for every grabbed frame
                _capture.Start();               //start grabbing frames.
            }
        }
Example #13
        static void Main(string[] args)
        {
            _cameraCapture = new VideoCapture(1);


            _fgDetector   = new Emgu.CV.VideoSurveillance.BackgroundSubtractorMOG2();
            _blobDetector = new CvBlobDetector();
            _tracker      = new CvTracks();


            Task.Run(() =>
            {
                DetectFaces();
            })
            .ContinueWith((p) =>
            {
                if (p != null && p.IsFaulted)
                {
                    Console.WriteLine(p.Exception.InnerException.Message);
                }
            });

            Task.Run(() =>
            {
                IdentifyFaces();
            })
            .ContinueWith((p) =>
            {
                if (p != null && p.IsFaulted)
                {
                    Console.WriteLine(p.Exception.InnerException.Message);
                }
            });

            Console.ReadKey();
        }
Example #14
        private void ProcImage3(ref System.Drawing.Bitmap src, ref System.Drawing.Bitmap srcB, out System.Drawing.Bitmap dst)
        {
            dst = null;
            Mat srcImg = BitmapConverter.ToMat(src);

            Cv2.CvtColor(srcImg, srcImg, ColorConversionCodes.BGRA2BGR);

            Mat srcImgB = BitmapConverter.ToMat(srcB);

            Cv2.CvtColor(srcImgB, srcImgB, ColorConversionCodes.BGRA2BGR);

            Mat    mask                  = new Mat();
            double threshold             = App.appSettings.DarkAreaThreshold;
            BackgroundSubtractor backSub = BackgroundSubtractorMOG2.Create(1, threshold, true);

            //BackgroundSubtractor backSub = BackgroundSubtractorMOG.Create(1, 5, 0.7, 0);
            //BackgroundSubtractor backSub = BackgroundSubtractorGMG.Create(1, 0.5);
            backSub.Apply(srcImgB, mask, 1);
            backSub.Apply(srcImg, mask, 0);

            Cv2.Threshold(mask, mask, 180, 255, ThresholdTypes.Binary);

            var element = Cv2.GetStructuringElement(
                MorphShapes.Rect,
                new OpenCvSharp.Size(2 * 2 + 1, 2 * 2 + 1),
                new OpenCvSharp.Point(2, 2));

            Mat tmp = new Mat();

            Cv2.MorphologyEx(mask, tmp, MorphTypes.Close, element, null, App.appSettings.Iterations);

            Cv2.MorphologyEx(tmp, mask, MorphTypes.Open, element, null, App.appSettings.Iterations2);
            Cv2.Erode(mask, tmp, element);

            dst = BitmapConverter.ToBitmap(tmp);
        }
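
Note: Example 14 uses MOG2 as a two-image change detector rather than on video. With history = 1, applying the reference frame at learning rate 1 makes that frame the entire background model, and applying the probe frame at learning rate 0 compares it against the model without updating it; the threshold at 180 then drops the shadow label (MOG2 marks shadows as 127 when detectShadows is true). A stripped-down sketch of the same trick in OpenCvSharp; the helper name and threshold are illustrative:

        // Sketch: diff two still images with MOG2.
        static Mat DiffViaMog2(Mat reference, Mat probe, double varThreshold)
        {
            Mat mask = new Mat();
            using (var backSub = BackgroundSubtractorMOG2.Create(1, varThreshold, true))
            {
                backSub.Apply(reference, mask, 1); // learningRate 1: the model becomes this frame
                backSub.Apply(probe, mask, 0);     // learningRate 0: compare only, no update
            }
            // keep confident foreground (255) and drop the 127 shadow label
            Cv2.Threshold(mask, mask, 180, 255, ThresholdTypes.Binary);
            return(mask);
        }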
Example #15
        protected override MotionDetectorOutput DoProcess(MotionDetectorInput input)
        {
            var output = new MotionDetectorOutput();

            var subtractorConfig = input.Settings.SubtractorConfig;

            if (_foregroundDetector == null || !_currentSubtractorConfig.Equals(subtractorConfig))
            {
                if (_foregroundDetector != null)
                {
                    _foregroundDetector.Dispose();
                }

                _foregroundDetector = new BackgroundSubtractorMOG2(
                    subtractorConfig.History
                    , subtractorConfig.Threshold
                    , subtractorConfig.ShadowDetection);

                _currentSubtractorConfig = subtractorConfig;
            }

            _foregroundDetector.Apply(input.Captured, _forgroundMask);

            _motionHistory.Update(_forgroundMask);

            #region get a copy of the motion mask and enhance its color
            double[] minValues, maxValues;
            Point[]  minLoc, maxLoc;

            _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);

            var motionMask = new Mat();
            using (var sa = new ScalarArray(255.0 / maxValues[0]))
            {
                CvInvoke.Multiply(_motionHistory.Mask, sa, motionMask, 1, DepthType.Cv8U);
            }
            #endregion

            if (input.SetCapturedImage)
            {
                output.ForegroundImage = _forgroundMask.ToImage <Bgr, byte>();
                output.MotionImage     = new Image <Bgr, byte>(motionMask.Size);
                CvInvoke.InsertChannel(motionMask, output.MotionImage, 0);
            }

            Rectangle[] motionComponents;
            using (var boundingRect = new VectorOfRect())
            {
                _motionHistory.GetMotionComponents(_segMask, boundingRect);
                motionComponents = boundingRect.ToArray();
            }

            foreach (Rectangle motionComponent in motionComponents)
            {
                int area = motionComponent.Area();

                //reject components whose area is too small
                if (area < input.Settings.MinimumArea || area > input.Settings.MaximumArea)
                {
                    continue;
                }

                // find the angle and motion pixel count of the specific area
                double angle, motionPixelCountDouble;
                _motionHistory.MotionInfo(_forgroundMask, motionComponent, out angle, out motionPixelCountDouble);

                int motionPixelCount = (int)motionPixelCountDouble;

                //reject areas that contain too little motion
                if (motionPixelCount < area * input.Settings.MinimumPercentMotionInArea)
                {
                    continue;
                }

                var motionSection = new MotionSection();
                motionSection.Area   = area;
                motionSection.Region = motionComponent;
                motionSection.Angle  = angle;
                motionSection.PixelsInMotionCount = motionPixelCount;

                output.MotionSections.Add(motionSection);
            }

            if (output.IsDetected)
            {
                switch (input.Settings.BiggestMotionType)
                {
                case BiggestMotionType.Unspecified:
                    break;

                case BiggestMotionType.Area:
                    output.MotionSections.Sort((x, y) => y.Area.CompareTo(x.Area));
                    break;

                case BiggestMotionType.Pixels:
                    output.MotionSections.Sort((x, y) => y.PixelsInMotionCount.CompareTo(x.PixelsInMotionCount));
                    break;
                }
                output.BiggestMotion = output.MotionSections.FirstOrDefault();
            }

            double overallAngle, overallMotionPixelCount;

            _motionHistory.MotionInfo(_forgroundMask, new Rectangle(Point.Empty, motionMask.Size), out overallAngle, out overallMotionPixelCount);

            output.OverallAngle            = overallAngle;
            output.OverallMotionPixelCount = Convert.ToInt32(overallMotionPixelCount);

            return(output);
        }
Example #16
        private void ProcessFrame(object sender, EventArgs e)
        {
            Mat image = new Mat();

            _capture.Retrieve(image);
            if (_forgroundDetector == null)
            {
                _forgroundDetector = new BackgroundSubtractorMOG2();
            }

            _forgroundDetector.Apply(image, _forgroundMask);

            //update the motion history
            _motionHistory.Update(_forgroundMask);

            #region get a copy of the motion mask and enhance its color
            double[] minValues, maxValues;
            Point[]  minLoc, maxLoc;
            _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
            Mat motionMask = new Mat();
            using (ScalarArray sa = new ScalarArray(255.0 / maxValues[0]))
                CvInvoke.Multiply(_motionHistory.Mask, sa, motionMask, 1, DepthType.Cv8U);
            //Image<Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
            #endregion

            //create the motion image
            Mat motionImage = new Mat(motionMask.Size.Height, motionMask.Size.Width, DepthType.Cv8U, 3);
            //display the motion pixels in blue (first channel)
            //motionImage[0] = motionMask;
            CvInvoke.InsertChannel(motionMask, motionImage, 0);

            //Threshold that defines a motion area; reduce the value to detect smaller motions
            double minArea = 100;

            //storage.Clear(); //clear the storage
            Rectangle[] rects;
            using (VectorOfRect boundingRect = new VectorOfRect())
            {
                _motionHistory.GetMotionComponents(_segMask, boundingRect);
                rects = boundingRect.ToArray();
            }

            //iterate through each of the motion components
            foreach (Rectangle comp in rects)
            {
                int area = comp.Width * comp.Height;
                //reject components whose area is too small
                if (area < minArea)
                {
                    continue;
                }

                // find the angle and motion pixel count of the specific area
                double angle, motionPixelCount;
                _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);

                //reject areas that contain too little motion
                if (motionPixelCount < area * 0.05)
                {
                    continue;
                }

                //Draw each individual motion in red
                DrawMotion(motionImage, comp, angle, new Bgr(Color.Red));
            }

            // find and draw the overall motion angle
            double overallAngle, overallMotionPixelCount;

            _motionHistory.MotionInfo(_forgroundMask, new Rectangle(Point.Empty, motionMask.Size), out overallAngle, out overallMotionPixelCount);
            DrawMotion(motionImage, new Rectangle(Point.Empty, motionMask.Size), overallAngle, new Bgr(Color.Green));

            if (this.Disposing || this.IsDisposed)
            {
                return;
            }

            capturedImageBox.Image  = image;
            forgroundImageBox.Image = _forgroundMask;

            //Display the amount of motions found on the current image
            UpdateText(String.Format("Total Motions found: {0}; Motion Pixel count: {1}", rects.Length, overallMotionPixelCount));

            //Display the image of the motion
            motionImageBox.Image = motionImage;
        }
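
Note: the DrawMotion helper called above is not part of this listing. The sketch below, modeled on Emgu CV's MotionDetection sample rather than the original author's code, shows the usual implementation: a circle over the motion region plus a line from its center indicating the motion angle, which MotionInfo reports in degrees.

        static void DrawMotion(IInputOutputArray image, Rectangle region, double angle, Bgr color)
        {
            Point center = new Point(region.X + region.Width / 2, region.Y + region.Height / 2);
            int   radius = (region.Width + region.Height) / 4;

            // y is negated because image coordinates grow downward
            int   dx  = (int)(Math.Cos(angle * Math.PI / 180.0) * radius);
            int   dy  = (int)(Math.Sin(angle * Math.PI / 180.0) * radius);
            Point tip = new Point(center.X + dx, center.Y - dy);

            CvInvoke.Circle(image, center, radius, color.MCvScalar);
            CvInvoke.Line(image, center, tip, color.MCvScalar, 2);
        }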
Example #17
        private void ProcessFrame(object sender, EventArgs e)
        {
            Mat image = new Mat();

            _capture.Retrieve(image);
            if (_forgroundDetector == null)
            {
                _forgroundDetector = new BackgroundSubtractorMOG2();
            }

            _forgroundDetector.Apply(image, _forgroundMask);

            //update the motion history
            _motionHistory.Update(_forgroundMask);

            #region get a copy of the motion mask and enhance its color
            double[] minValues, maxValues;
            Point[]  minLoc, maxLoc;
            _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
            Mat motionMask = new Mat();
            using (ScalarArray sa = new ScalarArray(255.0 / maxValues[0]))
                CvInvoke.Multiply(_motionHistory.Mask, sa, motionMask, 1, DepthType.Cv8U);
            //Image<Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
            #endregion

            //DetectFace.Detect(image, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, tryUseCuda, out detectionTime);
            //create the motion image
            Mat motionImage = new Mat(motionMask.Size.Height, motionMask.Size.Width, DepthType.Cv8U, 3);
            //display the motion pixels in blue (first channel)
            //motionImage[0] = motionMask;
            CvInvoke.InsertChannel(motionMask, motionImage, 0);

            //Threshold that defines a motion area; reduce the value to detect smaller motions
            double minArea = 100;

            //storage.Clear(); //clear the storage
            Rectangle[] rects;
            using (VectorOfRect boundingRect = new VectorOfRect())
            {
                _motionHistory.GetMotionComponents(_segMask, boundingRect);
                rects = boundingRect.ToArray();
            }
            List <Rectangle> Availablerects = new List <Rectangle>();
            foreach (Rectangle comp in rects)
            {
                int area = comp.Width * comp.Height;
                //reject components whose area is too small
                if (area < minArea)
                {
                    continue;
                }
                double angle, motionPixelCount;
                _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);
                //reject areas that contain too little motion
                if (motionPixelCount < area * size)
                {
                    continue;
                }
                else
                {
                    Availablerects.Add(comp);
                }
            }

            //iterate through each of the motion components
            List <Rectangle> faces = new List <Rectangle>();
            List <Rectangle> eyes  = new List <Rectangle>();
            Task             task2 = new Task(() =>
            {
                Mat Detectmat = image;
                DetectFace.Detect(Detectmat, "haarcascade_frontalface_default.xml", "haarcascade_eye.xml", faces, eyes, tryUseCuda, out detectionTime);
                if (faces.Count > 0)
                {
                    label1.Text = "detectionTime:" + detectionTime.ToString(); //note: updating the UI from a background task is unsafe without Invoke
                    for (int i = 0; i < faces.Count; i++)
                    {
                        Bitmap bt2 = DetectFace.Cutbitmap(Detectmat.Bitmap, faces[i].X, faces[i].Y, faces[i].Width, faces[i].Height);
                        Emgu.CV.Image <Bgr, Byte> currentFrame1 = new Emgu.CV.Image <Bgr, Byte>(bt2); //the only conversion that worked here
                        Mat invert1 = new Mat();
                        CvInvoke.BitwiseAnd(currentFrame1, currentFrame1, invert1);                   //method from the official docs, adapted; no other direct conversion seems to be provided
                        faceimage.Image = invert1;
                        string filePath = "G:\\motion1\\" + DateTime.Now.ToString("人脸-yyyy年MM月dd日HH点mm分ss秒") + i.ToString() + "-" + faces.Count.ToString() + ".jpg";
                        bt2.Save(filePath);
                        System.Media.SystemSounds.Beep.Play();
                    }
                    Bitmap bt1       = Detectmat.Bitmap;
                    string filePath2 = "G:\\motion1\\" + DateTime.Now.ToString("原图-yyyy年MM月dd日HH点mm分ss秒") + ".jpg";
                    //System.Diagnostics.Debug.WriteLine("about to save the original image " + detectionTime.ToString());
                    bt1.Save(filePath2);
                }
            });
            task2.Start();

            foreach (Rectangle comp in Availablerects)
            {
                int area = comp.Width * comp.Height;
                //reject components whose area is too small
                if (area < minArea)
                {
                    continue;
                }



                // find the angle and motion pixel count of the specific area
                double angle, motionPixelCount;
                _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);

                //reject areas that contain too little motion
                if (motionPixelCount < area * size)
                {
                    continue;
                }

                //Draw each individual motion in red

                //================= convert the Mat to a Bitmap and crop it ===========================
                Task task = new Task(() =>
                {
                    Bitmap bt = DetectFace.Cutbitmap(image.Bitmap, comp.X, comp.Y, comp.Width, comp.Height);
                    Emgu.CV.Image <Bgr, Byte> currentFrame = new Emgu.CV.Image <Bgr, Byte>(bt); //the only conversion that worked here
                    Mat invert = new Mat();
                    CvInvoke.BitwiseAnd(currentFrame, currentFrame, invert);                    //method from the official docs, adapted; no other direct conversion seems to be provided
                    moveimage.Image = invert;
                });
                task.Start();
                try
                {
                    DrawMotion(motionImage, comp, angle, new Bgr(Color.Red));
                    DrawMotion(capturedImageBox.Image, comp, angle, new Bgr(Color.Red));
                }
                catch (Exception)
                {
                    //ignore drawing failures for this component
                }

                #region//area

                /*
                 * bool time = false;
                 * if ((comp.X > 1770 && comp.X < 1830) && (comp.Y > 2 && comp.Y < 40))
                 * {
                 *  time = true;
                 * }
                 * if (youxiaorects.Count < 50&&!time)
                 * {
                 *  if (capturedImageBox.Image != null)
                 *  {
                 *      Random rd = new Random();
                 *      Bitmap bt = new Bitmap(capturedImageBox.Image.Bitmap);
                 *   //   string filePath = "G:\\motion\\" + DateTime.Now.ToString("yyyy年MM月dd日HH点mm分ss秒") + ".jpg";
                 *   //   image.Save(filePath);
                 *
                 *  }
                 * }
                 */
                #endregion
            }

            #region//scrap heap (dead code)
            //================= when a change is detected, grab the changed region's position and size, save it, and pass it to the face-recognition routine =====================
            //=============== create threads dynamically based on the number of changed regions, for efficiency ======================

            /*   Thread[] downloadThread;
             * Thread face=new Thread(confirmface);
             * face.Start();*/
            /*  int areacount = Availablerects.Count;
             * //declare the download threads; a C# advantage is that an array can be declared without a length and sized only when it is used
             *
             * //this declaration should be class-level, so other methods and controls can reach the threads
             *
             * ThreadStart startDownload = new ThreadStart(confirmface);
             * //thread start setup: every thread runs DownLoad()
             * downloadThread = new Thread[areacount];//allocate thread resources and fix the total count
             * for (int k = 0; k < areacount; k++)//start the requested number of threads
             * {
             *    downloadThread[k] = new Thread(startDownload);//assign each thread its start routine
             *    downloadThread[k].Start();//start the threads one by one
             * }*/
            #endregion

            #region//_forgroundMask

            /*
             * // find and draw the overall motion angle
             * double overallAngle, overallMotionPixelCount;
             *
             * _motionHistory.MotionInfo(_forgroundMask, new Rectangle(Point.Empty, motionMask.Size), out overallAngle, out overallMotionPixelCount);
             * // DrawMotion(motionImage, new Rectangle(Point.Empty, motionMask.Size), overallAngle, new Bgr(Color.Green));
             * // DrawMotion(image, new Rectangle(Point.Empty, image.Size), overallAngle, new Bgr(Color.Green));
             * if (this.Disposing || this.IsDisposed)
             *   return;
             */
            /*  foreach (Rectangle face in faces)
             *    CvInvoke.Rectangle(image, face, new Bgr(Color.Red).MCvScalar, 2);
             * foreach (Rectangle eye in eyes)
             *    CvInvoke.Rectangle(image, eye, new Bgr(Color.Blue).MCvScalar, 2);*/
            capturedImageBox.Image = image;
            // forgroundImageBox.Image = _forgroundMask;

            //Display the amount of motions found on the current image
            //UpdateText(String.Format("Total Motions found: {0}; Motion Pixel count: {1} detectionTime:{2} ", rects.Length, overallMotionPixelCount, detectionTime));

            //Display the image of the motion
            //  motionImageBox.Image = motionImage;


            #endregion
        }
Example #18
        private bool testDiff()
        {
            if (_forgroundDetector == null)
            {
                //Emgu.CV.O
                _forgroundDetector = new BackgroundSubtractorMOG2(20, 16f, true);
            }

            bool result = false;

            using (Mat _forgroundMask = new Mat())
            {
                if (nextFrame != null)
                {
                    _forgroundDetector.Apply(nextFrame, _forgroundMask);
                }
                else
                {
                    return(false);
                }

                decimal backGroundCounter = 0;
                decimal whiteColorCounter = 0;

                //double delta = 0.01;
                decimal deltaCounter        = this.numUpDown_Sens.Value;
                decimal whitePixelsSettings = this.numUpDown_WhitePixels.Value;
                int     colorLimit          = 200;
                using (Bitmap img = _forgroundMask.Bitmap)
                {
                    using (Bitmap origImg = nextFrame.Bitmap)
                    {
                        curSearchResult.RGBprofile   = "";
                        curSearchResult.RGBprofileXY = "";

                        foreach (Rectangle Rect in ListRect)
                        {
                            for (int i = 0; i < Rect.Height; ++i)
                            {
                                for (int j = 0; j < Rect.Width; ++j)
                                {
                                    //if (_forgroundMask.Bitmap.GetPixel(Rect.X + j, Rect.Y + i).GetBrightness() > delta)
                                    if (img.GetPixel(Rect.X + j, Rect.Y + i).ToArgb() != -16777216)
                                    {
                                        backGroundCounter++;
                                        if (origImg.GetPixel(Rect.X + j, Rect.Y + i).R > colorLimit &&
                                            origImg.GetPixel(Rect.X + j, Rect.Y + i).G > colorLimit &&
                                            origImg.GetPixel(Rect.X + j, Rect.Y + i).B > colorLimit
                                            )
                                        {
                                            whiteColorCounter++;
                                            curSearchResult.RGBprofile += string.Format("(R={0},G={1},B={2})|",
                                                                                        origImg.GetPixel(Rect.X + j, Rect.Y + i).R,
                                                                                        origImg.GetPixel(Rect.X + j, Rect.Y + i).G,
                                                                                        origImg.GetPixel(Rect.X + j, Rect.Y + i).B);
                                            curSearchResult.RGBprofileXY += string.Format("(X={0},Y={1})|", Rect.X + j, Rect.Y + i);
                                        }
                                    }
                                }
                            }
                        }
                    }
                }

                if (backGroundCounter > deltaCounter && whiteColorCounter > whitePixelsSettings)
                {
                    result = true;
                    if (!b_AutoRun)
                    {
                        pictureBox2.Image = _forgroundMask.Bitmap.Clone() as Bitmap;
                        pictureBox2.Refresh();
                    }

                    curSearchResult.backGroundCounter = backGroundCounter;
                    curSearchResult.frameNumber       = this.frameNum;
                    curSearchResult.whiteColorCounter = whiteColorCounter;
                }

                // _forgroundMask is disposed by the enclosing using block
            }

            return(result);
        }
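
Note: testDiff above calls Bitmap.GetPixel for every pixel of every rectangle, which is very slow on large frames. If that scan becomes a bottleneck, LockBits with a row buffer avoids the per-pixel call overhead. A sketch assuming 24bpp RGB data; the GetPixel comparison against -16777216 (opaque black) corresponds here to all three channels being zero:

        // Sketch: count non-black (foreground) pixels in one rectangle of the mask.
        static int CountForeground(Bitmap mask, Rectangle rect)
        {
            BitmapData data = mask.LockBits(rect, ImageLockMode.ReadOnly, PixelFormat.Format24bppRgb);
            try
            {
                int    count = 0;
                byte[] row   = new byte[rect.Width * 3];
                for (int y = 0; y < rect.Height; y++)
                {
                    System.Runtime.InteropServices.Marshal.Copy(
                        IntPtr.Add(data.Scan0, y * data.Stride), row, 0, row.Length);
                    for (int x = 0; x < rect.Width; x++)
                    {
                        // any non-zero channel means the subtractor marked this pixel as foreground
                        if (row[x * 3] != 0 || row[x * 3 + 1] != 0 || row[x * 3 + 2] != 0)
                        {
                            count++;
                        }
                    }
                }
                return(count);
            }
            finally
            {
                mask.UnlockBits(data);
            }
        }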
Example #19
 internal BackgroundSubtractorShould(BackgroundSubtractor subtractor) : base(subtractor)
 {
     this.subtractor = subtractor;
 }
Example #20
        private void ProcessFrame(object sender, EventArgs e)
        {
            Mat image = new Mat();

            _capture.Retrieve(image);
            if (_forgroundDetector == null)
            {
                _forgroundDetector = new BackgroundSubtractorMOG2();
            }

            _forgroundDetector.Apply(image, _forgroundMask);

            capturedImageBox.Image = image;

            //update the motion history
            _motionHistory.Update(_forgroundMask);

            foreground.Image = _forgroundMask;


            #region get a copy of the motion mask and enhance its color
            double[] minValues, maxValues;
            Point[]  minLoc, maxLoc;
            _motionHistory.Mask.MinMax(out minValues, out maxValues, out minLoc, out maxLoc);
            Mat motionMask = new Mat();
            using (ScalarArray sa = new ScalarArray(255.0 / maxValues[0]))
                CvInvoke.Multiply(_motionHistory.Mask, sa, motionMask, 1, DepthType.Cv8U);
            //Image<Gray, Byte> motionMask = _motionHistory.Mask.Mul(255.0 / maxValues[0]);
            #endregion

            //create the motion image
            //  Image<Bgr, Byte> motionImage = new Image<Bgr, byte>(motionMask.Size);
            //display the motion pixels in blue (first channel)
            //motionImage[0] = motionMask;
            // CvInvoke.InsertChannel(motionMask, motionImage, 0);

            //Threshold to define a motion area, reduce the value to detect smaller motion
            // double minArea = 100;

            //storage.Clear(); //clear the storage
            Rectangle[] rects;
            using (VectorOfRect boundingRect = new VectorOfRect())
            {
                _motionHistory.GetMotionComponents(_segMask, boundingRect);
                rects = boundingRect.ToArray();
            }

            //iterate through each of the motion component
            foreach (Rectangle comp in rects)
            {
                time.Start();
                // find the angle and motion pixel count of the specific area
                double angle, motionPixelCount;
                _motionHistory.MotionInfo(_forgroundMask, comp, out angle, out motionPixelCount);


                if (Main.security.Text == "SECURITY MODE ON")
                {
                    long x = time.ElapsedMilliseconds;
                    if (x > ellapsed_time)
                    {
                        if (motionPixelCount > pixel_count)
                        {
                            //MessageBox.Show("My message here");
                            Console.Beep(5000, 1000);
                            if (Main.connected == true)
                            {
                                chat.send(Encoding.ASCII.GetBytes("Someone is in the room"));
                            }
                            time.Stop(); //stop the timer before leaving the loop
                            break;
                        }
                    }
                }
            }
        }