Motion detection wrapper class, which performs motion detection and processing.

The class serves as a wrapper class for motion detection and motion processing algorithms, allowing them to be invoked with a single call. Unlike the motion detection and motion processing interfaces, the class also provides additional methods for convenience, so the algorithms can be applied not only to AForge.Imaging.UnmanagedImage, but to .NET's Bitmap class as well.

In addition to wrapping the motion detection and processing algorithms, the class provides some additional functionality. Using the MotionZones property it is possible to specify a set of rectangular zones to observe - only motion in these zones is counted and post-processed.

Sample usage:

// create motion detector MotionDetector detector = new MotionDetector( new SimpleBackgroundModelingDetector( ), new MotionAreaHighlighting( ) ); // continuously feed video frames to motion detector while ( ... ) { // process new video frame and check motion level if ( detector.ProcessFrame( videoFrame ) > 0.02 ) { // ring alarm or do something else } }
Beispiel #1
2
        /// <summary>
        /// Handles the "Start" button click: configures the motion detection
        /// pipeline, begins streaming from the selected camera and shows the
        /// palm-training picture and instructions.
        /// </summary>
        /// <param name="sender">Sender of the event</param>
        /// <param name="e">Event arguments</param>
        private void buttonStart_Click(object sender, EventArgs e)
        {
            // Two-frames detector is kept as an alternative backend (see the
            // commented-out wiring below).
            diff = new TwoFramesDifferenceDetector(true);

            // Background-modelling detector is the one actually used.
            bgm = new SimpleBackgroundModelingDetector(true, true)
            {
                FramesPerBackgroundUpdate = 3,
                MillisecondsPerBackgroundUpdate = 150
            };

            motionDetector = new MotionDetector(bgm, new MotionAreaHighlighting());
            //motionDetector = new MotionDetector(diff, new MotionAreaHighlighting());

            // Open the camera chosen in the combo box and start receiving frames.
            video = new VideoCaptureDevice(vcds[cameras.SelectedIndex].MonikerString);
            video.NewFrame += new NewFrameEventHandler(Video_NewFrame);
            video.Start();

            // Show the training picture and instructions.
            trainingPic.Image = Properties.Resources.ShowPalmFull;
            trainingPic.Visible = true;
            instructions.Visible = true;
        }
        /// <summary>
        /// Applies the settings from the form: rebuilds the motion detector
        /// from the numeric controls and updates the mouse-click interval.
        /// (Removed the redundant null pre-initializations and the pointless
        /// temporary before the field assignment.)
        /// </summary>
        /// <param name="sender">Sender of the event</param>
        /// <param name="e">Event arguments</param>
        private void buApplySettings_Click(object sender, EventArgs e)
        {
            // Background-modelling detector configured from the form controls.
            AForge.Vision.Motion.IMotionDetector detector =
                new AForge.Vision.Motion.SimpleBackgroundModelingDetector(cbSuppressNoise.Checked)
                {
                    DifferenceThreshold             = (int)numDifferenceThreshold.Value,
                    FramesPerBackgroundUpdate       = (int)numPerBackgroundUpdate.Value,
                    KeepObjectsEdges                = true,
                    MillisecondsPerBackgroundUpdate = (int)numMsPerBackgroupUpdate.Value
                };

            // Highlight detected blobs in red, ignoring objects smaller than 20x20 px.
            AForge.Vision.Motion.IMotionProcessing processor =
                new AForge.Vision.Motion.BlobCountingObjectsProcessing()
                {
                    HighlightColor         = System.Drawing.Color.Red,
                    HighlightMotionRegions = true,
                    MinObjectsHeight       = 20,
                    MinObjectsWidth        = 20
                };

            motionDetector = new AForge.Vision.Motion.MotionDetector(detector, processor);
            mouseClickTime = (int)numericUpDown1.Value;
        }
Beispiel #3
0
 /// <summary>
 /// Form load handler: enumerates capture devices, builds the motion
 /// detector and starts detection, announcing the start via speech.
 /// </summary>
 /// <param name="sender">Sender of the event</param>
 /// <param name="e">Event arguments</param>
 private void MotionDetection_Load(object sender, EventArgs e)
 {
     getDevices();

     // Frame-difference detection with the moving object's border highlighted.
     detector = new AForge.Vision.Motion.MotionDetector(
         new TwoFramesDifferenceDetector(),
         new MotionBorderHighlighting());

     Detect();

     // Portuguese: "motion detection started!"
     Speaker.Speak("detecção de movimento iniciada!");
 }
Beispiel #4
0
 /// <summary>
 /// Builds the CCTV window: opens the first available webcam, wires up
 /// the motion detector and begins streaming frames.
 /// </summary>
 public MotionCCTV()
 {
     InitializeComponent();

     isResolutionSet = false;

     // Pick the first video input device on the machine.
     webcam = new FilterInfoCollection(FilterCategory.VideoInputDevice);
     cam = new VideoCaptureDevice(webcam[0].MonikerString);

     // Frame-difference detection with the motion area highlighted.
     md = new MotionDetector(new TwoFramesDifferenceDetector(), new MotionAreaHighlighting());

     // Route incoming frames to cam_NewFrame, then start capturing.
     cam.NewFrame += new NewFrameEventHandler(cam_NewFrame);
     cam.Start();

     Display.Paint += Display_Paint;
 }
        /// <summary>
        /// Builds the default motion detector used by the application:
        /// a background-modelling detector combined with blob-counting
        /// processing that highlights moving regions in red.
        /// (Removed the dead commented-out detector/processor variants and
        /// the redundant null pre-initializations.)
        /// </summary>
        /// <returns>A fully configured AForge.Vision.Motion.MotionDetector.</returns>
        public static AForge.Vision.Motion.MotionDetector GetDefaultMotionDetector()
        {
            // Background modelling has proven the most reliable detector here.
            AForge.Vision.Motion.IMotionDetector detector =
                new AForge.Vision.Motion.SimpleBackgroundModelingDetector()
                {
                    DifferenceThreshold             = 15,
                    FramesPerBackgroundUpdate       = 5,
                    KeepObjectsEdges                = true,
                    MillisecondsPerBackgroundUpdate = 5,
                    SuppressNoise                   = true
                };

            // Highlight detected blobs in red, ignoring objects smaller than 20x20 px.
            AForge.Vision.Motion.IMotionProcessing processor =
                new AForge.Vision.Motion.BlobCountingObjectsProcessing()
                {
                    HighlightColor         = System.Drawing.Color.Red,
                    HighlightMotionRegions = true,
                    MinObjectsHeight       = 20,
                    MinObjectsWidth        = 20
                };

            return new AForge.Vision.Motion.MotionDetector(detector, processor);
        }
Beispiel #6
0
        /// <summary>
        /// Creates a monitor for a single camera: hooks the display surface,
        /// prepares motion detection and wires up frame delivery. The camera
        /// is NOT started here; callers start it explicitly.
        /// </summary>
        /// <param name="display">Picture box the camera output is drawn onto.</param>
        /// <param name="monikerString">DirectShow moniker identifying the device.</param>
        /// <param name="cameraName">Human-readable camera name.</param>
        public CameraMonitor(PictureBox display,string monikerString,String  cameraName)
        {
            this.cameraName = cameraName;
            this.display = display;
            this.display.Paint += new PaintEventHandler(DrawMessage);

            // Frame-difference detection with the motion area highlighted.
            md = new MotionDetector(new TwoFramesDifferenceDetector(), new MotionAreaHighlighting());

            cam = new VideoCaptureDevice(monikerString);
            cam.NewFrame += new NewFrameEventHandler(cam_NewFrame);

            // Deliberately left stopped; starting is the caller's decision.
            IsRunning = false;
        }
        /// <summary>
        /// Sets up the main window: the motion detection pipeline, the
        /// button grid backing the game, and the load handler.
        /// </summary>
        public MainWindow()
        {
            InitializeComponent();

            _engine = GameboyEngine.Instance;

            // Blob counting ignores moving objects smaller than the configured size.
            _processor = new BlobCountingObjectsProcessing(
                MinimumMovingObjectSize, MinimumMovingObjectSize, Color.Black);
            _detector = new MotionDetector(new TwoFramesDifferenceDetector(), _processor);

            // One button slot per cell of the motion grid.
            _fields = new ButtonBase[MotionGrid.ColumnDefinitions.Count,
                                     MotionGrid.RowDefinitions.Count];
            RandomizeFields();

            Loaded += MainWindow_Loaded;
        }
Beispiel #8
0
 /// <summary>
 /// Creates the detector: wraps the video source asynchronously, prepares
 /// motion detection, timers, the frame buffer and the video recorder.
 /// </summary>
 /// <param name="source">Video source to watch.</param>
 /// <param name="low">Low motion-magnitude threshold.</param>
 /// <param name="medium">Medium motion-magnitude threshold.</param>
 /// <param name="high">High motion-magnitude threshold.</param>
 public YAMDDetector(IVideoSource source, Magnitude low, Magnitude medium, Magnitude high)
 {
     this.low = low;
     this.medium = medium;
     this.high = high;

     // Background modelling + blob counting with region highlighting enabled.
     detector = new MotionDetector(
         new SimpleBackgroundModelingDetector(),
         new BlobCountingObjectsProcessing(true));

     // Async video source processes images in a separate thread and raises NewFrame.
     inputStream = new AsyncVideoSource(source);
     inputStream.NewFrame += inputStream_NewFrame;

     timer = new Stopwatch();
     stoptimer = new Stopwatch();
     videoRecorder = new VideoFileWriter();
     Running = false;

     // Keep at most the 50 most recent frames.
     buffer = new FixedSizeQueue<Bitmap>();
     buffer.Limit = 50;
     magnitudes = new Queue<int>();
 }
Beispiel #9
0
 /// <summary>
 /// Creates a camera bound to a video source with an externally supplied
 /// motion detector, and subscribes to incoming frames.
 /// </summary>
 /// <param name="source">Video source delivering frames.</param>
 /// <param name="detector">Motion detector applied to each frame.</param>
 public Camera(IVideoSource source, MotionDetector detector)
 {
     VideoSource = source;
     _motionDetector = detector;
     VideoSource.NewFrame += VideoNewFrame;
 }
Beispiel #10
0
        // Toggles anomaly (motion) detection for the currently selected camera.
        // 'jiance' is the "detection running" flag; the tsb/tsmi controls are
        // the toolbar button and menu item whose image/text mirror the state.
        private void detection()
        {
            cameraToEdit = multiplexer.LastClicked.Camera;
            // No camera selected and detection not running: nothing to do.
            // (Message box text is Chinese: "You have not selected a camera!")
            if (cameraToEdit == null && jiance == false)
            {
                MessageBox.Show("你当前没有选中摄像头!");
                return;
            }
            removeform();

            if (jiance == false) 
            {  
                // Start detection on the clicked camera.
                cameraToEdit = multiplexer.LastClicked.Camera;
                cameraDetecting = cameraToEdit;                 // remember which camera is being monitored
                tsb异常监测.Image = Properties.Resources.camera7;
                detector = new MotionDetector(new TwoFramesDifferenceDetector());
                jiance = true;
                tsb异常监测.Text = " 停止监测 ";
                tsmi异常监测.Text = "停止监测";
            }
            else
            {  
                // Stop detection and restore the idle button image/captions.
                detector = null;
                tsb异常监测.Image = Properties.Resources.camera5;
                jiance = false;
                tsb异常监测.Text = " 异常监测 ";
                tsmi异常监测.Text = "异常监测";

                // If motion-triggered recording (luzhi1) is in progress, close
                // the recording, re-enable the timer and re-show the main form.
                if (luzhi1 == true)
                {
                    CountVedio = 0;
                    cameraToEdit = cameraDetecting;
                    RecordClose();
                    luzhi1 = false;
                    timer2.Enabled = true;
                    MainForm.Instance().ShowForm();
                }
                //  timer2.Enabled = true;
                // MainForm.Instance().ShowForm();
            }
        }
        /// <summary>
        /// Builds the live-view view model for a camera device: loads camera
        /// properties, prepares the overlay and commands, selects a motion
        /// detection backend from the settings and subscribes to
        /// window-manager events.
        /// </summary>
        /// <param name="device">Camera device shown in the live view.</param>
        public LiveViewViewModel(ICameraDevice device)
        {
            CameraDevice = device;
            CameraProperty = device.LoadProperties();
            SimpleManualFocus = CameraDevice.GetCapability(CapabilityEnum.SimpleManualFocus);
            Title = TranslationStrings.LiveViewWindowTitle + " - " + CameraProperty.DeviceName;
            InitOverlay();
            InitCommands();

            // Both backends share identical blob-counting post-processing;
            // the settings only choose the frame-level detector.
            IMotionDetector frameDetector;
            if (ServiceProvider.Settings.DetectionType == 0)
            {
                frameDetector = new TwoFramesDifferenceDetector(true);
            }
            else
            {
                frameDetector = new SimpleBackgroundModelingDetector(true, true);
            }
            _detector = new MotionDetector(
                frameDetector,
                new BlobCountingObjectsProcessing(
                    ServiceProvider.Settings.MotionBlockSize,
                    ServiceProvider.Settings.MotionBlockSize, true));

            TriggerOnMotion = false;
            ShowHistogram = true;
            Init();
            ServiceProvider.WindowsManager.Event += WindowsManagerEvent;
        }
Beispiel #12
0
 /// <summary>
 /// Creates a camera bound to a video source with no motion detection,
 /// and subscribes to incoming frames.
 /// </summary>
 /// <param name="source">Video source delivering frames.</param>
 public Camera(IVideoSource source)
 {
     VideoSource = source;
     _motionDetector = null;
     VideoSource.NewFrame += VideoNewFrame;
 }
        //(VideoCaptureDevice source)
        // Open video source
        /// <summary>
        /// Opens a video source (local capture device or IP MJPEG stream),
        /// registers it with the camera rig, assigns it a display button and
        /// restores its alarm/publish button state in the UI.
        /// </summary>
        /// <param name="source">Local capture device (used when <paramref name="ip"/> is false).</param>
        /// <param name="ipStream">MJPEG network stream (used when <paramref name="ip"/> is true).</param>
        /// <param name="ip">True to open the IP stream, false to open the local device.</param>
        private void OpenVideoSource(VideoCaptureDevice source, AForge.Video.MJPEGStream ipStream, Boolean ip)
        {
            MotionDetector detector = new MotionDetector(new SimpleBackgroundModelingDetector());

            string camSource;

            // create camera around whichever source type was requested
            Camera camera;

            if (!ip)
            {

                camSource = source.Source;
                camera = new Camera(source, detector, camSource);

            }
            else
            {

                camSource = ipStream.Source;
                camera = new Camera(ipStream, detector, camSource);

            }

            // Unsubscribe first so the handler is never attached twice.
            camera.motionLevelEvent -= new motionLevelEventHandler(bubble.motionEvent);
            camera.motionLevelEvent += new motionLevelEventHandler(bubble.motionEvent);

            // start camera
            camera.Start();

            // Register the camera with the rig and make it the active camera.
            rigItem rig_item = new rigItem();
            rig_item.cameraName = camSource;//source.Source;
            rig_item.cam = camera;
            rig_item.cam.cam = CameraRig.cameraCount();
            //CameraRig.addCamera(rig_item);
            CameraRig.rig.Add(rig_item);
            int curCam = CameraRig.cameraCount() - 1;
            CameraRig.activeCam = curCam;

            // Remember this camera in the active profile.
            config.getProfile(bubble.profileInUse).webcam = camSource;

            //populate or update rig info
            CameraRig.rigInfoPopulate(config.getProfile(bubble.profileInUse).profileName, curCam);

            CameraRig.rig[curCam].cam.cam = curCam;

            //get desired button or first available button
            int desiredButton = CameraRig.rig[curCam].displayButton;
            //check if the desired button is free and return the next free button if one is available
            // (999 is the "no free button" sentinel, per the check below)
            int camButton = camButtons.availForClick(desiredButton, true);
            bool freeCamsExist = camButton != 999;

            //if a free camera button exists assign the camera
            if (freeCamsExist)
            {
                CameraRig.rig[curCam].displayButton = camButton;
            }

            //update info for camera
            CameraRig.updateInfo(bubble.profileInUse, config.getProfile(bubble.profileInUse).webcam, CameraRig.infoEnum.displayButton, camButton);

            // Select this camera when the profile has no selected camera yet,
            // or when this camera is the profile's remembered selection.
            if (config.getProfile(bubble.profileInUse).selectedCam == "")
            {
                cameraSwitch(CameraRig.rig[curCam].displayButton, false, false);
            }
            else
            {
                if (config.getProfile(bubble.profileInUse).selectedCam == camSource)
                {
                    cameraSwitch(CameraRig.rig[curCam].displayButton, false, false);
                }
            }

            camButtonSetColours();

            // Restore the "alarm active" state on whichever display button
            // (1-9) this camera was assigned to.
            if (CameraRig.rig[curCam].cam.alarmActive)
            {

                if (CameraRig.rig[curCam].displayButton == 1) selcam(this.bttncam1sel, 1);
                if (CameraRig.rig[curCam].displayButton == 2) selcam(this.bttncam2sel, 2);
                if (CameraRig.rig[curCam].displayButton == 3) selcam(this.bttncam3sel, 3);
                if (CameraRig.rig[curCam].displayButton == 4) selcam(this.bttncam4sel, 4);
                if (CameraRig.rig[curCam].displayButton == 5) selcam(this.bttncam5sel, 5);
                if (CameraRig.rig[curCam].displayButton == 6) selcam(this.bttncam6sel, 6);
                if (CameraRig.rig[curCam].displayButton == 7) selcam(this.bttncam7sel, 7);
                if (CameraRig.rig[curCam].displayButton == 8) selcam(this.bttncam8sel, 8);
                if (CameraRig.rig[curCam].displayButton == 9) selcam(this.bttncam9sel, 9);

            }

            // Restore the "publish active" state the same way.
            if (CameraRig.rig[curCam].cam.publishActive)
            {
                if (CameraRig.rig[curCam].displayButton == 1) pubcam(this.bttncam1pub, 1);
                if (CameraRig.rig[curCam].displayButton == 2) pubcam(this.bttncam2pub, 2);
                if (CameraRig.rig[curCam].displayButton == 3) pubcam(this.bttncam3pub, 3);
                if (CameraRig.rig[curCam].displayButton == 4) pubcam(this.bttncam4pub, 4);
                if (CameraRig.rig[curCam].displayButton == 5) pubcam(this.bttncam5pub, 5);
                if (CameraRig.rig[curCam].displayButton == 6) pubcam(this.bttncam6pub, 6);
                if (CameraRig.rig[curCam].displayButton == 7) pubcam(this.bttncam7pub, 7);
                if (CameraRig.rig[curCam].displayButton == 8) pubcam(this.bttncam8pub, 8);
                if (CameraRig.rig[curCam].displayButton == 9) pubcam(this.bttncam9pub, 9);

            }

            CameraRig.alert(bubble.Alert.on);
            CameraRig.rig[curCam].cam.exposeArea = bubble.exposeArea;

            // Re-subscribe the alarm handler (remove first to avoid duplicates).
            CameraRig.rig[curCam].cam.motionAlarm -= new alarmEventHandler(bubble.camera_Alarm);
            CameraRig.rig[curCam].cam.motionAlarm += new alarmEventHandler(bubble.camera_Alarm);

            bubble.webcamAttached = true;

            button23.SynchronisedInvoke(() => button23.Enabled = CameraRig.camerasAttached());
            //SetButtonEnabled(button23, CameraRig.camerasAttached());
        }
        /// <summary>
        /// Starts capture: tears down any running source, swaps the button
        /// image to "stop", builds the motion detector and opens the
        /// selected capture device.
        /// </summary>
        private void Start()
        {
            CloseVideoSource();
            ButtonImage = Application.Current.Resources["StopImage"] as BitmapImage;

            // Motion detector: noise-suppressed frame differencing plus red
            // highlighting of blobs of at least 10x10 pixels.
            var frameDetector = new TwoFramesDifferenceDetector
            {
                DifferenceThreshold = 15,
                SuppressNoise = true
            };
            var blobProcessing = new BlobCountingObjectsProcessing
            {
                HighlightColor = Color.Red,
                HighlightMotionRegions = true,
                MinObjectsHeight = 10,
                MinObjectsWidth = 10
            };
            _motionDetector = new MotionDetector(frameDetector, blobProcessing);

            _videoSource = new VideoCaptureDevice(SelectedDevice.MonikerString);
            _videoSource.NewFrame += OnNewFrameReceived;
            _videoSource.Start();

            // French: "Capture started".
            Information = "Capture démarrée";
            _timer.IsEnabled = true;
        }
Beispiel #15
0
 /// <summary>
 /// Creates a camera fed by a local capture device.
 /// </summary>
 /// <param name="source">Local video capture device.</param>
 /// <param name="detector">Motion detector applied to incoming frames.</param>
 /// <param name="_name">Display name for the camera.</param>
 public Camera(VideoCaptureDevice source, MotionDetector detector, string _name)
 {
     ipCamera = false;
     cameraName = _name;
     pubFrame = null;
     this.videoSource = source;
     // NOTE(review): the field name "motionDetecotor" is misspelled project-wide.
     this.motionDetecotor = detector;
     videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);
 }
Beispiel #16
0
        /// <summary>
        /// Form load: sets the title from the assembly version, pins the
        /// window topmost, wires up the timers and starts the screen-capture
        /// pipeline that feeds cropped/filtered frames into motion detection.
        /// Fix: dispose the decoded full-screen image and every intermediate
        /// filtered bitmap — this callback fires per frame, so the original
        /// leaked bitmaps continuously.
        /// </summary>
        private void MainForm_Load(object sender, EventArgs e)
        {
            Version version = System.Reflection.Assembly.GetExecutingAssembly().GetName().Version;

            Text = Text + " " + version.Major + "." + version.Minor + " (build " + version.Build + ")"; //change form title

            //todo: make this user selectable.
            SetWindowPos(this.Handle, HWND_TOPMOST, 0, 0, 0, 0, TOPMOST_FLAGS); // Set form as top form.  Always on top.

            // NOTE(review): this local is never read afterwards — presumably a
            // field assignment was intended; kept as-is pending confirmation.
            AForge.Vision.Motion.MotionDetector motionDetector = GetDefaultMotionDetector();

            globalTimer.Elapsed    += GlobalTimer_Elapsed;
            lureTimer.Elapsed      += LureTimer_Elapsed;
            baitTimer.Elapsed      += BaitTimer_Elapsed;
            screenUpdateTimer.Tick += ScreenUpdateTimer_Tick;

            screenStateLogger.ScreenRefreshed += (sender1, data) =>
            {
                Bitmap cropped;

                // Decode the frame, crop the centre of the screen, then release
                // the full-size image immediately.
                using (System.Drawing.Image i = System.Drawing.Image.FromStream(new MemoryStream(data)))
                {
                    origScreenHeight = i.Height;
                    origScreenWidth  = i.Width;

                    // todo:  The percentage of scan area should be able to be set on the form
                    // To help in scan speed and to eliminate some of the external noise, only scan the center of the screen.
                    int       left    = (int)(i.Width * 0.40);
                    int       right   = (int)(i.Width * 0.60) - left;
                    int       top     = (int)(i.Height * 0.50);
                    int       bottom  = (int)(i.Height * 0.70) - top;
                    Rectangle srcRect = new Rectangle(left, top, right, bottom);
                    cropped = ((Bitmap)i).Clone(srcRect, i.PixelFormat);
                }

                // Depending on the area of the game it is sometimes best to
                // remove some colors. Each filter's Apply returns a new bitmap,
                // so dispose the one it replaces.
                if (rbGreyScaleFilter.Checked)
                {
                    Bitmap filtered = Grayscale.CommonAlgorithms.BT709.Apply(cropped);
                    cropped.Dispose();
                    cropped = filtered;
                }

                if (rbRedFilter.Checked)
                {
                    Bitmap filtered = new ExtractChannel(RGB.R).Apply(cropped);
                    cropped.Dispose();
                    cropped = filtered;
                }

                if (rbBlueFilter.Checked)
                {
                    Bitmap filtered = new ExtractChannel(RGB.B).Apply(cropped);
                    cropped.Dispose();
                    cropped = filtered;
                }

                if (rbGreenFilter.Checked)
                {
                    Bitmap filtered = new ExtractChannel(RGB.G).Apply(cropped);
                    cropped.Dispose();
                    cropped = filtered;
                }

                detectMovement(cropped);

                // NOTE(review): the previously displayed image is replaced
                // without being disposed; consider disposing the old
                // pbViewPane.Image as well.
                pbViewPane.Image = cropped;
            };
            screenStateLogger.Start();
        }
Beispiel #17
0
 /// <summary>
 /// Creates a camera fed by a network MJPEG stream.
 /// </summary>
 /// <param name="source">MJPEG stream delivering frames.</param>
 /// <param name="detector">Motion detector applied to incoming frames.</param>
 /// <param name="_name">Display name for the camera.</param>
 public Camera(MJPEGStream source, MotionDetector detector, string _name)
 {
     ipCamera = true;
     cameraName = _name;
     pubFrame = null;
     this.videoSource = source;
     // NOTE(review): the field name "motionDetecotor" is misspelled project-wide.
     this.motionDetecotor = detector;
     videoSource.NewFrame += new NewFrameEventHandler(video_NewFrame);
 }
Beispiel #18
0
        /// <summary>
        /// Form load: builds the picture boxes, camera definitions, motion
        /// detectors and MJPEG streams for the two configured IP cameras.
        /// (Replaced the LINQ Count() extension on arrays with the Length
        /// property — no enumerator allocation for a fixed-size array.)
        /// </summary>
        private void Form1_Load(object sender, EventArgs e)
        {
            // TODO get cams from DB
            pb = new PictureBox[2];
            cams = new Edge.IPCameras.IPCamera[2];
            streams = new MJPEGStream[2];
            detectors = new MotionDetector[2];

            // TODO determine width/height based on number of cams
            int width = 480, height = 360;

            // Reset filters with determined width/height
            filter = new ResizeNearestNeighbor(width, height);

            // Add picturebox for each cam (placeholder image until frames arrive)
            pb[0] = new PictureBox() { Width = width, Height = height, ImageLocation = "Images/testbeeld.png" };
            flowLayoutPanel1.Controls.Add(pb[0]);
            pb[1] = new PictureBox() { Width = width, Height = height, ImageLocation = "Images/testbeeld.png" };
            flowLayoutPanel1.Controls.Add(pb[1]);

            // Add cams to array
            cams[0] = new Edge.IPCameras.Foscam()
            {
                Description = "Woonkamer",
                Host = "10.30.59.81",
                Port = 8081,
                Username = "******",
                Password = "******",
            };
            cams[1] = new Edge.IPCameras.Foscam()
            {
                Description = "Buiten",
                Host = "10.30.59.82",
                Port = 8082,
                Username = "******",
                Password = "******",
            };

            // Setup motion detection
            for (int i = 0; i < cams.Length; i++)
            {
                detectors[i] = new MotionDetector(
                                   new SimpleBackgroundModelingDetector(),
                                   new BlobCountingObjectsProcessing());
            }

            // Start streaming
            for (int i = 0; i < cams.Length; i++)
            {
                streams[i] = new MJPEGStream(cams[i].MJPEGStreamURL);
                streams[i].NewFrame += new NewFrameEventHandler(Form1_NewFrame);
                streams[i].Start();
            }
        }
        /// <summary>
        /// Form load: finds a camera, prepares motion detection and signals
        /// an alarm to the server whenever enough motion is seen in a frame.
        /// </summary>
        private void ClientSettingsForm_Load(object sender, EventArgs e)
        {
            videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            // Russian message: "Camera not found!"
            if (videoDevices.Count == 0)
            {
                MessageBox.Show("Камера не найдена!");
                isAppExit = true;
                Application.Exit();
            }

            videoCamera = new VideoCaptureDevice(videoDevices[0].MonikerString);

            // Background modelling with the motion area highlighted.
            detector = new MotionDetector(new SimpleBackgroundModelingDetector(),
                new MotionAreaHighlighting());

            if (this.videoCamera.IsRunning)
                this.videoCamera.Stop();

            // Watch for motion: a frame whose motion level exceeds 2% raises
            // the alarm, provided a client connection exists.
            videoCamera.NewFrame += delegate(object send, AForge.Video.NewFrameEventArgs eventArgs)
            {
                if (detector.ProcessFrame(eventArgs.Frame) > 0.02 && client != null)
                {
                    client.SignalAlarmCmd();
                }
            };

            ClientSettingsFormExtracted();
        }
 /// <summary>
 /// Builds the live-view view model: loads camera properties, prepares
 /// the overlay and commands, and selects the motion detection backend
 /// from the settings.
 /// </summary>
 /// <param name="device">Camera device shown in the live view.</param>
 public LiveViewViewModel(ICameraDevice device)
 {
     CameraDevice = device;
     CameraProperty = device.LoadProperties();
     SimpleManualFocus = CameraDevice.GetCapability(CapabilityEnum.SimpleManualFocus);
     InitOverlay();
     InitCommands();

     // Both backends share identical blob-counting post-processing; the
     // settings only choose the frame-level detector.
     IMotionDetector frameDetector;
     if (ServiceProvider.Settings.DetectionType == 0)
     {
         frameDetector = new TwoFramesDifferenceDetector(true);
     }
     else
     {
         frameDetector = new SimpleBackgroundModelingDetector(true, true);
     }
     _detector = new MotionDetector(
         frameDetector,
         new BlobCountingObjectsProcessing(
             ServiceProvider.Settings.MotionBlockSize,
             ServiceProvider.Settings.MotionBlockSize, true));

     TriggerOnMotion = false;
     Init();
 }
        /// <summary>
        /// (Re)builds the motion detector from the current configuration:
        /// sensitivity threshold, processing/highlighting style, frame-skip
        /// rate and detection backend, then applies the detection zones.
        /// </summary>
        public void createMotionDetector()
        {
            // Sensitivity: fraction-of-frame threshold (lower = more sensitive).
            switch (detectionSensitivity)
            {
                case DETECTIONSENSITIVITY.LOW:
                    sensitivityFactor = 0.15;
                    break;
                case DETECTIONSENSITIVITY.MEDIUM:
                    sensitivityFactor = 0.1;
                    break;
                case DETECTIONSENSITIVITY.HIGH:
                    sensitivityFactor = 0.01;
                    break;
            }

            // Post-processing / highlighting style.
            switch (detectionMethod)
            {
                case MOTION.MOTION_AREA_HIGHLIGHTING:
                    motionProcessor = new MotionAreaHighlighting();
                    break;
                case MOTION.MOTION_GRID_AREA_HIGHLIGHTING:
                    motionProcessor = new GridMotionAreaProcessing(5, 5);
                    break;
                case MOTION.MOTION_BORDER_HIGHLIGHTING:
                    motionProcessor = new MotionBorderHighlighting();
                    break;
                case MOTION.MOTION_BLOB_COUNTER:
                    motionProcessor = new BlobCountingObjectsProcessing();
                    break;
            }

            // Detection speed, expressed as frames skipped between checks.
            switch (detecionSpeed)
            {
                case DETECTIONSPEED.FAST:
                    skipFrame = 3;
                    break;
                case DETECTIONSPEED.MEDIUM:
                    skipFrame = 2;
                    break;
                case DETECTIONSPEED.SLOW:
                    skipFrame = 1;
                    break;
            }

            // Backend: plain frame differencing or background modelling.
            if (detectionType == DETECTIONTYPE.TWO_FRAMES_DIFFERENCE)
                detector = new TwoFramesDifferenceDetector(true);
            else
                detector = new SimpleBackgroundModelingDetector(true, true);

            motionDetector = new MotionDetector(detector, motionProcessor);
            motionDetector.MotionZones = detectionArea;
        }
Beispiel #22
-1
        /// <summary>
        /// Console entry point: watches the first webcam for motion and
        /// stores snapshots under a temp "motions" directory.
        /// Fix: the FilterInfoCollection was built twice (once for the count
        /// check, once for the device lookup); it is now created once.
        /// </summary>
        static void Main()
        {
            _path =  Path.GetTempPath();
            Console.WriteLine("Motion Detector");
            Console.WriteLine("Detects motion in the integrated laptop webcam");
            Console.WriteLine("Threshold level: " + _motionAlarmLevel);
            _motionDetector = new MotionDetector(new TwoFramesDifferenceDetector(), new MotionAreaHighlighting());

            // Enumerate video input devices exactly once.
            var videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice);
            if (videoDevices.Count > 0)
            {
                _path += "motions";

                // Start from an empty capture directory.
                if (!Directory.Exists(_path))
                {
                    Directory.CreateDirectory(_path);
                }
                else
                {
                    var dir = new DirectoryInfo(_path);
                    foreach (var fi in dir.GetFiles())
                    {
                        fi.Delete();
                    }
                }

                var videoCaptureDevice = new VideoCaptureDevice(videoDevices[0].MonikerString);
                var videoSourcePlayer = new AForge.Controls.VideoSourcePlayer();
                videoSourcePlayer.NewFrame += VideoSourcePlayer_NewFrame;
                videoSourcePlayer.VideoSource = new AsyncVideoSource(videoCaptureDevice);
                videoSourcePlayer.Start();
            }
        }