Motion processing algorithm that counts separate moving objects and highlights them.

The aim of this motion processing algorithm is to count separate objects in the motion frame provided by a motion detection algorithm. If the HighlightMotionRegions property is set to true, the found objects are also highlighted on the original video frame. The algorithm counts and highlights only those objects whose size satisfies the MinObjectsWidth and MinObjectsHeight properties.
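
For illustration, the following configuration uses only the properties described above (the 20-pixel thresholds and the red highlight color are arbitrary example values): it ignores blobs smaller than 20x20 pixels and draws rectangles around the remaining objects.

BlobCountingObjectsProcessing processing = new BlobCountingObjectsProcessing( )
{
    // draw rectangles around detected objects on the original frame
    HighlightMotionRegions = true,
    HighlightColor         = System.Drawing.Color.Red,
    // count/highlight only blobs of at least 20x20 pixels
    MinObjectsWidth        = 20,
    MinObjectsHeight       = 20
};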

The motion processing algorithm is supposed to be used only with motion detection algorithms that are based on finding the difference with a background frame (see SimpleBackgroundModelingDetector and CustomFrameDifferenceDetector as simple implementations) and that allow extracting moving objects clearly.

Sample usage:

// create instance of motion detection algorithm
IMotionDetector motionDetector = new ... ;
// create instance of motion processing algorithm
BlobCountingObjectsProcessing motionProcessing = new BlobCountingObjectsProcessing( );
// create motion detector
MotionDetector detector = new MotionDetector( motionDetector, motionProcessing );

// continuously feed video frames to motion detector
while ( ... )
{
    // process new video frame and check motion level
    if ( detector.ProcessFrame( videoFrame ) > 0.02 )
    {
        // check number of detected objects
        if ( motionProcessing.ObjectsCount > 1 )
        {
            // ...
        }
    }
}
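
The sample above is pseudocode. Below is a self-contained sketch of the same flow using the types shown in the examples on this page (SimpleBackgroundModelingDetector paired with BlobCountingObjectsProcessing). The frame source, the class and method names, and the 0.02 trigger level are illustrative assumptions, not part of the library.

using System;
using System.Collections.Generic;
using System.Drawing;
using AForge.Vision.Motion;

static class MotionCountingExample
{
    public static void CountMovingObjects(IEnumerable<Bitmap> frames)
    {
        // background-difference detector, as recommended above
        // (suppress noise and keep object edges)
        IMotionDetector detector = new SimpleBackgroundModelingDetector(true, true);

        // blob counting and highlighting of the detected motion regions
        BlobCountingObjectsProcessing processing = new BlobCountingObjectsProcessing();

        MotionDetector motionDetector = new MotionDetector(detector, processing);

        foreach (Bitmap frame in frames)
        {
            // ProcessFrame returns the amount of motion; 0.02 is an arbitrary trigger level
            if (motionDetector.ProcessFrame(frame) > 0.02f)
            {
                Console.WriteLine("Moving objects detected: " + processing.ObjectsCount);
            }
        }
    }
}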
Inheritance: IMotionProcessing
Example #1
        private void buApplySettings_Click(object sender, EventArgs e)
        {
            AForge.Vision.Motion.IMotionDetector   detector  = null;
            AForge.Vision.Motion.IMotionProcessing processor = null;


            detector = new AForge.Vision.Motion.SimpleBackgroundModelingDetector(cbSuppressNoise.Checked)
            {
                DifferenceThreshold             = (int)numDifferenceThreshold.Value,
                FramesPerBackgroundUpdate       = (int)numPerBackgroundUpdate.Value,
                KeepObjectsEdges                = true,
                MillisecondsPerBackgroundUpdate = (int)numMsPerBackgroupUpdate.Value
            };


            processor = new AForge.Vision.Motion.BlobCountingObjectsProcessing()
            {
                HighlightColor         = System.Drawing.Color.Red,
                HighlightMotionRegions = true,
                MinObjectsHeight       = 20,
                MinObjectsWidth        = 20
            };

            var x = new AForge.Vision.Motion.MotionDetector(detector, processor);

            motionDetector = x;
            mouseClickTime = (int)numericUpDown1.Value;
        }
Example #2
        public static AForge.Vision.Motion.MotionDetector GetDefaultMotionDetector()
        {
            AForge.Vision.Motion.IMotionDetector   detector       = null;
            AForge.Vision.Motion.IMotionProcessing processor      = null;
            AForge.Vision.Motion.MotionDetector    motionDetector = null;

            //detector = new AForge.Vision.Motion.TwoFramesDifferenceDetector()
            //{
            //  DifferenceThreshold = 15,
            //  SuppressNoise = true
            //};

            //detector = new AForge.Vision.Motion.CustomFrameDifferenceDetector()
            //{
            //    DifferenceThreshold = 15,
            //    KeepObjectsEdges = true,
            //    SuppressNoise = true
            //};

            // This is currently the best one.
            detector = new AForge.Vision.Motion.SimpleBackgroundModelingDetector()
            {
                DifferenceThreshold             = 15,
                FramesPerBackgroundUpdate       = 5,
                KeepObjectsEdges                = true,
                MillisecondsPerBackgroundUpdate = 5,
                SuppressNoise = true
            };

            //processor = new AForge.Vision.Motion.GridMotionAreaProcessing()
            //{
            //  HighlightColor = System.Drawing.Color.Red,
            //  HighlightMotionGrid = true,
            //  GridWidth = 100,
            //  GridHeight = 100,
            //  MotionAmountToHighlight = 100F
            //};

            processor = new AForge.Vision.Motion.BlobCountingObjectsProcessing()
            {
                HighlightColor         = System.Drawing.Color.Red,
                HighlightMotionRegions = true,
                MinObjectsHeight       = 20,
                MinObjectsWidth        = 20
            };

            motionDetector = new AForge.Vision.Motion.MotionDetector(detector, processor);

            return(motionDetector);
        }
Example #3
        public MainWindow()
        {
            InitializeComponent();

            _engine = GameboyEngine.Instance;

            _processor = new BlobCountingObjectsProcessing(MinimumMovingObjectSize, MinimumMovingObjectSize, Color.Black);
            _detector = new MotionDetector(new TwoFramesDifferenceDetector(), _processor);

            _fields = new ButtonBase[MotionGrid.ColumnDefinitions.Count, MotionGrid.RowDefinitions.Count];

            RandomizeFields();

            Loaded += MainWindow_Loaded;
        }
Example #4
File: CameraWindow.cs Project: vmail/main
        public void Enable()
        {
            if (IsEnabled)
                return;
            if (InvokeRequired)
            {
                Invoke(new SwitchDelegate(Enable));
                return;
            }

            _processing = true;
            if (Camera != null && Camera.IsRunning)
            {
                Disable();
            }

            IsEnabled = true;
            string ckies;
            switch (Camobject.settings.sourceindex)
            {
                case 0:
                    ckies = Camobject.settings.cookies ?? "";
                    ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                    ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                    ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);
                    var jpegSource = new JPEGStream2(Camobject.settings.videosourcestring)
                                         {
                                             Login = Camobject.settings.login,
                                             Password = Camobject.settings.password,
                                             ForceBasicAuthentication = Camobject.settings.forcebasic,
                                             RequestTimeout = MainForm.Conf.IPCameraTimeout,
                                             UseHTTP10 = Camobject.settings.usehttp10,
                                             HttpUserAgent = Camobject.settings.useragent,
                                             Cookies = ckies
                                         };

                    OpenVideoSource(jpegSource, true);

                    if (Camobject.settings.frameinterval != 0)
                        jpegSource.FrameInterval = Camobject.settings.frameinterval;

                    break;
                case 1:
                    ckies = Camobject.settings.cookies ?? "";
                    ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                    ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                    ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                    var mjpegSource = new MJPEGStream2(Camobject.settings.videosourcestring)
                                            {
                                                Login = Camobject.settings.login,
                                                Password = Camobject.settings.password,
                                                ForceBasicAuthentication = Camobject.settings.forcebasic,
                                                RequestTimeout = MainForm.Conf.IPCameraTimeout,
                                                HttpUserAgent = Camobject.settings.useragent,
                                                DecodeKey = Camobject.decodekey,
                                                Cookies = ckies
                                            };
                    OpenVideoSource(mjpegSource, true);
                    break;
                case 2:
                    string url = Camobject.settings.videosourcestring;
                    var ffmpegSource = new FFMPEGStream(url);
                    OpenVideoSource(ffmpegSource, true);
                    break;
                case 3:
                    string moniker = Camobject.settings.videosourcestring;

                    var videoSource = new VideoCaptureDevice(moniker);
                    string[] wh = Camobject.resolution.Split('x');
                    var sz = new Size(Convert.ToInt32(wh[0]), Convert.ToInt32(wh[1]));
                    var vc = videoSource.VideoCapabilities.Where(p => p.FrameSize == sz).ToList();
                    if (vc.Count>0)
                    {
                        var vc2 = vc.FirstOrDefault(p => p.AverageFrameRate == Camobject.settings.framerate) ??
                                  vc.FirstOrDefault();
                        videoSource.VideoResolution = vc2;
                    }

                    if (Camobject.settings.crossbarindex!=-1 && videoSource.CheckIfCrossbarAvailable())
                    {
                        var cbi =
                            videoSource.AvailableCrossbarVideoInputs.FirstOrDefault(
                                p => p.Index == Camobject.settings.crossbarindex);
                        if (cbi!=null)
                        {
                            videoSource.CrossbarVideoInput = cbi;
                        }
                    }

                    OpenVideoSource(videoSource, true);

                    break;
                case 4:
                    Rectangle area = Rectangle.Empty;
                    if (!String.IsNullOrEmpty(Camobject.settings.desktoparea))
                    {
                        var i = Array.ConvertAll(Camobject.settings.desktoparea.Split(','), int.Parse);
                        area = new Rectangle(i[0],i[1],i[2],i[3]);
                    }
                    var desktopSource = new DesktopStream(Convert.ToInt32(Camobject.settings.videosourcestring), area)
                                            {MousePointer = Camobject.settings.desktopmouse};
                    if (Camobject.settings.frameinterval != 0)
                        desktopSource.FrameInterval = Camobject.settings.frameinterval;
                    OpenVideoSource(desktopSource, true);

                    break;
                case 5:
                    List<String> inargs = Camobject.settings.vlcargs.Split(Environment.NewLine.ToCharArray(),
                                                                           StringSplitOptions.RemoveEmptyEntries).ToList();
                    var vlcSource = new VlcStream(Camobject.settings.videosourcestring, inargs.ToArray());
                    OpenVideoSource(vlcSource, true);
                    break;
                case 6:
                    if (XimeaSource == null || !XimeaSource.IsRunning)
                        XimeaSource = new XimeaVideoSource(Convert.ToInt32(Nv("device")));
                    OpenVideoSource(XimeaSource, true);
                    break;
                case 7:
                    var tw = false;
                    if (!String.IsNullOrEmpty(Nv("TripWires")))
                        tw = Convert.ToBoolean(Nv("TripWires"));
                    var ks = new KinectStream(Nv("UniqueKinectId"), Convert.ToBoolean(Nv("KinectSkeleton")), tw);
                    OpenVideoSource(ks, true);
                    break;
                case 8:
                    switch (Nv("custom"))
                    {
                        case "Network Kinect":
                            OpenVideoSource(new KinectNetworkStream(Camobject.settings.videosourcestring), true);
                            break;
                        default:
                            throw new Exception("No custom provider found for "+Nv("custom"));
                    }
                    break;
            }

            if (Camera != null)
            {
                Camera.LastFrameNull = true;

                IMotionDetector motionDetector = null;
                IMotionProcessing motionProcessor = null;

                switch (Camobject.detector.type)
                {
                    case "Two Frames":
                        motionDetector = new TwoFramesDifferenceDetector(Camobject.settings.suppressnoise);
                        break;
                    case "Custom Frame":
                        motionDetector = new CustomFrameDifferenceDetector(Camobject.settings.suppressnoise,
                                                                            Camobject.detector.keepobjectedges);
                        break;
                    case "Background Modelling":
                        motionDetector = new SimpleBackgroundModelingDetector(Camobject.settings.suppressnoise,
                                                                                Camobject.detector.keepobjectedges);
                        break;
                    case "Two Frames (Color)":
                        motionDetector = new TwoFramesColorDifferenceDetector(Camobject.settings.suppressnoise);
                        break;
                    case "Custom Frame (Color)":
                        motionDetector = new CustomFrameColorDifferenceDetector(Camobject.settings.suppressnoise,
                                                                            Camobject.detector.keepobjectedges);
                        break;
                    case "Background Modelling (Color)":
                        motionDetector = new SimpleColorBackgroundModelingDetector(Camobject.settings.suppressnoise,
                                                                                Camobject.detector.keepobjectedges);
                        break;
                    case "None":
                        break;
                }

                if (motionDetector != null)
                {
                    switch (Camobject.detector.postprocessor)
                    {
                        case "Grid Processing":
                            motionProcessor = new GridMotionAreaProcessing
                                                    {
                                                        HighlightColor =
                                                            ColorTranslator.FromHtml(Camobject.detector.color),
                                                        HighlightMotionGrid = Camobject.detector.highlight
                                                    };
                            break;
                        case "Object Tracking":
                            motionProcessor = new BlobCountingObjectsProcessing
                                                    {
                                                        HighlightColor =
                                                            ColorTranslator.FromHtml(Camobject.detector.color),
                                                        HighlightMotionRegions = Camobject.detector.highlight,
                                                        MinObjectsHeight = Camobject.detector.minheight,
                                                        MinObjectsWidth = Camobject.detector.minwidth
                                                    };

                            break;
                        case "Border Highlighting":
                            motionProcessor = new MotionBorderHighlighting
                                                    {
                                                        HighlightColor =
                                                            ColorTranslator.FromHtml(Camobject.detector.color)
                                                    };
                            break;
                        case "Area Highlighting":
                            motionProcessor = new MotionAreaHighlighting
                                                    {
                                                        HighlightColor =
                                                            ColorTranslator.FromHtml(Camobject.detector.color)
                                                    };
                            break;
                        case "None":
                            break;
                    }

                    if (Camera.MotionDetector != null)
                    {
                        Camera.MotionDetector.Reset();
                        Camera.MotionDetector = null;
                    }

                    Camera.MotionDetector = motionProcessor == null ? new MotionDetector(motionDetector) : new MotionDetector(motionDetector, motionProcessor);

                    Camera.AlarmLevel = Helper.CalculateTrigger(Camobject.detector.minsensitivity);
                    Camera.AlarmLevelMax = Helper.CalculateTrigger(Camobject.detector.maxsensitivity);
                    NeedMotionZones = true;
                }
                else
                {
                    Camera.MotionDetector = null;
                }

                if (!Camera.IsRunning)
                {
                    Calibrating = true;
                    CalibrateCount = 0;
                    _calibrateTarget = Camobject.detector.calibrationdelay;
                    _lastRun = DateTime.Now.Ticks;
                    Camera.Start();
                }
                if (Camera.VideoSource is XimeaVideoSource)
                {
                    //need to set these after the camera starts
                    try
                    {
                        XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.RGB24);
                    }
                    catch (ApplicationException)
                    {
                        XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.MONO8);
                    }
                    XimeaSource.SetParam(CameraParameter.OffsetX, Convert.ToInt32(Nv("x")));
                    XimeaSource.SetParam(CameraParameter.OffsetY, Convert.ToInt32(Nv("y")));
                    float gain;
                    float.TryParse(Nv("gain"), out gain);
                    XimeaSource.SetParam(CameraParameter.Gain, gain);
                    float exp;
                    float.TryParse(Nv("exposure"), out exp);
                    XimeaSource.SetParam(CameraParameter.Exposure, exp*1000);
                    XimeaSource.SetParam(CameraParameter.Downsampling, Convert.ToInt32(Nv("downsampling")));
                    XimeaSource.SetParam(CameraParameter.Width, Convert.ToInt32(Nv("width")));
                    XimeaSource.SetParam(CameraParameter.Height, Convert.ToInt32(Nv("height")));
                    XimeaSource.FrameInterval =
                        (int) (1000.0f/XimeaSource.GetParamFloat(CameraParameter.FramerateMax));
                }

                Camobject.settings.active = true;

                if (File.Exists(Camobject.settings.maskimage))
                {
                    Camera.Mask = Image.FromFile(Camobject.settings.maskimage);
                }

                UpdateFloorplans(false);
            }
            _recordingTime = 0;
            _timeLapseTotal = _timeLapseFrameCount = 0;
            InactiveRecord = 0;
            MovementDetected = false;
            VideoSourceErrorState = false;
            VideoSourceErrorMessage = "";
            Alerted = false;
            PTZNavigate = false;
            Camobject.ftp.ready = true;
            _lastRun = DateTime.Now.Ticks;
            MainForm.NeedsSync = true;
            ReconnectCount = 0;
            _dtPTZLastCheck = DateTime.Now;
            _movementLastDetected = DateTime.MinValue;
            _firstFrame = true;

            if (_videoBuffer != null)
            {
                _videoBuffer.Clear();
            }

            if (Camera != null)
            {
                Camera.ZFactor = 1;
                Camera.ZPoint = Point.Empty;
            }
            Invalidate();

            if (VolumeControl != null)
                VolumeControl.Enable();
            _processing = false;
        }
Example #5
        public void Enable()
        {
            if (_enabling)
                return;

            if (InvokeRequired)
            {
                Invoke(new Delegates.EnableDelegate(Enable));
                return;
            }

            lock (_lockobject)
            {
                if (IsEnabled)
                    return;
                IsEnabled = true;
            }
            _enabling = true;

            try
            {
            IsReconnect = false;
            Seekable = false;
            IsClone = Camobject.settings.sourceindex == 10;
            VideoSourceErrorState = false;
            VideoSourceErrorMessage = "";

            string ckies, hdrs;
            switch (Camobject.settings.sourceindex)
            {
                case 0:
                    ckies = Camobject.settings.cookies ?? "";
                    ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                    ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                    ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                    hdrs = Camobject.settings.headers ?? "";
                    hdrs = hdrs.Replace("[USERNAME]", Camobject.settings.login);
                    hdrs = hdrs.Replace("[PASSWORD]", Camobject.settings.password);
                    hdrs = hdrs.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                    var jpegSource = new JPEGStream2(Camobject.settings.videosourcestring)
                                        {
                                            Login = Camobject.settings.login,
                                            Password = Camobject.settings.password,
                                            ForceBasicAuthentication = Camobject.settings.forcebasic,
                                            RequestTimeout = Camobject.settings.timeout,
                                            UseHTTP10 = Camobject.settings.usehttp10,
                                            HttpUserAgent = Camobject.settings.useragent,
                                            Cookies = ckies,
                                            Headers = hdrs
                                        };

                    OpenVideoSource(jpegSource, true);

                    if (Camobject.settings.frameinterval != 0)
                        jpegSource.FrameInterval = Camobject.settings.frameinterval;

                    break;
                case 1:
                    ckies = Camobject.settings.cookies ?? "";
                    ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                    ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                    ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                    hdrs = Camobject.settings.headers ?? "";
                    hdrs = hdrs.Replace("[USERNAME]", Camobject.settings.login);
                    hdrs = hdrs.Replace("[PASSWORD]", Camobject.settings.password);
                    hdrs = hdrs.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                    var mjpegSource = new MJPEGStream2(Camobject.settings.videosourcestring)
                                        {
                                            Login = Camobject.settings.login,
                                            Password = Camobject.settings.password,
                                            ForceBasicAuthentication = Camobject.settings.forcebasic,
                                            RequestTimeout = Camobject.settings.timeout,
                                            HttpUserAgent = Camobject.settings.useragent,
                                            DecodeKey = Camobject.decodekey,
                                            Cookies = ckies,
                                            Headers = hdrs
                                        };
                    OpenVideoSource(mjpegSource, true);
                    break;
                case 2:
                    string url = Camobject.settings.videosourcestring;
                    var ffmpegSource = new FFMPEGStream(url)
                                        {
                                            Cookies = Camobject.settings.cookies,
                                            AnalyzeDuration = Camobject.settings.analyseduration,
                                            Timeout = Camobject.settings.timeout,
                                            UserAgent = Camobject.settings.useragent,
                                            Headers = Camobject.settings.headers,
                                            RTSPMode = Camobject.settings.rtspmode
                                        };
                    OpenVideoSource(ffmpegSource, true);
                    break;
                case 3:
                    string moniker = Camobject.settings.videosourcestring;

                    var videoSource = new VideoCaptureDevice(moniker);
                    string[] wh = Camobject.resolution.Split('x');
                    var sz = new Size(Convert.ToInt32(wh[0]), Convert.ToInt32(wh[1]));

                    string precfg = Nv("video");
                    bool found = false;

                    if (Nv("capturemode") != "snapshots")
                    {
                        VideoCapabilities[] videoCapabilities = videoSource.VideoCapabilities;
                        videoSource.ProvideSnapshots = false;
                        foreach (VideoCapabilities capabilty in videoCapabilities)
                        {

                            string item = string.Format(VideoSource.VideoFormatString, capabilty.FrameSize.Width,
                                Math.Abs(capabilty.FrameSize.Height), capabilty.AverageFrameRate, capabilty.BitCount);
                            if (precfg == item)
                            {
                                videoSource.VideoResolution = capabilty;
                                found = true;
                                break;
                            }
                        }
                    }
                    else
                    {
                        precfg = Nv("snapshots");
                        videoSource.ProvideSnapshots = true;
                        VideoCapabilities[] videoCapabilities = videoSource.SnapshotCapabilities;
                        foreach (VideoCapabilities capabilty in videoCapabilities)
                        {

                            string item = string.Format(VideoSource.SnapshotFormatString, capabilty.FrameSize.Width,
                                Math.Abs(capabilty.FrameSize.Height), capabilty.AverageFrameRate, capabilty.BitCount);
                            if (precfg == item)
                            {
                                videoSource.VideoResolution = capabilty;
                                found = true;
                                break;
                            }
                        }
                    }
                    if (!found)
                    {
                        var vc = videoSource.VideoCapabilities.Where(p => p.FrameSize == sz).ToList();
                        if (vc.Count > 0)
                        {
                            var vc2 = vc.FirstOrDefault(p => p.AverageFrameRate == Camobject.settings.framerate) ??
                                        vc.FirstOrDefault();
                            videoSource.VideoResolution = vc2;
                            found = true;
                        }
                        if (!found)
                        {
                            //first available
                            var vcf = videoSource.VideoCapabilities.FirstOrDefault();
                            if (vcf != null)
                                videoSource.VideoResolution = vcf;
                            //else
                            //{
                            //    dont do this, not having an entry is ok for some video providers
                            //    throw new Exception("Unable to find a video format for the capture device");
                            //}
                        }
                    }

                    if (Camobject.settings.crossbarindex != -1 && videoSource.CheckIfCrossbarAvailable())
                    {
                        var cbi =
                            videoSource.AvailableCrossbarVideoInputs.FirstOrDefault(
                                p => p.Index == Camobject.settings.crossbarindex);
                        if (cbi != null)
                        {
                            videoSource.CrossbarVideoInput = cbi;
                        }
                    }

                    OpenVideoSource(videoSource, true);

                    break;
                case 4:
                    Rectangle area = Rectangle.Empty;
                    if (!String.IsNullOrEmpty(Camobject.settings.desktoparea))
                    {
                        var i = System.Array.ConvertAll(Camobject.settings.desktoparea.Split(','), int.Parse);
                        area = new Rectangle(i[0], i[1], i[2], i[3]);
                    }
                    var desktopSource = new DesktopStream(Convert.ToInt32(Camobject.settings.videosourcestring),
                        area) {MousePointer = Camobject.settings.desktopmouse};
                    if (Camobject.settings.frameinterval != 0)
                        desktopSource.FrameInterval = Camobject.settings.frameinterval;
                    OpenVideoSource(desktopSource, true);

                    break;
                case 5:
                    List<String> inargs = Camobject.settings.vlcargs.Split(Environment.NewLine.ToCharArray(),
                        StringSplitOptions.RemoveEmptyEntries)
                        .
                        ToList();
                    var vlcSource = new VlcStream(Camobject.settings.videosourcestring, inargs.ToArray())
                                    {
                                        TimeOut
                                            =
                                            Camobject
                                            .settings
                                            .timeout
                                    };

                    OpenVideoSource(vlcSource, true);
                    break;
                case 6:
                    if (XimeaSource == null || !XimeaSource.IsRunning)
                        XimeaSource =
                            new XimeaVideoSource(Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "device")));
                    OpenVideoSource(XimeaSource, true);
                    break;
                case 7:
                    var tw = false;
                    try
                    {
                        if (!String.IsNullOrEmpty(Nv(Camobject.settings.namevaluesettings, "TripWires")))
                            tw = Convert.ToBoolean(Nv(Camobject.settings.namevaluesettings, "TripWires"));
                        var ks = new KinectStream(Nv(Camobject.settings.namevaluesettings, "UniqueKinectId"),
                            Convert.ToBoolean(Nv(Camobject.settings.namevaluesettings, "KinectSkeleton")), tw);
                        if (Nv(Camobject.settings.namevaluesettings, "StreamMode") != "")
                            ks.StreamMode = Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "StreamMode"));
                        OpenVideoSource(ks, true);
                    }
                    catch (Exception ex)
                    {
                        if (ErrorHandler != null)
                            ErrorHandler(ex.Message);
                    }
                    break;
                case 8:
                    switch (Nv(Camobject.settings.namevaluesettings, "custom"))
                    {
                        case "Network Kinect":
                            // open the network kinect video stream
                            OpenVideoSource(new KinectNetworkStream(Camobject.settings.videosourcestring), true);
                            break;
                        default:
                            lock (_lockobject)
                            {
                                IsEnabled = false;
                            }
                            throw new Exception("No custom provider found for " +
                                                Nv(Camobject.settings.namevaluesettings, "custom"));
                    }
                    break;
                case 9:
                    //there is no 9, spooky hey?
                    break;
                case 10:
                    int icam;
                    if (Int32.TryParse(Camobject.settings.videosourcestring, out icam))
                    {
                        var cw = MainForm.InstanceReference.GetCameraWindow(icam);
                        if (cw != null)
                        {
                            OpenVideoSource(cw);
                        }

                    }
                    break;
            }

            if (Camera != null)
            {
                IMotionDetector motionDetector = null;
                IMotionProcessing motionProcessor = null;

                switch (Camobject.detector.type)
                {
                    default:
                        motionDetector = new TwoFramesDifferenceDetector(Camobject.settings.suppressnoise);
                        break;
                    case "Custom Frame":
                        motionDetector = new CustomFrameDifferenceDetector(Camobject.settings.suppressnoise,
                            Camobject.detector.keepobjectedges);
                        break;
                    case "Background Modeling":
                        motionDetector = new SimpleBackgroundModelingDetector(Camobject.settings.suppressnoise,
                            Camobject.detector.keepobjectedges);
                        break;
                    case "Two Frames (Color)":
                        motionDetector = new TwoFramesColorDifferenceDetector(Camobject.settings.suppressnoise);
                        break;
                    case "Custom Frame (Color)":
                        motionDetector = new CustomFrameColorDifferenceDetector(
                            Camobject.settings.suppressnoise,
                            Camobject.detector.keepobjectedges);
                        break;
                    case "Background Modeling (Color)":
                        motionDetector =
                            new SimpleColorBackgroundModelingDetector(Camobject.settings.suppressnoise,
                                Camobject.detector.
                                    keepobjectedges);
                        break;
                    case "None":
                        break;
                }

                if (motionDetector != null)
                {
                    switch (Camobject.detector.postprocessor)
                    {
                        case "Grid Processing":
                            motionProcessor = new GridMotionAreaProcessing
                                                {
                                                    HighlightColor =
                                                        ColorTranslator.FromHtml(Camobject.detector.color),
                                                    HighlightMotionGrid = Camobject.detector.highlight
                                                };
                            break;
                        case "Object Tracking":
                            motionProcessor = new BlobCountingObjectsProcessing
                                                {
                                                    HighlightColor =
                                                        ColorTranslator.FromHtml(Camobject.detector.color),
                                                    HighlightMotionRegions = Camobject.detector.highlight,
                                                    MinObjectsHeight = Camobject.detector.minheight,
                                                    MinObjectsWidth = Camobject.detector.minwidth
                                                };

                            break;
                        case "Border Highlighting":
                            motionProcessor = new MotionBorderHighlighting
                                                {
                                                    HighlightColor =
                                                        ColorTranslator.FromHtml(Camobject.detector.color)
                                                };
                            break;
                        case "Area Highlighting":
                            motionProcessor = new MotionAreaHighlighting
                                                {
                                                    HighlightColor =
                                                        ColorTranslator.FromHtml(Camobject.detector.color)
                                                };
                            break;
                        case "None":
                            break;
                    }

                    if (Camera.MotionDetector != null)
                    {
                        Camera.MotionDetector.Reset();
                        Camera.MotionDetector = null;
                    }

                    Camera.MotionDetector = motionProcessor == null
                        ? new MotionDetector(motionDetector)
                        : new MotionDetector(motionDetector, motionProcessor);

                    Camera.AlarmLevel = Helper.CalculateTrigger(Camobject.detector.minsensitivity);
                    Camera.AlarmLevelMax = Helper.CalculateTrigger(Camobject.detector.maxsensitivity);
                    NeedMotionZones = true;
                }
                else
                {
                    Camera.MotionDetector = null;
                }

                LastMovementDetected = Helper.Now;

                ClearBuffer();

                if (!Camera.IsRunning)
                {
                    Calibrating = true;
                    _lastRun = Helper.Now.Ticks;
                    Camera.Start();
                }
                if (Camera.VideoSource is XimeaVideoSource)
                {
                    //need to set these after the camera starts
                    try
                    {
                        XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.RGB24);
                    }
                    catch (ApplicationException)
                    {
                        XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.MONO8);
                    }
                    XimeaSource.SetParam(CameraParameter.OffsetX,
                        Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "x")));
                    XimeaSource.SetParam(CameraParameter.OffsetY,
                        Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "y")));
                    float gain;
                    float.TryParse(Nv(Camobject.settings.namevaluesettings, "gain"), out gain);
                    XimeaSource.SetParam(CameraParameter.Gain, gain);
                    float exp;
                    float.TryParse(Nv(Camobject.settings.namevaluesettings, "exposure"), out exp);
                    XimeaSource.SetParam(CameraParameter.Exposure, exp*1000);
                    XimeaSource.SetParam(CameraParameter.Downsampling,
                        Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "downsampling")));
                    XimeaSource.SetParam(CameraParameter.Width,
                        Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "width")));
                    XimeaSource.SetParam(CameraParameter.Height,
                        Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "height")));
                    XimeaSource.FrameInterval =
                        (int) (1000.0f/XimeaSource.GetParamFloat(CameraParameter.FramerateMax));
                }

                if (File.Exists(Camobject.settings.maskimage))
                {
                    Camera.Mask = (Bitmap) Image.FromFile(Camobject.settings.maskimage);
                }

            }

            Camobject.settings.active = true;
            UpdateFloorplans(false);

            _timeLapseTotal = _timeLapseFrameCount = 0;
            InactiveRecord = 0;
            MovementDetected = false;

            Alerted = false;
            PTZNavigate = false;
            Camobject.ftp.ready = true;
            _lastRun = Helper.Now.Ticks;
            MainForm.NeedsSync = true;
            ReconnectCount = 0;
            _dtPTZLastCheck = DateTime.Now;

            _firstFrame = true;
            _autoofftimer = 0;

            if (Camera != null)
            {
                Camera.ZFactor = 1;
            }
            _requestRefresh = true;

            SetVolumeLevel(Camobject.settings.micpair);
            if (VolumeControl != null)
            {
                VolumeControl.Micobject.settings.buffer = Camobject.recorder.bufferseconds;
                VolumeControl.Enable();
            }

            SetVideoSize();

            //cloned initialisation goes here
            if (CameraEnabled != null)
                CameraEnabled(this, EventArgs.Empty);
            }
            catch (Exception ex)
            {
                if (ErrorHandler != null)
                    ErrorHandler(ex.Message);
            }
            _enabling = false;
        }