public void Enable()
{
    if (_enabling) return;

    if (InvokeRequired)
    {
        Invoke(new Delegates.EnableDelegate(Enable));
        return;
    }

    lock (_lockobject)
    {
        if (IsEnabled) return;
        IsEnabled = true;
    }

    _enabling = true;
    try
    {
        IsReconnect = false;
        Seekable = false;
        IsClone = Camobject.settings.sourceindex == 10;
        VideoSourceErrorState = false;
        VideoSourceErrorMessage = "";

        string ckies, hdrs;
        switch (Camobject.settings.sourceindex)
        {
            case 0:
                // JPEG URL source: substitute credential and channel tokens into cookies and headers
                ckies = Camobject.settings.cookies ?? "";
                ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                hdrs = Camobject.settings.headers ?? "";
                hdrs = hdrs.Replace("[USERNAME]", Camobject.settings.login);
                hdrs = hdrs.Replace("[PASSWORD]", Camobject.settings.password);
                hdrs = hdrs.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                var jpegSource = new JpegStream(Camobject.settings.videosourcestring)
                {
                    Login = Camobject.settings.login,
                    Password = Camobject.settings.password,
                    ForceBasicAuthentication = Camobject.settings.forcebasic,
                    RequestTimeout = Camobject.settings.timeout,
                    UseHttp10 = Camobject.settings.usehttp10,
                    HttpUserAgent = Camobject.settings.useragent,
                    Cookies = ckies,
                    Headers = hdrs
                };
                OpenVideoSource(jpegSource, true);
                if (Camobject.settings.frameinterval != 0)
                    jpegSource.FrameInterval = Camobject.settings.frameinterval;
                break;
            case 1:
                // MJPEG URL source
                ckies = Camobject.settings.cookies ?? "";
                ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                hdrs = Camobject.settings.headers ?? "";
                hdrs = hdrs.Replace("[USERNAME]", Camobject.settings.login);
                hdrs = hdrs.Replace("[PASSWORD]", Camobject.settings.password);
                hdrs = hdrs.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                var mjpegSource = new MJPEGStream(Camobject.settings.videosourcestring)
                {
                    Login = Camobject.settings.login,
                    Password = Camobject.settings.password,
                    ForceBasicAuthentication = Camobject.settings.forcebasic,
                    RequestTimeout = Camobject.settings.timeout,
                    HttpUserAgent = Camobject.settings.useragent,
                    DecodeKey = Camobject.decodekey,
                    Cookies = ckies,
                    Headers = hdrs
                };
                OpenVideoSource(mjpegSource, true);
                break;
            case 2:
                // FFMPEG network stream
                string url = Camobject.settings.videosourcestring;
                var ffmpegSource = new FfmpegStream(url)
                {
                    Cookies = Camobject.settings.cookies,
                    AnalyzeDuration = Camobject.settings.analyseduration,
                    Timeout = Camobject.settings.timeout,
                    UserAgent = Camobject.settings.useragent,
                    Headers = Camobject.settings.headers,
                    RTSPMode = Camobject.settings.rtspmode
                };
                OpenVideoSource(ffmpegSource, true);
                break;
            case 3:
                // local capture device: try to match the saved video format, then fall back
                string moniker = Camobject.settings.videosourcestring;
                var videoSource = new VideoCaptureDevice(moniker);
                string[] wh = Camobject.resolution.Split('x');
                var sz = new Size(Convert.ToInt32(wh[0]), Convert.ToInt32(wh[1]));
                string precfg = Nv("video");
                bool found = false;

                if (Nv("capturemode") != "snapshots")
                {
                    VideoCapabilities[] videoCapabilities = videoSource.VideoCapabilities;
                    videoSource.ProvideSnapshots = false;
                    foreach (VideoCapabilities capability in videoCapabilities)
                    {
                        string item = string.Format(VideoSource.VideoFormatString,
                            capability.FrameSize.Width, Math.Abs(capability.FrameSize.Height),
                            capability.AverageFrameRate, capability.BitCount);
                        if (precfg == item)
                        {
                            videoSource.VideoResolution = capability;
                            found = true;
                            break;
                        }
                    }
                }
                else
                {
                    precfg = Nv("snapshots");
                    videoSource.ProvideSnapshots = true;
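                    // match the saved snapshot format against the device's advertised snapshot capabilities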
                    VideoCapabilities[] videoCapabilities = videoSource.SnapshotCapabilities;
                    foreach (VideoCapabilities capability in videoCapabilities)
                    {
                        string item = string.Format(VideoSource.SnapshotFormatString,
                            capability.FrameSize.Width, Math.Abs(capability.FrameSize.Height),
                            capability.AverageFrameRate, capability.BitCount);
                        if (precfg == item)
                        {
                            videoSource.VideoResolution = capability;
                            found = true;
                            break;
                        }
                    }
                }

                if (!found)
                {
                    // no exact match: prefer a capability with the configured frame size and frame rate
                    var vc = videoSource.VideoCapabilities.Where(p => p.FrameSize == sz).ToList();
                    if (vc.Count > 0)
                    {
                        var vc2 = vc.FirstOrDefault(p => p.AverageFrameRate == Camobject.settings.framerate) ??
                                  vc.FirstOrDefault();
                        videoSource.VideoResolution = vc2;
                        found = true;
                    }
                    if (!found)
                    {
                        // first available
                        var vcf = videoSource.VideoCapabilities.FirstOrDefault();
                        if (vcf != null)
                            videoSource.VideoResolution = vcf;
                        //else
                        //{
                        //    dont do this, not having an entry is ok for some video providers
                        //    throw new Exception("Unable to find a video format for the capture device");
                        //}
                    }
                }

                if (Camobject.settings.crossbarindex != -1 && videoSource.CheckIfCrossbarAvailable())
                {
                    var cbi = videoSource.AvailableCrossbarVideoInputs.FirstOrDefault(
                        p => p.Index == Camobject.settings.crossbarindex);
                    if (cbi != null)
                    {
                        videoSource.CrossbarVideoInput = cbi;
                    }
                }

                OpenVideoSource(videoSource, true);
                break;
            case 4:
                // desktop capture
                Rectangle area = Rectangle.Empty;
                if (!string.IsNullOrEmpty(Camobject.settings.desktoparea))
                {
                    var i = System.Array.ConvertAll(Camobject.settings.desktoparea.Split(','), int.Parse);
                    area = new Rectangle(i[0], i[1], i[2], i[3]);
                }
                var desktopSource = new DesktopStream(Convert.ToInt32(Camobject.settings.videosourcestring), area)
                {
                    MousePointer = Camobject.settings.desktopmouse
                };
                if (Camobject.settings.frameinterval != 0)
                    desktopSource.FrameInterval = Camobject.settings.frameinterval;
                OpenVideoSource(desktopSource, true);
                break;
            case 5:
                // VLC source
                List<String> inargs = Camobject.settings.vlcargs.Split(Environment.NewLine.ToCharArray(),
                    StringSplitOptions.RemoveEmptyEntries).ToList();
                var vlcSource = new VlcStream(Camobject.settings.videosourcestring, inargs.ToArray())
                {
                    TimeOut = Camobject.settings.timeout
                };
                OpenVideoSource(vlcSource, true);
                break;
            case 6:
                // XIMEA camera
                if (XimeaSource == null || !XimeaSource.IsRunning)
                    XimeaSource = new XimeaVideoSource(Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "device")));
                OpenVideoSource(XimeaSource, true);
                break;
            case 7:
                // Kinect
                var tw = false;
                try
                {
                    if (!string.IsNullOrEmpty(Nv(Camobject.settings.namevaluesettings, "TripWires")))
                        tw = Convert.ToBoolean(Nv(Camobject.settings.namevaluesettings, "TripWires"));
                    var ks = new KinectStream(Nv(Camobject.settings.namevaluesettings, "UniqueKinectId"),
                        Convert.ToBoolean(Nv(Camobject.settings.namevaluesettings, "KinectSkeleton")), tw);
                    if (Nv(Camobject.settings.namevaluesettings, "StreamMode") != "")
                        ks.StreamMode = Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "StreamMode"));
                    OpenVideoSource(ks, true);
                }
                catch (Exception ex)
                {
                    ErrorHandler?.Invoke(ex.Message);
                }
                break;
            case 8:
                // custom providers
                switch (Nv(Camobject.settings.namevaluesettings, "custom"))
                {
                    case "Network Kinect":
                        // open the network kinect video stream
                        OpenVideoSource(new KinectNetworkStream(Camobject.settings.videosourcestring), true);
                        break;
                    default:
                        lock (_lockobject)
                        {
                            IsEnabled = false;
                        }
                        throw new Exception("No custom provider found for " +
                                            Nv(Camobject.settings.namevaluesettings, "custom"));
                }
                break;
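            // case 9 is unused; case 10 re-uses the feed from another camera window (clone)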
            case 9:
                //there is no 9, spooky hey?
                break;
            case 10:
                int icam;
                if (Int32.TryParse(Camobject.settings.videosourcestring, out icam))
                {
                    var cw = MainForm.InstanceReference.GetCameraWindow(icam);
                    if (cw != null)
                    {
                        OpenVideoSource(cw);
                    }
                }
                break;
        }

        if (Camera != null)
        {
            // build the motion detector and optional post-processor from the saved configuration
            IMotionDetector motionDetector = null;
            IMotionProcessing motionProcessor = null;
            switch (Camobject.detector.type)
            {
                default:
                    motionDetector = new TwoFramesDifferenceDetector(Camobject.settings.suppressnoise);
                    break;
                case "Custom Frame":
                    motionDetector = new CustomFrameDifferenceDetector(Camobject.settings.suppressnoise,
                        Camobject.detector.keepobjectedges);
                    break;
                case "Background Modeling":
                    motionDetector = new SimpleBackgroundModelingDetector(Camobject.settings.suppressnoise,
                        Camobject.detector.keepobjectedges);
                    break;
                case "Two Frames (Color)":
                    motionDetector = new TwoFramesColorDifferenceDetector(Camobject.settings.suppressnoise);
                    break;
                case "Custom Frame (Color)":
                    motionDetector = new CustomFrameColorDifferenceDetector(Camobject.settings.suppressnoise,
                        Camobject.detector.keepobjectedges);
                    break;
                case "Background Modeling (Color)":
                    motionDetector = new SimpleColorBackgroundModelingDetector(Camobject.settings.suppressnoise,
                        Camobject.detector.keepobjectedges);
                    break;
                case "None":
                    break;
            }

            if (motionDetector != null)
            {
                switch (Camobject.detector.postprocessor)
                {
                    case "Grid Processing":
                        motionProcessor = new GridMotionAreaProcessing
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color),
                            HighlightMotionGrid = Camobject.detector.highlight
                        };
                        break;
                    case "Object Tracking":
                        motionProcessor = new BlobCountingObjectsProcessing
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color),
                            HighlightMotionRegions = Camobject.detector.highlight,
                            MinObjectsHeight = Camobject.detector.minheight,
                            MinObjectsWidth = Camobject.detector.minwidth
                        };
                        break;
                    case "Border Highlighting":
                        motionProcessor = new MotionBorderHighlighting
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color)
                        };
                        break;
                    case "Area Highlighting":
                        motionProcessor = new MotionAreaHighlighting
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color)
                        };
                        break;
                    case "None":
                        break;
                }

                if (Camera.MotionDetector != null)
                {
                    Camera.MotionDetector.Reset();
                    Camera.MotionDetector = null;
                }
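                // wrap the detector (and optional post-processor) and apply the configured trigger thresholds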
                Camera.MotionDetector = motionProcessor == null
                    ? new MotionDetector(motionDetector)
                    : new MotionDetector(motionDetector, motionProcessor);
                Camera.AlarmLevel = Helper.CalculateTrigger(Camobject.detector.minsensitivity);
                Camera.AlarmLevelMax = Helper.CalculateTrigger(Camobject.detector.maxsensitivity);
                NeedMotionZones = true;
            }
            else
            {
                Camera.MotionDetector = null;
            }

            LastMovementDetected = Helper.Now;
            ClearBuffer();

            if (!Camera.IsRunning)
            {
                Calibrating = true;
                _lastRun = Helper.Now.Ticks;
                Camera.Start();
            }

            if (Camera.VideoSource is XimeaVideoSource)
            {
                //need to set these after the camera starts
                try
                {
                    XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.RGB24);
                }
                catch (ApplicationException)
                {
                    XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.MONO8);
                }
                XimeaSource.SetParam(CameraParameter.OffsetX, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "x")));
                XimeaSource.SetParam(CameraParameter.OffsetY, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "y")));

                float gain;
                float.TryParse(Nv(Camobject.settings.namevaluesettings, "gain"), NumberStyles.AllowDecimalPoint,
                    CultureInfo.InvariantCulture, out gain);
                XimeaSource.SetParam(CameraParameter.Gain, gain);

                float exp;
                float.TryParse(Nv(Camobject.settings.namevaluesettings, "exposure"), NumberStyles.AllowDecimalPoint,
                    CultureInfo.InvariantCulture, out exp);
                XimeaSource.SetParam(CameraParameter.Exposure, exp*1000);

                XimeaSource.SetParam(CameraParameter.Downsampling, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "downsampling")));
                XimeaSource.SetParam(CameraParameter.Width, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "width")));
                XimeaSource.SetParam(CameraParameter.Height, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "height")));
                XimeaSource.FrameInterval = (int) (1000.0f/XimeaSource.GetParamFloat(CameraParameter.FramerateMax));
            }

            Camera.UpdateResources();
        }

        // reset state, counters and flags for the newly enabled camera
        Camobject.settings.active = true;
        UpdateFloorplans(false);
        _timeLapseTotal = _timeLapseFrameCount = 0;
        InactiveRecord = 0;
        MovementDetected = false;
        Alerted = false;
        PTZNavigate = false;
        Camobject.ftp.ready = true;
        _lastRun = Helper.Now.Ticks;
        MainForm.NeedsSync = true;
        ReconnectCount = 0;
        _dtPTZLastCheck = DateTime.Now;
        _firstFrame = true;
        _autoofftimer = 0;

        if (Camera != null)
        {
            Camera.ZFactor = 1;
        }
        _requestRefresh = true;

        SetVolumeLevel(Camobject.settings.micpair);
        if (VolumeControl != null)
        {
            VolumeControl.Micobject.settings.buffer = Camobject.recorder.bufferseconds;
            VolumeControl.Enable();
        }

        SetVideoSize();

        //cloned initialisation goes here
        CameraEnabled?.Invoke(this, EventArgs.Empty);
    }
    catch (Exception ex)
    {
        ErrorHandler?.Invoke(ex.Message);
    }
    _enabling = false;
}