/// <summary>
/// Activates this camera: builds the video source selected by
/// Camobject.settings.sourceindex, attaches the configured motion detector and
/// post-processor, starts the camera if it is not already running, and resets
/// all per-session state (counters, timers, PTZ, paired microphone).
/// Re-entrancy is blocked by the _enabling flag; enable-state is guarded by
/// _lockobject; any exception is routed to ErrorHandler.
/// </summary>
public void Enable()
{
    // Drop re-entrant calls while a previous Enable is still in progress.
    if (_enabling) return;

    // Marshal onto the UI thread if we were called from elsewhere.
    if (InvokeRequired)
    {
        Invoke(new Delegates.EnableDelegate(Enable));
        return;
    }

    // Atomically flip IsEnabled; bail if another caller already enabled us.
    lock (_lockobject)
    {
        if (IsEnabled) return;
        IsEnabled = true;
    }
    _enabling = true;
    try
    {
        IsReconnect = false;
        Seekable = false;
        // Source index 10 means this camera mirrors another camera window.
        IsClone = Camobject.settings.sourceindex == 10;
        VideoSourceErrorState = false;
        VideoSourceErrorMessage = "";

        string ckies, hdrs;
        // Build the video source for the configured provider type.
        switch (Camobject.settings.sourceindex)
        {
            case 0:
                // JPEG URL source. Cookie/header templates may embed
                // credential/channel placeholders - substitute them here.
                ckies = Camobject.settings.cookies ?? "";
                ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                hdrs = Camobject.settings.headers ?? "";
                hdrs = hdrs.Replace("[USERNAME]", Camobject.settings.login);
                hdrs = hdrs.Replace("[PASSWORD]", Camobject.settings.password);
                hdrs = hdrs.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                var jpegSource = new JpegStream(Camobject.settings.videosourcestring)
                {
                    Login = Camobject.settings.login,
                    Password = Camobject.settings.password,
                    ForceBasicAuthentication = Camobject.settings.forcebasic,
                    RequestTimeout = Camobject.settings.timeout,
                    UseHttp10 = Camobject.settings.usehttp10,
                    HttpUserAgent = Camobject.settings.useragent,
                    Cookies = ckies,
                    Headers = hdrs
                };
                OpenVideoSource(jpegSource, true);
                // 0 means "use the source's own pace"; only override when set.
                if (Camobject.settings.frameinterval != 0)
                    jpegSource.FrameInterval = Camobject.settings.frameinterval;
                break;
            case 1:
                // MJPEG URL source - same placeholder substitution as case 0.
                ckies = Camobject.settings.cookies ?? "";
                ckies = ckies.Replace("[USERNAME]", Camobject.settings.login);
                ckies = ckies.Replace("[PASSWORD]", Camobject.settings.password);
                ckies = ckies.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                hdrs = Camobject.settings.headers ?? "";
                hdrs = hdrs.Replace("[USERNAME]", Camobject.settings.login);
                hdrs = hdrs.Replace("[PASSWORD]", Camobject.settings.password);
                hdrs = hdrs.Replace("[CHANNEL]", Camobject.settings.ptzchannel);

                var mjpegSource = new MJPEGStream(Camobject.settings.videosourcestring)
                {
                    Login = Camobject.settings.login,
                    Password = Camobject.settings.password,
                    ForceBasicAuthentication = Camobject.settings.forcebasic,
                    RequestTimeout = Camobject.settings.timeout,
                    HttpUserAgent = Camobject.settings.useragent,
                    DecodeKey = Camobject.decodekey,
                    Cookies = ckies,
                    Headers = hdrs
                };
                OpenVideoSource(mjpegSource, true);
                break;
            case 2:
                // FFMPEG source (file or network URL). NOTE(review): unlike
                // cases 0/1, cookies/headers are passed through without
                // placeholder substitution - confirm this is intentional.
                string url = Camobject.settings.videosourcestring;
                var ffmpegSource = new FfmpegStream(url)
                {
                    Cookies = Camobject.settings.cookies,
                    AnalyzeDuration = Camobject.settings.analyseduration,
                    Timeout = Camobject.settings.timeout,
                    UserAgent = Camobject.settings.useragent,
                    Headers = Camobject.settings.headers,
                    RTSPMode = Camobject.settings.rtspmode
                };
                OpenVideoSource(ffmpegSource, true);
                break;
            case 3:
                // Local DirectShow capture device, identified by moniker.
                string moniker = Camobject.settings.videosourcestring;
                var videoSource = new VideoCaptureDevice(moniker);
                // Stored resolution is "WxH".
                string[] wh = Camobject.resolution.Split('x');
                var sz = new Size(Convert.ToInt32(wh[0]), Convert.ToInt32(wh[1]));

                // Try to match the previously configured format string against
                // the device's advertised capabilities (video or snapshot pin).
                string precfg = Nv("video");
                bool found = false;
                if (Nv("capturemode") != "snapshots")
                {
                    VideoCapabilities[] videoCapabilities = videoSource.VideoCapabilities;
                    videoSource.ProvideSnapshots = false;
                    foreach (VideoCapabilities capabilty in videoCapabilities)
                    {
                        string item = string.Format(VideoSource.VideoFormatString,
                            capabilty.FrameSize.Width, Math.Abs(capabilty.FrameSize.Height),
                            capabilty.AverageFrameRate, capabilty.BitCount);
                        if (precfg == item)
                        {
                            videoSource.VideoResolution = capabilty;
                            found = true;
                            break;
                        }
                    }
                }
                else
                {
                    precfg = Nv("snapshots");
                    videoSource.ProvideSnapshots = true;
                    VideoCapabilities[] videoCapabilities = videoSource.SnapshotCapabilities;
                    foreach (VideoCapabilities capabilty in videoCapabilities)
                    {
                        string item = string.Format(VideoSource.SnapshotFormatString,
                            capabilty.FrameSize.Width, Math.Abs(capabilty.FrameSize.Height),
                            capabilty.AverageFrameRate, capabilty.BitCount);
                        if (precfg == item)
                        {
                            videoSource.VideoResolution = capabilty;
                            found = true;
                            break;
                        }
                    }
                }
                if (!found)
                {
                    // No exact format match: fall back to any capability with
                    // the stored frame size, preferring the configured framerate.
                    var vc = videoSource.VideoCapabilities.Where(p => p.FrameSize == sz).ToList();
                    if (vc.Count > 0)
                    {
                        var vc2 = vc.FirstOrDefault(p => p.AverageFrameRate == Camobject.settings.framerate) ??
                                  vc.FirstOrDefault();
                        videoSource.VideoResolution = vc2;
                        found = true;
                    }
                    if (!found)
                    {
                        //first available
                        var vcf = videoSource.VideoCapabilities.FirstOrDefault();
                        if (vcf != null)
                            videoSource.VideoResolution = vcf;
                        //else
                        //{
                        //  dont do this, not having an entry is ok for some video providers
                        //  throw new Exception("Unable to find a video format for the capture device");
                        //}
                    }
                }
                // Select the configured crossbar input where the device has one
                // (-1 means "not configured").
                if (Camobject.settings.crossbarindex != -1 && videoSource.CheckIfCrossbarAvailable())
                {
                    var cbi = videoSource.AvailableCrossbarVideoInputs.FirstOrDefault(
                        p => p.Index == Camobject.settings.crossbarindex);
                    if (cbi != null)
                    {
                        videoSource.CrossbarVideoInput = cbi;
                    }
                }
                OpenVideoSource(videoSource, true);
                break;
            case 4:
                // Desktop capture. desktoparea is "x,y,w,h"; empty = whole screen.
                Rectangle area = Rectangle.Empty;
                if (!string.IsNullOrEmpty(Camobject.settings.desktoparea))
                {
                    var i = System.Array.ConvertAll(Camobject.settings.desktoparea.Split(','), int.Parse);
                    area = new Rectangle(i[0], i[1], i[2], i[3]);
                }
                var desktopSource = new DesktopStream(Convert.ToInt32(Camobject.settings.videosourcestring), area)
                    {MousePointer = Camobject.settings.desktopmouse};
                if (Camobject.settings.frameinterval != 0)
                    desktopSource.FrameInterval = Camobject.settings.frameinterval;
                OpenVideoSource(desktopSource, true);
                break;
            case 5:
                // VLC source; vlcargs is a newline-separated argument list.
                List<String> inargs = Camobject.settings.vlcargs.Split(Environment.NewLine.ToCharArray(),
                    StringSplitOptions.RemoveEmptyEntries).ToList();
                var vlcSource = new VlcStream(Camobject.settings.videosourcestring, inargs.ToArray())
                {
                    TimeOut = Camobject.settings.timeout
                };
                OpenVideoSource(vlcSource, true);
                break;
            case 6:
                // XIMEA camera - reuse the existing source if it is still running.
                if (XimeaSource == null || !XimeaSource.IsRunning)
                    XimeaSource = new XimeaVideoSource(Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "device")));
                OpenVideoSource(XimeaSource, true);
                break;
            case 7:
                // Kinect sensor; options come from the name/value settings bag.
                var tw = false;
                try
                {
                    if (!string.IsNullOrEmpty(Nv(Camobject.settings.namevaluesettings, "TripWires")))
                        tw = Convert.ToBoolean(Nv(Camobject.settings.namevaluesettings, "TripWires"));
                    var ks = new KinectStream(Nv(Camobject.settings.namevaluesettings, "UniqueKinectId"),
                        Convert.ToBoolean(Nv(Camobject.settings.namevaluesettings, "KinectSkeleton")), tw);
                    if (Nv(Camobject.settings.namevaluesettings, "StreamMode") != "")
                        ks.StreamMode = Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "StreamMode"));
                    OpenVideoSource(ks, true);
                }
                catch (Exception ex)
                {
                    ErrorHandler?.Invoke(ex.Message);
                }
                break;
            case 8:
                // Custom named provider.
                switch (Nv(Camobject.settings.namevaluesettings, "custom"))
                {
                    case "Network Kinect":
                        // open the network kinect video stream
                        OpenVideoSource(new KinectNetworkStream(Camobject.settings.videosourcestring), true);
                        break;
                    default:
                        // Unknown provider: roll back the enabled flag before failing.
                        lock (_lockobject)
                        {
                            IsEnabled = false;
                        }
                        throw new Exception("No custom provider found for " +
                                            Nv(Camobject.settings.namevaluesettings, "custom"));
                }
                break;
            case 9:
                //there is no 9, spooky hey?
                break;
            case 10:
                // Clone of another camera: videosourcestring holds its id.
                int icam;
                if (Int32.TryParse(Camobject.settings.videosourcestring, out icam))
                {
                    var cw = MainForm.InstanceReference.GetCameraWindow(icam);
                    if (cw != null)
                    {
                        OpenVideoSource(cw);
                    }
                }
                break;
        }

        if (Camera != null)
        {
            // Wire up motion detection: detector chosen by name, with
            // TwoFramesDifferenceDetector as the default; "None" leaves null.
            IMotionDetector motionDetector = null;
            IMotionProcessing motionProcessor = null;
            switch (Camobject.detector.type)
            {
                default:
                    motionDetector = new TwoFramesDifferenceDetector(Camobject.settings.suppressnoise);
                    break;
                case "Custom Frame":
                    motionDetector = new CustomFrameDifferenceDetector(Camobject.settings.suppressnoise,
                        Camobject.detector.keepobjectedges);
                    break;
                case "Background Modeling":
                    motionDetector = new SimpleBackgroundModelingDetector(Camobject.settings.suppressnoise,
                        Camobject.detector.keepobjectedges);
                    break;
                case "Two Frames (Color)":
                    motionDetector = new TwoFramesColorDifferenceDetector(Camobject.settings.suppressnoise);
                    break;
                case "Custom Frame (Color)":
                    motionDetector = new CustomFrameColorDifferenceDetector(
                        Camobject.settings.suppressnoise, Camobject.detector.keepobjectedges);
                    break;
                case "Background Modeling (Color)":
                    motionDetector = new SimpleColorBackgroundModelingDetector(Camobject.settings.suppressnoise,
                        Camobject.detector.keepobjectedges);
                    break;
                case "None":
                    break;
            }
            if (motionDetector != null)
            {
                // Optional post-processor selected by name; "None" leaves null.
                switch (Camobject.detector.postprocessor)
                {
                    case "Grid Processing":
                        motionProcessor = new GridMotionAreaProcessing
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color),
                            HighlightMotionGrid = Camobject.detector.highlight
                        };
                        break;
                    case "Object Tracking":
                        motionProcessor = new BlobCountingObjectsProcessing
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color),
                            HighlightMotionRegions = Camobject.detector.highlight,
                            MinObjectsHeight = Camobject.detector.minheight,
                            MinObjectsWidth = Camobject.detector.minwidth
                        };
                        break;
                    case "Border Highlighting":
                        motionProcessor = new MotionBorderHighlighting
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color)
                        };
                        break;
                    case "Area Highlighting":
                        motionProcessor = new MotionAreaHighlighting
                        {
                            HighlightColor = ColorTranslator.FromHtml(Camobject.detector.color)
                        };
                        break;
                    case "None":
                        break;
                }
                // Reset and discard any detector left over from a previous run.
                if (Camera.MotionDetector != null)
                {
                    Camera.MotionDetector.Reset();
                    Camera.MotionDetector = null;
                }
                Camera.MotionDetector = motionProcessor == null
                    ? new MotionDetector(motionDetector)
                    : new MotionDetector(motionDetector, motionProcessor);
                Camera.AlarmLevel = Helper.CalculateTrigger(Camobject.detector.minsensitivity);
                Camera.AlarmLevelMax = Helper.CalculateTrigger(Camobject.detector.maxsensitivity);
                NeedMotionZones = true;
            }
            else
            {
                Camera.MotionDetector = null;
            }

            LastMovementDetected = Helper.Now;
            ClearBuffer();

            if (!Camera.IsRunning)
            {
                Calibrating = true;
                _lastRun = Helper.Now.Ticks;
                Camera.Start();
            }
            if (Camera.VideoSource is XimeaVideoSource)
            {
                //need to set these after the camera starts
                try
                {
                    XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.RGB24);
                }
                catch (ApplicationException)
                {
                    // Camera rejected RGB24 - fall back to mono.
                    XimeaSource.SetParam(PRM.IMAGE_DATA_FORMAT, IMG_FORMAT.MONO8);
                }
                XimeaSource.SetParam(CameraParameter.OffsetX, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "x")));
                XimeaSource.SetParam(CameraParameter.OffsetY, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "y")));
                // TryParse leaves the value 0 on failure; that 0 is applied as-is.
                float gain;
                float.TryParse(Nv(Camobject.settings.namevaluesettings, "gain"), NumberStyles.AllowDecimalPoint,
                    CultureInfo.InvariantCulture, out gain);
                XimeaSource.SetParam(CameraParameter.Gain, gain);
                float exp;
                float.TryParse(Nv(Camobject.settings.namevaluesettings, "exposure"), NumberStyles.AllowDecimalPoint,
                    CultureInfo.InvariantCulture, out exp);
                // Configured exposure is scaled by 1000 before being applied
                // (presumably ms -> us; confirm against the XIMEA API).
                XimeaSource.SetParam(CameraParameter.Exposure, exp*1000);
                XimeaSource.SetParam(CameraParameter.Downsampling, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "downsampling")));
                XimeaSource.SetParam(CameraParameter.Width, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "width")));
                XimeaSource.SetParam(CameraParameter.Height, Convert.ToInt32(Nv(Camobject.settings.namevaluesettings, "height")));
                // Pace frame delivery from the camera's maximum frame rate.
                XimeaSource.FrameInterval = (int) (1000.0f/XimeaSource.GetParamFloat(CameraParameter.FramerateMax));
            }
            Camera.UpdateResources();
        }

        // Reset session state now the source is (attempting to) run.
        Camobject.settings.active = true;
        UpdateFloorplans(false);
        _timeLapseTotal = _timeLapseFrameCount = 0;
        InactiveRecord = 0;
        MovementDetected = false;
        Alerted = false;
        PTZNavigate = false;
        Camobject.ftp.ready = true;
        _lastRun = Helper.Now.Ticks;
        MainForm.NeedsSync = true;
        ReconnectCount = 0;
        _dtPTZLastCheck = DateTime.Now;
        _firstFrame = true;
        _autoofftimer = 0;
        if (Camera != null)
        {
            Camera.ZFactor = 1;
        }
        _requestRefresh = true;

        // Bring up the paired microphone, if any, with the recorder's buffer.
        SetVolumeLevel(Camobject.settings.micpair);
        if (VolumeControl != null)
        {
            VolumeControl.Micobject.settings.buffer = Camobject.recorder.bufferseconds;
            VolumeControl.Enable();
        }
        SetVideoSize();

        //cloned initialisation goes here

        CameraEnabled?.Invoke(this, EventArgs.Empty);
    }
    catch (Exception ex)
    {
        ErrorHandler?.Invoke(ex.Message);
    }
    _enabling = false;
}
/// <summary>
/// Applies one configured proc-amp value (brightness, contrast, ...) to a
/// capture device. The value is read from procAmpConfig under key <paramref name="n"/>
/// and its flags from key "f" + <paramref name="n"/>; a stored value of
/// Int32.MinValue appears to act as a "not configured" sentinel and is skipped
/// (TODO confirm against the settings writer). Failures are reported via
/// ErrorHandler rather than thrown.
/// </summary>
private void SetVideoSourceProperty(VideoCaptureDevice device, VideoProcAmpProperty prop, string n)
{
    try
    {
        int value;
        // Nothing stored, or the sentinel value: leave the device untouched.
        if (!Int32.TryParse(Nv(Camobject.settings.procAmpConfig, n), out value) || value <= Int32.MinValue)
            return;

        // Only apply when a companion flags entry ("f" + name) is present too.
        int flagValue;
        if (Int32.TryParse(Nv(Camobject.settings.procAmpConfig, "f" + n), out flagValue))
        {
            device.SetProperty(prop, value, (VideoProcAmpFlags)flagValue);
        }
    }
    catch (Exception ex)
    {
        ErrorHandler?.Invoke(ex.Message);
    }
}
/// <summary>
/// Creates a grabber bound to its owning capture device.
/// </summary>
/// <param name="parent">The capture device this grabber belongs to.</param>
/// <param name="snapshotMode">
/// Mode flag stored for later use - presumably distinguishes snapshot-pin from
/// video-pin grabbing (name-based inference; confirm at the call sites).
/// </param>
public Grabber( VideoCaptureDevice parent, bool snapshotMode )
{
    _snapshotMode = snapshotMode;
    _parent = parent;
}
/// <summary>
/// Rebinds the working capture device and refreshes its supported frame
/// sizes when the user selects a different entry in the devices combo.
/// Does nothing when no video devices were discovered.
/// </summary>
private void devicesCombo_SelectedIndexChanged_1(object sender, EventArgs e)
{
    if (_videoDevices.Count == 0) return;

    string moniker = _videoDevices[devicesCombo.SelectedIndex].MonikerString;
    _videoCaptureDevice = new VideoCaptureDevice(moniker);
    EnumeratedSupportedFrameSizes();
}
/// <summary>
/// Form-load handler for the video source dialog: populates every control
/// from the camera's saved settings (URLs, credentials, frame intervals,
/// VLC args, clone list), probes optional hardware (local capture devices,
/// screens, XIMEA, Kinect, ONVIF discovery results) and pre-selects the
/// entries matching the current configuration.
/// </summary>
private void VideoSourceLoad(object sender, EventArgs e)
{
    UISync.Init(this);
    // VLC tab only usable when VLC is installed; otherwise show the hint link.
    tlpVLC.Enabled = VlcHelper.VlcInstalled;
    linkLabel3.Visible = !tlpVLC.Enabled;
    // Seed URL fields from the last-used global values...
    cmbJPEGURL.Text = MainForm.Conf.JPEGURL;
    cmbMJPEGURL.Text = MainForm.Conf.MJPEGURL;
    cmbVLCURL.Text = MainForm.Conf.VLCURL;
    cmbFile.Text = MainForm.Conf.AVIFileName;
    ConfigureSnapshots = true;
    // ...and mirror the camera's credentials into all login/password boxes.
    txtOnvifUsername.Text = txtLogin.Text = txtLogin2.Text = CameraControl.Camobject.settings.login;
    txtOnvifPassword.Text = txtPassword.Text = txtPassword2.Text = CameraControl.Camobject.settings.password;
    VideoSourceString = CameraControl.Camobject.settings.videosourcestring;
    SourceIndex = CameraControl.Camobject.settings.sourceindex;
    if (SourceIndex == 3)
    {
        // Local capture device: the source string is the device moniker and
        // the saved resolution is "WxH".
        VideoDeviceMoniker = VideoSourceString;
        string[] wh= CameraControl.Camobject.resolution.Split('x');
        CaptureSize = new Size(Convert.ToInt32(wh[0]), Convert.ToInt32(wh[1]));
    }
    txtFrameInterval.Text = txtFrameInterval2.Text =
        CameraControl.Camobject.settings.frameinterval.ToString(CultureInfo.InvariantCulture);
    // Normalise stored VLC args to single newlines, then to platform newlines.
    txtVLCArgs.Text = CameraControl.Camobject.settings.vlcargs.Replace("\r\n","\n").Replace("\n\n","\n")
        .Replace("\n", Environment.NewLine);
    // Offer every other camera as a clone target, except clones themselves.
    foreach (var cam in MainForm.Cameras)
    {
        if (cam.id != CameraControl.Camobject.id && cam.settings.sourceindex!=10)
            //dont allow a clone of a clone as the events get too complicated (and also it's pointless)
            ddlCloneCamera.Items.Add(new MainForm.ListItem2(cam.name, cam.id));
    }
    ddlCustomProvider.SelectedIndex = 0;
    // Route the stored source string into the control for its source type.
    switch (SourceIndex)
    {
        case 0:
            cmbJPEGURL.Text = VideoSourceString;
            txtFrameInterval.Text = CameraControl.Camobject.settings.frameinterval.ToString(CultureInfo.InvariantCulture);
            break;
        case 1:
            cmbMJPEGURL.Text = VideoSourceString;
            break;
        case 2:
            cmbFile.Text = VideoSourceString;
            break;
        case 3:
            chkAutoImageSettings.Checked = NV("manual") != "true";
            break;
        case 5:
            cmbVLCURL.Text = VideoSourceString;
            break;
        case 8:
            txtCustomURL.Text = VideoSourceString;
            switch (NV("custom"))
            {
                default:
                    ddlCustomProvider.SelectedIndex = 0;
                    break;
            }
            break;
        case 10:
            // Clone: source string is the cloned camera's id - select it.
            int id;
            if (Int32.TryParse(VideoSourceString, out id))
            {
                foreach (MainForm.ListItem2 li in ddlCloneCamera.Items)
                {
                    if (li.Value == id)
                    {
                        ddlCloneCamera.SelectedItem = li;
                        break;
                    }
                }
            }
            break;
    }
    if (!string.IsNullOrEmpty(CameraControl.Camobject.decodekey))
        txtDecodeKey.Text = CameraControl.Camobject.decodekey;
    chkMousePointer.Checked = CameraControl.Camobject.settings.desktopmouse;
    numBorderTimeout.Value = CameraControl.Camobject.settings.bordertimeout;
    // Populate MRU dropdowns.
    cmbJPEGURL.Items.AddRange(ObjectList(MainForm.Conf.RecentJPGList));
    cmbMJPEGURL.Items.AddRange(ObjectList(MainForm.Conf.RecentMJPGList));
    cmbFile.Items.AddRange(ObjectList(MainForm.Conf.RecentFileList));
    cmbVLCURL.Items.AddRange(ObjectList(MainForm.Conf.RecentVLCList));
    numAnalyseDuration.Value = CameraControl.Camobject.settings.analyseduration;
    // Select the configured local capture device, defaulting to the first.
    int selectedCameraIndex = 0;
    for (int i = 0; i < _videoDevices.Count; i++)
    {
        if (_videoDeviceMoniker == _videoDevices[i].MonikerString)
        {
            selectedCameraIndex = i;
            break;
        }
    }
    devicesCombo.SelectedIndex = selectedCameraIndex;
    // Screens list: index 0 is the "please select" placeholder, so the stored
    // screen number is offset by one.
    ddlScreen.SuspendLayout();
    foreach (Screen s in Screen.AllScreens)
    {
        ddlScreen.Items.Add(s.DeviceName);
    }
    ddlScreen.Items.Insert(0, LocRm.GetString("PleaseSelect"));
    if (SourceIndex == 4)
    {
        int screenIndex = Convert.ToInt32(VideoSourceString) + 1;
        ddlScreen.SelectedIndex = ddlScreen.Items.Count>screenIndex ? screenIndex : 1;
    }
    else
        ddlScreen.SelectedIndex = 0;
    ddlScreen.ResumeLayout();
    SetSourceIndex(SourceIndex);
    // If the camera is already running on a local device, reuse it so the
    // crossbar input and frame sizes reflect the live device.
    if (CameraControl?.Camera?.VideoSource is VideoCaptureDevice)
    {
        _videoCaptureDevice = (VideoCaptureDevice)CameraControl.Camera.VideoSource;
        _videoInput = _videoCaptureDevice.CrossbarVideoInput;
        EnumeratedSupportedFrameSizes();
    }
    //ximea
    int deviceCount = 0;
    try
    {
        deviceCount = XimeaCamera.CamerasCount;
    }
    catch(Exception)
    {
        //Ximea DLL not installed
        //Logger.LogMessageToFile("This is not a XIMEA device");
    }
    pnlXimea.Enabled = deviceCount>0;
    if (pnlXimea.Enabled)
    {
        for (int i = 0; i < deviceCount; i++)
        {
            ddlXimeaDevice.Items.Add("Device " + i);
        }
        // Restore saved XIMEA parameters from the name/value settings.
        if (NV("type")=="ximea")
        {
            int deviceIndex = Convert.ToInt32(NV("device"));
            ddlXimeaDevice.SelectedIndex = ddlXimeaDevice.Items.Count > deviceIndex?deviceIndex:0;
            numXimeaWidth.Text = NV("width");
            numXimeaHeight.Text = NV("height");
            numXimeaOffsetX.Value = Convert.ToInt32(NV("x"));
            numXimeaOffestY.Value = Convert.ToInt32(NV("y"));
            // TryParse leaves 0 on failure, which is then shown as-is.
            decimal gain;
            decimal.TryParse(NV("gain"), NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out gain);
            numXimeaGain.Value = gain;
            decimal exp;
            decimal.TryParse(NV("exposure"), NumberStyles.AllowDecimalPoint, CultureInfo.InvariantCulture, out exp);
            // Missing/zero exposure defaults to 100 in the UI.
            if (exp == 0) exp = 100;
            numXimeaExposure.Value = exp;
            combo_dwnsmpl.SelectedItem = NV("downsampling");
        }
    }
    else
    {
        ddlXimeaDevice.Items.Add(LocRm.GetString("NoDevicesFound"));
        ddlXimeaDevice.SelectedIndex = 0;
    }
    // Kinect probing: enumerate connected sensors and select the saved one.
    deviceCount = 0;
    try
    {
        foreach (var potentialSensor in KinectSensor.KinectSensors)
        {
            if (potentialSensor.Status == KinectStatus.Connected)
            {
                deviceCount++;
                ddlKinectDevice.Items.Add(potentialSensor.UniqueKinectId);
                if (NV("type") == "kinect")
                {
                    if (NV("UniqueKinectId") == potentialSensor.UniqueKinectId)
                    {
                        ddlKinectDevice.SelectedIndex = ddlKinectDevice.Items.Count - 1;
                    }
                }
            }
        }
    }
    catch (Exception)
    {
        //Type error if not installed
        Logger.LogMessageToFile("Kinect is not installed");
    }
    if (deviceCount>0)
    {
        if (ddlKinectDevice.SelectedIndex == -1)
            ddlKinectDevice.SelectedIndex = 0;
    }
    else
    {
        pnlKinect.Enabled = false;
    }
    ddlKinectVideoMode.SelectedIndex = 0;
    if (NV("type") == "kinect")
    {
        // Saved Kinect options may be malformed - best effort only.
        try
        {
            chkKinectSkeletal.Checked = Convert.ToBoolean(NV("KinectSkeleton"));
            chkTripWires.Checked = Convert.ToBoolean(NV("TripWires"));
            if (NV("StreamMode")!="")
                ddlKinectVideoMode.SelectedIndex = Convert.ToInt32(NV("StreamMode"));
        }
        catch (Exception)
        {
            // ignored
        }
    }
    ddlTransport.Items.AddRange(_transports);
    ddlTransport.SelectedIndex = 0;
    ddlRTSP.SelectedIndex = CameraControl.Camobject.settings.rtspmode;
    chkConnectVLC.Enabled = chkConnectVLC.Checked = VlcHelper.VlcInstalled;
    // List discovered ONVIF devices, appending the location when known.
    int j = 0;
    foreach(var dev in MainForm.ONVIFDevices)
    {
        string n = dev.Name;
        if (!string.IsNullOrEmpty(dev.Location))
            n += " (" + dev.Location + ")";
        lbONVIFDevices.Items.Add(new MainForm.ListItem2(n,j));
        j++;
    }
    _loaded = true;
    if (StartWizard)
        Wizard();
}
/// <summary>
/// Nudges a DirectShow camera-control property (pan/tilt/zoom/...) by
/// <paramref name="i"/> device steps relative to its current value, clamped to
/// the device-reported range. An <paramref name="i"/> of 0 resets the property
/// to its default value. Logs instead of throwing when the device does not
/// support manual control of the property.
/// </summary>
/// <param name="d">Capture device to adjust.</param>
/// <param name="p">Which camera-control property to move.</param>
/// <param name="i">Signed number of steps; 0 means "reset to default".</param>
void IAMMove(VideoCaptureDevice d, CameraControlProperty p, int i)
{
    int v, minv, maxv, stepSize, defVal;
    CameraControlFlags f, cf;
    d.GetCameraProperty(p, out v, out f);
    d.GetCameraPropertyRange(p, out minv, out maxv, out stepSize, out defVal, out cf);

    int newv = v + i*stepSize;
    // Clamp to the legal range advertised by the device.
    if (newv < minv) newv = minv;
    if (newv > maxv) newv = maxv;
    // Zero delta means "reset" (defVal is within [minv, maxv] by contract).
    if (i == 0) newv = defVal;

    // cf is a capability bitmask from IAMCameraControl::GetRange; a device
    // supporting both modes reports (Auto | Manual), so test the Manual bit
    // rather than comparing for equality (equality rejected such devices).
    if ((cf & CameraControlFlags.Manual) == CameraControlFlags.Manual)
    {
        d.SetCameraProperty(p, newv, CameraControlFlags.Manual);
    }
    else
    {
        MainForm.LogMessageToFile("Camera control flags are not manual");
    }
}