void GetInterfaces() { Type comType = null; object comObj = null; try { //ICaptureGraphBuilder2 pBuilder = null; // Initiate Capture Graph Builder Guid clsid = typeof(CaptureGraphBuilder2).GUID; comType = Type.GetTypeFromCLSID(clsid); comObj = Activator.CreateInstance(comType); m_bCapGraph = (ICaptureGraphBuilder2)comObj; // Initiate Graph Builder Guid clsfg = typeof(FilterGraph).GUID; comType = Type.GetTypeFromCLSID(clsfg); //Clsid.FilterGraph); comObj = Activator.CreateInstance(comType); m_bGraph = (IGraphBuilder)comObj; // Initiate Video Configuration Interface DsGuid cat = PinCategory.Capture; DsGuid type = MediaType.Interleaved; Guid iid = typeof(IAMVideoProcAmp).GUID; m_bCapGraph.FindInterface(cat, type, capFilter, iid, out comObj); m_iVidConfig = (IAMVideoProcAmp)comObj; // test //m_iVidConfig.Set(VideoProcAmpProperty.WhiteBalance, 0, VideoProcAmpFlags.Manual); // Initiate Camera Configuration Interface cat = PinCategory.Capture; type = MediaType.Interleaved; iid = typeof(IAMCameraControl).GUID; m_bCapGraph.FindInterface(cat, type, capFilter, iid, out comObj); m_iCamConfig = (IAMCameraControl)comObj; } catch (Exception ee) { if (comObj != null) Marshal.ReleaseComObject(comObj); throw new Exception("Could not get interfaces\r\n" + ee.Message); } }
private void controlCameraSettings() { videoSource.SetCameraProperty(CameraControlProperty.Focus, (int)((255 / 15) * 15), CameraControlFlags.Manual); videoSource.SetCameraProperty(CameraControlProperty.Zoom, 2, CameraControlFlags.Manual); videoSource.SetCameraProperty(CameraControlProperty.Pan, 1, CameraControlFlags.Manual); videoSource.SetCameraProperty(CameraControlProperty.Tilt, -2, CameraControlFlags.Manual); /* Query IAMVideoProcAmp from the running capture source; SourceObject is only valid while the device is running */ object o = videoSource.SourceObject; IAMVideoProcAmp vpa = (IAMVideoProcAmp)o; int pMin, pMax, pSteppingDelta, pDefault; VideoProcAmpFlags pFlags; vpa.GetRange( VideoProcAmpProperty.Brightness, out pMin, out pMax, out pSteppingDelta, out pDefault, out pFlags); vpa.Set(VideoProcAmpProperty.Gain, 20, pFlags); vpa.Set(VideoProcAmpProperty.Contrast, 70, pFlags); }
public VideoProcAmpCameraProperty(CameraPropertyDescriptor descriptor, IAMVideoProcAmp videoAmpControl, VideoProcAmpProperty videoAmpProperty) { Descriptor = descriptor; _videoAmpControl = videoAmpControl; _videoAmpProperty = videoAmpProperty; UpdateRange(); }
private void CloseInterfaces() { try { if (graphBuilder != null) { IMediaControl mediaCtrl = graphBuilder as IMediaControl; mediaCtrl.Stop(); } } catch {} if (graphBuilder != null) { Marshal.ReleaseComObject(graphBuilder); graphBuilder = null; } if (m_VideoControl != null) { Marshal.ReleaseComObject(m_VideoControl); m_VideoControl = null; } if (m_CameraControl != null) { Marshal.ReleaseComObject(m_CameraControl); m_CameraControl = null; } bgrData = null; }
public LifeCamCamera(string name) { _name = name; String moniker = null; FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice); // Match specified camera name to device for (int i = 0, n = videoDevices.Count; i < n; i++) { if (name == videoDevices[i].Name) { moniker = videoDevices[i].MonikerString; break; } } if (moniker == null) { return; } //throw new Exception("Video device with name '" + name + "' not found."); Source = new VideoCaptureDevice(moniker); Source.DesiredFrameRate = 30; cameraControls = (IAMCameraControl)Source.SourceObject; videoProcAmp = (IAMVideoProcAmp)Source.SourceObject; }
/// <summary> /// Sets the brightness. /// </summary> /// <param name="lightValue">Brightness value, 0 to 100</param> /// <returns></returns> public int SetLightValue(int lightValue) { int iResult = 0; IAMVideoProcAmp videoProcAmp = theDevice as IAMVideoProcAmp; if (videoProcAmp == null) { iResult = -1; return(iResult); } int val; int min; int max; int step; int defaultValue; VideoProcAmpFlags flags = VideoProcAmpFlags.Manual; // Set brightness if (lightValue != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Brightness, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { //videoProcAmp.Get(VideoProcAmpProperty.Brightness, out val, out flags); //val = min + (max - min) * lightValue / 255; iResult = videoProcAmp.Set(VideoProcAmpProperty.Brightness, lightValue, flags); } } return(iResult); }
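The commented-out scaling in SetLightValue above points at a general issue: each driver reports its own minimum, maximum and stepping delta per property, so a 0 to 100 percentage should be mapped onto that range rather than passed straight to Set. Below is a minimal sketch of such a mapping, assuming DirectShowLib types and an IAMVideoProcAmp reference obtained as in the examples above; the helper name SetProcAmpPercent and its signature are illustrative, not part of the original code.

// Illustrative helper: maps a 0-100 percentage onto the driver-reported range of a
// VideoProcAmp property, snaps it to the stepping delta and applies it in manual mode.
// Returns the HRESULT from Set, or -1 if the range query fails.
static int SetProcAmpPercent(IAMVideoProcAmp vpa, VideoProcAmpProperty prop, int percent)
{
    int min, max, step, defaultValue;
    VideoProcAmpFlags flags;
    int hr = vpa.GetRange(prop, out min, out max, out step, out defaultValue, out flags);
    if (hr != 0) return -1; // property not supported by this device

    percent = Math.Max(0, Math.Min(100, percent));              // clamp the requested percentage
    int value = min + (max - min) * percent / 100;              // scale into the device range
    if (step > 1) value = min + ((value - min) / step) * step;  // snap to the stepping delta

    return vpa.Set(prop, value, VideoProcAmpFlags.Manual);
}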
internal WebcamProperty GetProcAmpProperties(VideoProcAmpProperty property) { HResult result = HResult.ERROR_NOT_READY; WebcamProperty settings = new WebcamProperty { _name = property.ToString(), _procAmpProp = property, _isProcAmp = true }; if (_base._webcamMode) { IAMVideoProcAmp control = _base.mf_MediaSource as IAMVideoProcAmp; result = control.GetRange(property, out settings._min, out settings._max, out settings._step, out settings._default, out VideoProcAmpFlags flags); if (result == Player.NO_ERROR) { settings._supported = (flags & VideoProcAmpFlags.Manual) != 0; settings._autoSupport = (flags & VideoProcAmpFlags.Auto) != 0; control.Get(property, out settings._value, out flags); settings._auto = (flags & VideoProcAmpFlags.Auto) != 0; } } _base._lastError = result; return(settings); }
/// <summary> /// Sets the saturation. /// </summary> /// <param name="SaturationValue">Saturation value, 0 to 100</param> /// <returns></returns> public int SetSaturationValue(int SaturationValue) { int iResult = 0; IAMVideoProcAmp videoProcAmp = theDevice as IAMVideoProcAmp; if (videoProcAmp == null) { iResult = -1; return(iResult); } int val; int min; int max; int step; int defaultValue; VideoProcAmpFlags flags = VideoProcAmpFlags.Manual; // Set saturation if (SaturationValue != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Saturation, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { //videoProcAmp.Get(VideoProcAmpProperty.Saturation, out val, out flags); //val = min + (max - min) * SaturationValue / 100; iResult = videoProcAmp.Set(VideoProcAmpProperty.Saturation, SaturationValue, flags); } } return(iResult); }
private IAMVideoProcAmp GetIAMVideoProcAmp(DsDevice device) { IGraphBuilder m_graph; /* Create a new graph */ m_graph = (IGraphBuilder) new FilterGraphNoThread(); #if DEBUG m_rotEntry = new DsROTEntry(m_graph); #endif /* Create a capture graph builder to help * with rendering a capture graph */ var graphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); /* Set our filter graph to the capture graph */ int hr = graphBuilder.SetFiltergraph(m_graph); DsError.ThrowExceptionForHR(hr); m_captureDevice = AddFilterByDevice(m_graph, device); object ampControl; int hr11 = graphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, m_captureDevice , typeof(IAMVideoProcAmp).GUID, out ampControl); DsError.ThrowExceptionForHR(hr11); iAMVideoProcAmp = ampControl as IAMVideoProcAmp; return(iAMVideoProcAmp); }
/// <summary> /// /// </summary> /// <param name="dsDevice"></param> /// <param name="prop"></param> /// <returns></returns> public int GetVideoControl(DsDevice dsDevice, VideoProcAmpProperty prop) { IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2; IBaseFilter capFilter = null; int retVal = 0; try { // add the video input device int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter); DsError.ThrowExceptionForHR(hr); IAMVideoProcAmp videoControl = capFilter as IAMVideoProcAmp; int min, max, step, default_val; VideoProcAmpFlags flag = 0; videoControl.GetRange(prop, out min, out max, out step, out default_val, out flag); videoControl.Get(prop, out retVal, out flag); } catch (Exception ex) { Console.WriteLine(ex.Message); } return(retVal); }
public SelectedProperties(DsDevice dev) { InitializeComponent(); webcam = dev; HIDConnectEvent = new EventHandler(HIDConnected); HIDDisconnectEvent = new EventHandler(HIDDisconnected); Guid iid = typeof(IBaseFilter).GUID; webcam.Mon.BindToObject(null, null, ref iid, out object camDevice); IBaseFilter camFilter = camDevice as IBaseFilter; pCameraControl = camFilter as IAMCameraControl; pVideoProcAmp = camFilter as IAMVideoProcAmp; webcamName = INI.KeyExists("Name", webcam.DevicePath) ? INI.ReadINI(webcam.DevicePath, "Name") : webcam.Name; Anchor = AnchorStyles.Left | AnchorStyles.Right; Padding = new Padding(0); gBox = new GroupBox { AutoSize = true, AutoSizeMode = AutoSizeMode.GrowAndShrink, Anchor = AnchorStyles.Left | AnchorStyles.Right | AnchorStyles.Top, Dock = DockStyle.Top, Text = webcamName, Margin = new Padding(0) }; mainLayout = new TableLayoutPanel { Name = "TableLayout", AutoSize = true, AutoSizeMode = AutoSizeMode.GrowAndShrink, Location = new Point(gBox.Padding.Left, gBox.Padding.Top + 20), Dock = DockStyle.Top, }; gBox.Controls.Add(mainLayout); Controls.Add(gBox); InitProperties(); InitHID(); if (hid == null) { HIDConnectEvent = new EventHandler(HIDConnected); Globals._USBControl.HIDConnected += HIDConnectEvent; } alertTimer = new System.Timers.Timer { Interval = 600, AutoReset = true, Enabled = false, }; alertTimer.Elapsed += AlertTimedEvent; }
/// <summary> /// Gets the current saturation value. /// </summary> /// <returns></returns> public int GetSaturationValue() { int SaturationValue = 0; IAMVideoProcAmp videoProcAmp = theDevice as IAMVideoProcAmp; VideoProcAmpFlags flags = VideoProcAmpFlags.Manual; videoProcAmp.Get(VideoProcAmpProperty.Saturation, out SaturationValue, out flags); return(SaturationValue); }
public VideoSetting(IAMVideoProcAmp filter, VideoProcAmpProperty property) { _CamFilter = filter; _CamProperty = property; _Name = property.ToString(); // Update all defaults min max, etc... Read(); }
/// <summary> /// Gets the current brightness value. /// </summary> /// <returns></returns> public int GetLightValue() { int LightValue = 0; IAMVideoProcAmp videoProcAmp = theDevice as IAMVideoProcAmp; VideoProcAmpFlags flags = VideoProcAmpFlags.Manual; videoProcAmp.Get(VideoProcAmpProperty.Brightness, out LightValue, out flags); return(LightValue); }
/// <summary> /// Gets the current contrast value. /// </summary> /// <returns></returns> public int GetContrastValue() { int ContrastValue = 0; IAMVideoProcAmp videoProcAmp = theDevice as IAMVideoProcAmp; VideoProcAmpFlags flags = VideoProcAmpFlags.Manual; videoProcAmp.Get(VideoProcAmpProperty.Contrast, out ContrastValue, out flags); return(ContrastValue); }
protected override void FreeResources() { /* We run the StopInternal() to avoid any * Dispatcher VeryifyAccess() issues */ StopInternal(); /* Let's clean up the base * class's stuff first */ base.FreeResources(); #if DEBUG if (m_rotEntry != null) { m_rotEntry.Dispose(); } m_rotEntry = null; #endif if (m_videoFrame != null) { m_videoFrame.Dispose(); m_videoFrame = null; } if (m_renderer != null) { Marshal.FinalReleaseComObject(m_renderer); m_renderer = null; } if (m_captureDevice != null) { Marshal.FinalReleaseComObject(m_captureDevice); m_captureDevice = null; } if (m_sampleGrabber != null) { Marshal.FinalReleaseComObject(m_sampleGrabber); m_sampleGrabber = null; } if (m_graph != null) { Marshal.FinalReleaseComObject(m_graph); m_graph = null; InvokeMediaClosed(new EventArgs()); } if (m_cameraControl != null) { Marshal.FinalReleaseComObject(m_cameraControl); m_cameraControl = null; } if (m_videoProcAmp != null) { Marshal.FinalReleaseComObject(m_videoProcAmp); m_videoProcAmp = null; } }
public bool Start(string cam, string MonikerStr) { try { // enumerate video devices FilterInfoCollection videoDevices = new FilterInfoCollection(AForge.Video.DirectShow.FilterCategory.VideoInputDevice); // create the video source (check that the camera exists is already done VideoSource = new VideoCaptureDevice(MonikerStr); VideoSource.VideoResolution = VideoSource.VideoCapabilities[17]; VideoSource.NewFrame += new NewFrameEventHandler(Video_NewFrame); ReceivingFrames = false; // try ten times to start int tries = 0; while (tries < 80) // 4s maximum to a camera to start { // VideoSource.Start() checks running status, is safe to call multiple times tries++; VideoSource.Start(); if (!ReceivingFrames) { // 50 ms pause, processing events so that videosource has a chance for (int i = 0; i < 5; i++) { Thread.Sleep(10); Application.DoEvents(); } } else { break; } } cameraControls = VideoSource.SourceObject as IAMVideoProcAmp; if (!ReceivingFrames) { return(false); } VideoCapabilities Capability = VideoSource.VideoCapabilities[17]; ImgSizeX = Capability.FrameSize.Width; ImgSizeY = Capability.FrameSize.Height; ImgCenterX = ImgSizeX / 2; ImgCenterY = ImgSizeY / 2; lock (_locker) { } // wait //PauseProcessing = false; return(true); } catch { return(false); } }
public void InitDevice(DsDevice device, int iWidth, int iHeight) { int hr; object camDevice; Guid iid = typeof(IBaseFilter).GUID; device.Mon.BindToObject(null, null, ref iid, out camDevice); IBaseFilter camFilter = camDevice as IBaseFilter; m_CameraControl = camFilter as IAMCameraControl; m_VideoControl = camFilter as IAMVideoProcAmp; ISampleGrabber sampGrabber = null; graphBuilder = (IGraphBuilder) new FilterGraph(); //Create the Capture Graph Builder ICaptureGraphBuilder2 captureGraphBuilder = null; captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Attach the filter graph to the capture graph hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder); DsError.ThrowExceptionForHR(hr); //Add the Video input device to the graph hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber); DsError.ThrowExceptionForHR(hr); // Configure the sample grabber sampGrabber = new SampleGrabber() as ISampleGrabber; ConfigureSampleGrabber(sampGrabber); IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter; //Add the Video compressor filter to the graph hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber); DsError.ThrowExceptionForHR(hr); IBaseFilter nullRender = new NullRenderer() as IBaseFilter; graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber); InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight); hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampGrabber); Marshal.ReleaseComObject(sampGrabber); Marshal.ReleaseComObject(captureGraphBuilder); }
public LifeCamCamera(string name, string nameContains, int cameraNumber) { _name = ""; String moniker = null; FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice); Console.WriteLine(""); if (!string.IsNullOrEmpty(name)) { Console.WriteLine("Searching for camera name = '" + name + "'"); } if (!string.IsNullOrEmpty(nameContains)) { Console.WriteLine("Searching for camera name contains = '" + nameContains + "'"); } if (cameraNumber > 0) { Console.WriteLine("Searching for camera number = " + cameraNumber); } // Match specified camera name to device for (int i = 0, n = videoDevices.Count; i < n; i++) { Console.WriteLine("Camera " + (i + 1) + ": '" + videoDevices[i].Name + "'"); if ((name == videoDevices[i].Name) || (!string.IsNullOrEmpty(nameContains) && videoDevices[i].Name.IndexOf(nameContains) >= 0) || (cameraNumber == i + 1)) { moniker = videoDevices[i].MonikerString; _name = videoDevices[i].Name; cameraNumberFound = i + 1; break; } } if (moniker == null) { return; } //throw new Exception("Video device with name '" + name + "' not found."); Source = new VideoCaptureDevice(moniker); Source.DesiredFrameRate = 30; cameraControls = (IAMCameraControl)Source.SourceObject; videoProcAmp = (IAMVideoProcAmp)Source.SourceObject; }
void BuildGraph() { int hr = 0; IBaseFilter filter; this.graphBuilder = (IFilterGraph2) new FilterGraph(); DsDevice[] devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice); DsDevice dev = devs[0]; hr = this.graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out filter); DsError.ThrowExceptionForHR(hr); this.videoProcAmp = (IAMVideoProcAmp)filter; }
/// <summary> /// /// </summary> /// <param name="dsDevice"></param> /// <param name="prop"></param> /// <param name="value"></param> /// <param name="flag"></param> public void SetVideoControl(DsDevice dsDevice, VideoProcAmpProperty prop, int value = 0, VideoProcAmpFlags flag = VideoProcAmpFlags.Auto) { IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2; IBaseFilter capFilter = null; try { // add the video input device int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter); DsError.ThrowExceptionForHR(hr); IAMVideoProcAmp videoControl = capFilter as IAMVideoProcAmp; videoControl.Set(prop, value, flag); } catch (Exception ex) { Console.WriteLine(ex.Message); } }
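GetVideoControl and SetVideoControl above build a throwaway filter graph and source filter on every call but never release them, so repeated calls leak COM references. A minimal sketch of the cleanup, assuming the same DirectShowLib types; SetVideoControlSafe is an illustrative variant, not the original method.

// Illustrative variant of SetVideoControl that releases the temporary COM objects it creates.
public void SetVideoControlSafe(DsDevice dsDevice, VideoProcAmpProperty prop, int value = 0, VideoProcAmpFlags flag = VideoProcAmpFlags.Auto)
{
    IFilterGraph2 filterGraph = (IFilterGraph2)new FilterGraph();
    IBaseFilter capFilter = null;
    try
    {
        // add the video input device to the temporary graph
        int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);
        IAMVideoProcAmp videoControl = capFilter as IAMVideoProcAmp;
        if (videoControl != null) videoControl.Set(prop, value, flag);
    }
    finally
    {
        // release the temporary graph and filter so repeated calls do not leak COM references
        if (capFilter != null) Marshal.ReleaseComObject(capFilter);
        Marshal.ReleaseComObject(filterGraph);
    }
}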
public MyCameraControl(string name) { _name = name; String moniker = null; FilterInfoCollection videoDevices = new FilterInfoCollection(FilterCategory.VideoInputDevice); Encoding e = Encoding.GetEncoding("iso-8859-1"); Console.OutputEncoding = e; Console.WriteLine("Scanning for video device " + name + "\n"); // Match specified camera name to device for (int i = 0, n = videoDevices.Count; i < n; i++) { Console.WriteLine("Detected video device: " + videoDevices[i].Name + " checking: " + (name.Equals(videoDevices[i].Name))); if (name == videoDevices[i].Name) { moniker = videoDevices[i].MonikerString; break; } } if (moniker == null) { return; } //throw new Exception("Video device with name '" + name + "' not found."); Source = new VideoCaptureDevice(moniker); Source.DesiredFrameRate = 30; cameraControls = (IAMCameraControl)Source.SourceObject; videoProcAmp = (IAMVideoProcAmp)Source.SourceObject; }
/// <summary> /// Gets the range and default value of a specified video processing amplifier (VideoProcAmp) property. /// </summary> /// /// <param name="property">Specifies the property to query.</param> /// <param name="minValue">Receives the minimum value of the property.</param> /// <param name="maxValue">Receives the maximum value of the property.</param> /// <param name="stepSize">Receives the step size for the property.</param> /// <param name="defaultValue">Receives the default value of the property.</param> /// <param name="controlFlags">Receives a member of the <see cref="VideoProcAmpFlags"/> enumeration, indicating whether the property is controlled automatically or manually.</param> /// /// <returns>Returns true on success or false otherwise.</returns> /// /// <exception cref="ArgumentException">Video source is not specified - device moniker is not set.</exception> /// <exception cref="ApplicationException">Failed creating device object for moniker.</exception> /// <exception cref="NotSupportedException">The video source does not support video processing amplifier control.</exception> /// public bool GetVideoPropertyRange(VideoProcAmpProperty property, out int minValue, out int maxValue, out int stepSize, out int defaultValue, out VideoProcAmpFlags controlFlags) { bool ret = true; // check if source was set if ((deviceMoniker == null) || (string.IsNullOrEmpty(deviceMoniker))) { throw new ArgumentException("Video source is not specified."); } lock (sync) { object tempSourceObject = null; // create source device's object try { tempSourceObject = FilterInfo.CreateFilter(deviceMoniker); } catch { throw new ApplicationException("Failed creating device object for moniker."); } if (!(tempSourceObject is IAMVideoProcAmp)) { throw new NotSupportedException("The video source does not support video processing amplifier control."); } IAMVideoProcAmp pCamControl = (IAMVideoProcAmp)tempSourceObject; int hr = pCamControl.GetRange(property, out minValue, out maxValue, out stepSize, out defaultValue, out controlFlags); ret = (hr >= 0); Marshal.ReleaseComObject(tempSourceObject); } return(ret); }
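A short usage sketch for GetVideoPropertyRange above; the videoDevice variable stands for an instance of the class that defines the method and is illustrative only.

// Query the brightness range before deciding which value to apply.
int min, max, step, defaultValue;
VideoProcAmpFlags flags;
if (videoDevice.GetVideoPropertyRange(VideoProcAmpProperty.Brightness, out min, out max, out step, out defaultValue, out flags))
{
    Console.WriteLine("Brightness: {0}..{1}, step {2}, default {3}, flags {4}", min, max, step, defaultValue, flags);
}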
public override void Cleanup() { try { // Stopping the capture filter before stopping the media control // seems to solve the problem described in the next comment. // sancta simplicitas... if (capFilter != null) { capFilter.Stop(); } // The Stop or StopWhenReady methods sometimes hang ... // This is a multithreading issue that is not solved yet. // But stopping is needed, otherwise the video device // is not disposed fast enough (due to GC), so at the next initialization // with other params the video device seems to be in // use and the GraphBuilder render method fails. if (mediaControl != null) { // This hangs when closing the GT int hr = mediaControl.Stop(); } isRunning = false; } catch (Exception) { //ErrorLogger.ProcessException(ex, false); } if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; cameraControl = null; videoControl = null; videoStreamConfig = null; } if (videoProcAmp != null) { Marshal.ReleaseComObject(videoProcAmp); videoProcAmp = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (graphBuilder != null) { Marshal.ReleaseComObject(graphBuilder); graphBuilder = null; mediaControl = null; hasValidGraph = false; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } if (map != IntPtr.Zero) { UnmapViewOfFile(map); map = IntPtr.Zero; } if (section != IntPtr.Zero) { CloseHandle(section); section = IntPtr.Zero; } #if DEBUG if (this.rotEntry != null) { // This hangs when closing the GT this.rotEntry.Dispose(); } #endif }
/// <summary> /// Connects to the property changed events of the camera settings. /// </summary> //private void Initialize() //{ // //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged; // //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged; // //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged; // //stopwatch = new Stopwatch(); //} /// <summary> /// Build the capture graph for grabber. /// </summary> /// <param name="dev">The index of the new capture device.</param> /// <param name="frameRate">The framerate to use.</param> /// <param name="width">The width to use.</param> /// <param name="height">The height to use.</param> /// <returns>True, if successful, otherwise false.</returns> private bool SetupGraph(DsDevice dev, int frameRate, int width, int height) { int hr; fps = frameRate; // Not measured, only to expose FPS externally cameraControl = null; capFilter = null; // Get the graphbuilder object graphBuilder = (IFilterGraph2)new FilterGraph(); mediaControl = graphBuilder as IMediaControl; try { // Create the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); // Create the SampleGrabber interface sampGrabber = (ISampleGrabber)new SampleGrabber(); // Start building the graph hr = capGraph.SetFiltergraph(graphBuilder); //if (hr != 0) // ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " + // DsError.GetErrorText(hr)); #if DEBUG this.rotEntry = new DsROTEntry(this.graphBuilder); #endif this.capFilter = CreateFilter( FilterCategory.VideoInputDevice, dev.Name); if (this.capFilter != null) { hr = graphBuilder.AddFilter(this.capFilter, "Video Source"); DsError.ThrowExceptionForHR(hr); } //// Add the video device //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter); //if (hr != 0) // ErrorLogger.WriteLine( // "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " + // DsError.GetErrorText(hr)); var baseGrabFlt = (IBaseFilter)sampGrabber; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber"); //if (hr != 0) // ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " + // DsError.GetErrorText(hr)); // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM /* if (!defaultMode) { m_icc = capFilter as IAMCameraControl; CameraControlFlags CamFlags = new CameraControlFlags(); int pMin, pMax, pStep, pDefault; hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags); m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None); } */ //IBaseFilter smartTee = new SmartTee() as IBaseFilter; //// Add the smart tee filter to the graph //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee"); //Marshal.ThrowExceptionForHR(hr); // Connect the video source output to the smart tee //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee); hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt); var errorText = DsError.GetErrorText(hr); cameraControl = capFilter as IAMCameraControl; // Set videoProcAmp object obj; var iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770"); DirectShowDevices.Instance.Cameras[deviceNumber].DirectshowDevice.Mon.BindToObject( null, null, ref iid_IBaseFilter, out obj); videoProcAmp = obj as IAMVideoProcAmp; // If any of the default config items are set if (frameRate + height + width > 0) SetConfigParms(capGraph, capFilter, frameRate, width, height); // Check for successful rendering, if this failed the class cannot be used, so dispose the resources and return false. if (hr < 0) { Cleanup(); return false; } else { // Otherwise update the SampleGrabber. SaveSizeInfo(sampGrabber); hr = sampGrabber.SetBufferSamples(false); if (hr == 0) { hr = sampGrabber.SetOneShot(false); hr = sampGrabber.SetCallback(this, 1); } //if (hr < 0) // ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()"); } } catch (Exception) { //ErrorLogger.ProcessException(ex, false); Cleanup(); return false; } return true; }
public void UpdateTarget(IAMVideoProcAmp filter) { _CamFilter = filter; Read(); }
/// <summary> /// Applies this configuration to the camera. /// </summary> /// <param name="setting">The camera configuration</param> /// <param name="asDefault">Whether to also save it as the default</param> /// <returns></returns> public int SetSettingValue(VideoSetting setting, bool asDefault = false) { if (asDefault) { VideoSettingUtils.Instance.SetDefaultSettings(setting); } int iResult = 0; IAMVideoProcAmp videoProcAmp = theDevice as IAMVideoProcAmp; if (videoProcAmp == null) { iResult = -1; return(iResult); } int val; int min; int max; int step; int defaultValue; VideoProcAmpFlags flags = VideoProcAmpFlags.Manual; // Set brightness if (setting.Brightness != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Brightness, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { //videoProcAmp.Get(VideoProcAmpProperty.Brightness, out val, out flags); //val = min + (max - min) * setting.Brightness / 255; iResult = videoProcAmp.Set(VideoProcAmpProperty.Brightness, setting.Brightness, flags); } } // Set contrast if (setting.ContrastRatio != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Contrast, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { //videoProcAmp.Get(VideoProcAmpProperty.Contrast, out val, out flags); //val = min + (max - min) * setting.ContrastRatio / 100; iResult = videoProcAmp.Set(VideoProcAmpProperty.Contrast, setting.ContrastRatio, flags); } } // Set saturation if (setting.Saturation != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Saturation, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { //videoProcAmp.Get(VideoProcAmpProperty.Saturation, out val, out flags); //val = min + (max - min) * setting.Saturation / 100; iResult = videoProcAmp.Set(VideoProcAmpProperty.Saturation, setting.Saturation, flags); } } // Set hue if (setting.Hue != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Hue, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { iResult = videoProcAmp.Set(VideoProcAmpProperty.Hue, setting.Hue, flags); } } // Set sharpness if (setting.Sharpness != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Sharpness, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { iResult = videoProcAmp.Set(VideoProcAmpProperty.Sharpness, setting.Sharpness, flags); } } // Set gamma if (setting.Gamma != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Gamma, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { iResult = videoProcAmp.Set(VideoProcAmpProperty.Gamma, setting.Gamma, flags); } } // Set color enable if (setting.Gamma != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.ColorEnable, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { iResult = videoProcAmp.Set(VideoProcAmpProperty.ColorEnable, Convert.ToInt32(setting.ColorEnable), flags); } } // White balance if (setting.WhiteBalance != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.WhiteBalance, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { iResult = videoProcAmp.Set(VideoProcAmpProperty.WhiteBalance, setting.WhiteBalance, flags); } } // Backlight compensation if (setting.BacklightCompensation != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.BacklightCompensation, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { iResult = videoProcAmp.Set(VideoProcAmpProperty.BacklightCompensation, setting.BacklightCompensation, flags); } } // Gain if (setting.Gain != -1) { int hr = videoProcAmp.GetRange(VideoProcAmpProperty.Gain, out min, out max, out step, out defaultValue, out flags); if (0 == hr) { iResult = videoProcAmp.Set(VideoProcAmpProperty.Gain, setting.Gain, flags); } } return(iResult); }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin pCaptureOut = null; IPin pSampleIn = null; IPin pRenderIn = null; IAMVideoProcAmp vpa = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; try { #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif // add the video input device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); // Find the still pin m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0); // Didn't find one. Is there a preview pin? if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } //test ISpecifyPropertyPages pProp = capFilter as ISpecifyPropertyPages; DsCAUUID caGUID; hr = pProp.GetPages(out caGUID); DsError.ThrowExceptionForHR(hr); // Still haven't found one. Need to put a splitter in so we have // one stream to capture the bitmap from, and one to display. Ok, we // don't *have* to do it that way, but we are going to anyway. if (m_pinStill == null) { IPin pRaw = null; IPin pSmart = null; // There is no still pin m_VidControl = null; // Add a splitter IBaseFilter iSmartTee = (IBaseFilter) new SmartTee(); try { hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee"); DsError.ThrowExceptionForHR(hr); // Find the capture pin from the video device and the // input pin for the splitter, and connect them pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0); hr = m_FilterGraph.Connect(pRaw, pSmart); DsError.ThrowExceptionForHR(hr); // Now set the capture and still pins (from the splitter) m_pinStill = DsFindPin.ByName(iSmartTee, "Preview"); pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture"); // If any of the default config items are set, perform the config // on the actual video device (rather than the splitter) if (iHeight + iWidth + iBPP > 0) { SetConfigParms(pRaw, iWidth, iHeight, iBPP); } } finally { if (pRaw != null) { Marshal.ReleaseComObject(pRaw); } if (pRaw != pSmart) { Marshal.ReleaseComObject(pSmart); } if (pRaw != iSmartTee) { Marshal.ReleaseComObject(iSmartTee); } } } else { // Get a control pointer (used in Click()) m_VidControl = capFilter as IAMVideoControl; pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); // If any of the default config items are set if (iHeight + iWidth + iBPP > 0) { SetConfigParms(m_pinStill, iWidth, iHeight, iBPP); } } // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure the sample grabber IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Get the default video renderer IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter; hr = m_FilterGraph.AddFilter(pRenderer, "Renderer"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); } else { // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); } // Learn the video properties SaveSizeInfo(sampGrabber); ConfigVideoWindow(hControl); // Start the graph IMediaControl mediaCtrl = m_FilterGraph as IMediaControl; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (vpa != null) { Marshal.ReleaseComObject(vpa); vpa = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); pCaptureOut = null; } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); pRenderIn = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } }
private void UpdatePropertiesCache() { AMproperties.Clear(); IAMVideoProcAmp vpa = null; IAMCameraControl cc = null; try { vpa = (IAMVideoProcAmp)Marshal.GetObjectForIUnknown(dev.NativePointer); foreach (var pVal in Enum.GetValues(typeof(VideoProcAmpProperty))) { AMVideoProcAmpProperty p = new AMVideoProcAmpProperty(); p.PropertyName = Enum.GetName(typeof(VideoProcAmpProperty), pVal); p.ProperyId = (int)pVal; VideoProcAmpFlags flags; int hr = vpa.GetRange((VideoProcAmpProperty)pVal, out p.Min, out p.Max, out p.Delta, out p.DefaultValue, out flags); if (hr != 0) { continue; } p.PossibleFlags = (int)flags; hr = vpa.Get((VideoProcAmpProperty)pVal, out p.Value, out flags); if (hr != 0) { continue; } p.Flag = (int)flags; AMproperties.Add(p); } } catch (Exception ex) { if (ex.HResult != SharpDX.Result.NoInterface.Code) { throw; } } finally { if (vpa != null) { Marshal.ReleaseComObject(vpa); } } try { cc = (IAMCameraControl)Marshal.GetObjectForIUnknown(dev.NativePointer); foreach (var pVal in Enum.GetValues(typeof(CameraControlProperty))) { AMCameraControlProperty p = new AMCameraControlProperty(); p.PropertyName = Enum.GetName(typeof(CameraControlProperty), pVal); p.ProperyId = (int)pVal; CameraControlFlags flags; int hr = cc.GetRange((CameraControlProperty)pVal, out p.Min, out p.Max, out p.Delta, out p.DefaultValue, out flags); if (hr != 0) { continue; } p.PossibleFlags = (int)flags; hr = cc.Get((CameraControlProperty)pVal, out p.Value, out flags); if (hr != 0) { continue; } p.Flag = (int)flags; AMproperties.Add(p); } } catch (Exception ex) { if (ex.HResult != SharpDX.Result.NoInterface.Code) { throw; } } finally { if (cc != null) { Marshal.ReleaseComObject(cc); } } }
public void SetupProperties(DsDevice dev, int brightness, int contrast, int backlightcompensation, int sharpness) { object o; Guid IID_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770"); dev.Mon.BindToObject(null, null, ref IID_IBaseFilter, out o); IAMVideoProcAmp vpa = (IAMVideoProcAmp)o; int pMin, pMax, pSteppingDelta, pDefault; int pMin2, pMax2, pSteppingDelta2, pDefault2; int pMin3, pMax3, pSteppingDelta3, pDefault3; int pMin4, pMax4, pSteppingDelta4, pDefault4; //int pMin5, pMax5, pSteppingDelta5, pDefault5; VideoProcAmpFlags pFlags, pFlags2, pFlags3, pFlags4;//, pFlags5; vpa.GetRange( VideoProcAmpProperty.Brightness, out pMin, out pMax, out pSteppingDelta, out pDefault, out pFlags); vpa.GetRange( VideoProcAmpProperty.Sharpness, out pMin2, out pMax2, out pSteppingDelta2, out pDefault2, out pFlags2); vpa.GetRange( VideoProcAmpProperty.BacklightCompensation, out pMin3, out pMax3, out pSteppingDelta3, out pDefault3, out pFlags3); vpa.GetRange( VideoProcAmpProperty.Contrast, out pMin4, out pMax4, out pSteppingDelta4, out pDefault4, out pFlags4); //vpa.GetRange( //VideoProcAmpProperty.ColorEnable, //out pMin5, //out pMax5, //out pSteppingDelta5, //out pDefault5, //out pFlags5); //Here I will change the settings of the camera! if (brightness >= pMin && brightness <= pMax) { vpa.Set(VideoProcAmpProperty.Brightness, brightness, pFlags); } if (sharpness >= pMin2 && sharpness <= pMax2) { vpa.Set(VideoProcAmpProperty.Sharpness, sharpness, pFlags2); } if (backlightcompensation >= pMin3 && backlightcompensation <= pMax3) { vpa.Set(VideoProcAmpProperty.BacklightCompensation, backlightcompensation, pFlags3); } if (contrast >= pMin4 && contrast <= pMax4) { vpa.Set(VideoProcAmpProperty.Contrast, contrast, pFlags4); } }
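The default value and flags returned by GetRange can also be used to put a property back into a known state. A minimal sketch, assuming the same DirectShowLib types; ResetProcAmpProperty is an illustrative helper, not part of the original code.

// Illustrative helper: restores a property to its driver-reported default,
// preferring automatic mode when the device supports it.
static void ResetProcAmpProperty(IAMVideoProcAmp vpa, VideoProcAmpProperty prop)
{
    int min, max, step, defaultValue;
    VideoProcAmpFlags flags;
    if (vpa.GetRange(prop, out min, out max, out step, out defaultValue, out flags) != 0) return; // not supported
    VideoProcAmpFlags mode = (flags & VideoProcAmpFlags.Auto) != 0 ? VideoProcAmpFlags.Auto : VideoProcAmpFlags.Manual;
    vpa.Set(prop, defaultValue, mode);
}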
/// <summary> /// Saves the current camera settings. /// </summary> public void SaveCurrentOriginSetting(string VideoSettingName, bool AsDefault) { IAMVideoProcAmp videoProcAmp = theDevice as IAMVideoProcAmp; VideoProcAmpFlags flags = VideoProcAmpFlags.Manual; // Brightness, 0 to 255 int LightValue = 0; videoProcAmp.Get(VideoProcAmpProperty.Brightness, out LightValue, out flags); // Contrast, 0 to 255 int ContrastValue = 0; videoProcAmp.Get(VideoProcAmpProperty.Contrast, out ContrastValue, out flags); // Saturation, 0 to 255 int SaturationValue = 0; videoProcAmp.Get(VideoProcAmpProperty.Saturation, out SaturationValue, out flags); // Hue, -127 to 127 int HueValue = 0; videoProcAmp.Get(VideoProcAmpProperty.Hue, out HueValue, out flags); // Sharpness, 0 to 15 int SharpnessValue = 0; videoProcAmp.Get(VideoProcAmpProperty.Sharpness, out SharpnessValue, out flags); // Gamma, 1 to 8 int GammaValue = 0; videoProcAmp.Get(VideoProcAmpProperty.Gamma, out GammaValue, out flags); // Color enable (not supported) int ColorEnable = 0; videoProcAmp.Get(VideoProcAmpProperty.ColorEnable, out ColorEnable, out flags); // White balance (not supported) int WhiteBalanceValue = 0; videoProcAmp.Get(VideoProcAmpProperty.WhiteBalance, out WhiteBalanceValue, out flags); // Backlight compensation, 1 to 5 int BacklightCompensation = 0; videoProcAmp.Get(VideoProcAmpProperty.BacklightCompensation, out BacklightCompensation, out flags); // Gain (not supported) int Gain = 0; videoProcAmp.Get(VideoProcAmpProperty.Gain, out Gain, out flags); VideoSetting setting = new VideoSetting(); setting.Brightness = LightValue; setting.VideoSettingName = VideoSettingName; setting.ContrastRatio = ContrastValue; setting.Saturation = SaturationValue; setting.Hue = HueValue; setting.Sharpness = SharpnessValue; setting.Gamma = GammaValue; setting.ColorEnable = Convert.ToBoolean(ColorEnable); setting.WhiteBalance = WhiteBalanceValue; setting.BacklightCompensation = BacklightCompensation; setting.Gain = Gain; setting.DefaultSetting = AsDefault; VideoSettingUtils.Instance.SaveVideoSetting(setting, AsDefault); }
/// <summary> /// Build the capture graph for grabber. /// </summary> /// <param name="dev">The index of the new capture device.</param> /// <param name="frameRate">The framerate to use.</param> /// <param name="width">The width to use.</param> /// <param name="height">The height to use.</param> /// <returns>True, if succesfull, otherwise false.</returns> private bool SetupGraph(DsDevice dev, int frameRate, int width, int height) { int hr; this.fps = frameRate; // Not measured, only to expose FPS externally this.cameraControl = null; this.capFilter = null; // Get the graphbuilder object this.graphBuilder = (IFilterGraph2) new FilterGraph(); this.mediaControl = this.graphBuilder as IMediaControl; try { // Create the ICaptureGraphBuilder2 this.capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Create the SampleGrabber interface this.sampGrabber = (ISampleGrabber) new SampleGrabber(); // Start building the graph hr = this.capGraph.SetFiltergraph(this.graphBuilder); if (hr != 0) { ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " + DsError.GetErrorText(hr)); } #if DEBUG this.rotEntry = new DsROTEntry(this.graphBuilder); #endif // Add the video device hr = this.graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out this.capFilter); if (hr != 0) { ErrorLogger.WriteLine("Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " + DsError.GetErrorText(hr)); } IBaseFilter baseGrabFlt = (IBaseFilter)this.sampGrabber; this.ConfigureSampleGrabber(this.sampGrabber); // Add the frame grabber to the graph hr = this.graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber"); if (hr != 0) { ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " + DsError.GetErrorText(hr)); } this.cameraControl = this.capFilter as IAMCameraControl; // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM /* * if (!defaultMode) * { * m_icc = capFilter as IAMCameraControl; * CameraControlFlags CamFlags = new CameraControlFlags(); * int pMin, pMax, pStep, pDefault; * * hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags); * m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None); * } */ // Set videoProcAmp object obj; Guid iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770"); Devices.Current.Cameras[0].DirectshowDevice.Mon.BindToObject( null, null, ref iid_IBaseFilter, out obj); this.videoProcAmp = obj as IAMVideoProcAmp; // If any of the default config items are set if (frameRate + height + width > 0) { this.SetConfigParms(this.capGraph, this.capFilter, frameRate, width, height); } hr = this.capGraph.RenderStream(PinCategory.Capture, MediaType.Video, this.capFilter, null, baseGrabFlt); // Check for succesful rendering, if this failed the class cannot be used, so dispose the resources and return false. if (hr < 0) { string error = DsError.GetErrorText(hr); MessageBox.Show(error); this.Dispose(); return(false); } else { // Otherwise update the SampleGrabber. this.SaveSizeInfo(this.sampGrabber); hr = this.sampGrabber.SetBufferSamples(false); if (hr == 0) { hr = this.sampGrabber.SetOneShot(false); } if (hr == 0) { hr = this.sampGrabber.SetCallback(this, 1); } if (hr < 0) { ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()"); } } } catch (Exception ex) { ErrorLogger.ProcessException(ex, false); this.Dispose(); return(false); } return(true); }