/// <summary>
/// Returns available resolutions with RGB color system for device moniker
/// </summary>
/// <param name="moniker">Moniker (device identification) of camera.</param>
/// <returns>List of resolutions with RGB color system of device</returns>
public static ResolutionList GetResolutionList(IMoniker moniker)
{
    ResolutionList resolutions = null;

    // Build a throwaway graph so the device filter can be instantiated and probed.
    IFilterGraph2 graph = new FilterGraph() as IFilterGraph2;
    IBaseFilter sourceFilter = null;
    try
    {
        // Bind the device moniker into the graph as a source filter.
        int hr = graph.AddSourceFilterForMoniker(moniker, null, "Source Filter", out sourceFilter);
        DsError.ThrowExceptionForHR(hr);

        resolutions = GetResolutionsAvailable(sourceFilter);
    }
    finally
    {
        // Always drop the COM references, even when probing fails.
        SafeReleaseComObject(sourceFilter);
        sourceFilter = null;
        SafeReleaseComObject(graph);
        graph = null;
    }

    return resolutions;
}
/// <summary>
/// Builds an audio capture graph from the first audio input device, renders the
/// audio stream, and caches the IAMAudioRendererStats interface of the
/// "Audio Renderer" filter in m_ibn. Also stores IMediaControl in m_imc.
/// </summary>
private void Setup()
{
    int hr;
    IFilterGraph2 ifg = new FilterGraph() as IFilterGraph2;
    m_imc = ifg as IMediaControl;
    // Register the graph in the ROT so GraphEdit can attach (debug aid).
    DsROTEntry rot = new DsROTEntry(ifg);

    IBaseFilter pFilter;
    // NOTE(review): assumes at least one audio input device exists — devs[0]
    // throws IndexOutOfRangeException otherwise; confirm callers guarantee this.
    DsDevice[] devs = DsDevice.GetDevicesOfCat(FilterCategory.AudioInputDevice);
    hr = ifg.AddSourceFilterForMoniker(devs[0].Mon, null, devs[0].Name, out pFilter);
    DsError.ThrowExceptionForHR(hr);

    ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    hr = icgb2.SetFiltergraph(ifg);
    DsError.ThrowExceptionForHR(hr);

    //IPin pPin = DsFindPin.ByDirection((IBaseFilter)o, PinDirection.Output, 0);

    hr = icgb2.RenderStream(null, MediaType.Audio, pFilter, null, null);
    DsError.ThrowExceptionForHR(hr);

    IBaseFilter pAudio;
    hr = ifg.FindFilterByName("Audio Renderer", out pAudio);
    // BUG FIX: hr was previously ignored — a missing renderer would have
    // surfaced later as a confusing null in m_ibn.
    DsError.ThrowExceptionForHR(hr);

    m_ibn = pAudio as IAMAudioRendererStats;
}
/// <summary>
/// Returns available frame sizes with RGB color system for device moniker
/// </summary>
/// <param name="moniker">Moniker (device identification) of camera.</param>
/// <returns>List of frame sizes with RGB color system of device</returns>
public static FrameSize[] GetFrameSizeList(IMoniker moniker)
{
    FrameSize[] frameSizes = null;

    // Temporary graph: the device filter must live in a graph before its
    // capabilities can be queried.
    IFilterGraph2 graph = new FilterGraph() as IFilterGraph2;
    IBaseFilter sourceFilter = null;
    try
    {
        int hr = graph.AddSourceFilterForMoniker(moniker, null, "Source Filter", out sourceFilter);
        DsError.ThrowExceptionForHR(hr);

        frameSizes = GetFrameSizesAvailable(sourceFilter);
    }
    finally
    {
        // Release COM references on both success and failure paths.
        SafeReleaseComObject(sourceFilter);
        sourceFilter = null;
        SafeReleaseComObject(graph);
        graph = null;
    }

    return frameSizes;
}
/// <summary>
/// Reads the current value of a video processing amplifier property
/// (brightness, contrast, ...) from the given capture device.
/// </summary>
/// <param name="dsDevice">Capture device to query.</param>
/// <param name="prop">The VideoProcAmp property to read.</param>
/// <returns>The current property value, or 0 when the query fails.</returns>
public int GetVideoControl(DsDevice dsDevice, VideoProcAmpProperty prop)
{
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    int retVal = 0;
    try
    {
        // add the video input device
        int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        IAMVideoProcAmp videoControl = capFilter as IAMVideoProcAmp;
        if (videoControl == null)
        {
            // BUG FIX: the cast result was dereferenced without a null check —
            // a device without IAMVideoProcAmp caused a NullReferenceException.
            Console.WriteLine("Device does not expose IAMVideoProcAmp");
            return retVal;
        }

        int min, max, step, default_val;
        VideoProcAmpFlags flag = 0;
        // BUG FIX: HRESULTs of GetRange/Get were ignored; failures now surface
        // (and are logged by the catch below), instead of silently returning 0.
        hr = videoControl.GetRange(prop, out min, out max, out step, out default_val, out flag);
        DsError.ThrowExceptionForHR(hr);
        hr = videoControl.Get(prop, out retVal, out flag);
        DsError.ThrowExceptionForHR(hr);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    finally
    {
        // BUG FIX: the filter and graph were previously leaked.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
    return retVal;
}
/// <summary>
/// Builds a render graph from the first video input device and caches the
/// IQualityControl interface of the "Video Renderer" filter in m_qc.
/// Also stores IMediaControl in m_imc.
/// </summary>
private void Setup()
{
    int hr;
    IBaseFilter ibf;
    IFilterGraph2 ifg = new FilterGraph() as IFilterGraph2;
    m_imc = ifg as IMediaControl;
    DsROTEntry rot = new DsROTEntry(ifg);

    // NOTE(review): assumes at least one video input device — confirm callers.
    DsDevice[] devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    hr = ifg.AddSourceFilterForMoniker(devs[0].Mon, null, devs[0].Name, out ibf);
    // BUG FIX: hr was previously ignored for AddSourceFilterForMoniker.
    DsError.ThrowExceptionForHR(hr);

    ICaptureGraphBuilder2 icgb2 = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    hr = icgb2.SetFiltergraph(ifg);
    DsError.ThrowExceptionForHR(hr);

    hr = icgb2.RenderStream(null, null, ibf, null, null);
    DsError.ThrowExceptionForHR(hr);

    IBaseFilter pFilter;
    hr = ifg.FindFilterByName("Video Renderer", out pFilter);
    // BUG FIX: hr was previously ignored for FindFilterByName.
    DsError.ThrowExceptionForHR(hr);

    m_qc = pFilter as IQualityControl;
    rot.Dispose();
}
/// <summary>
/// Binds to the named video capture device and caches its IAMCameraControl and
/// IKsPropertySet interfaces for pan/tilt/zoom control.
/// </summary>
/// <param name="name">Exact device friendly name to bind.</param>
/// <param name="type">PTZ addressing mode; Relative requires driver support.</param>
/// <exception cref="ApplicationException">Device not found, or a required interface is missing.</exception>
/// <exception cref="NotSupportedException">Relative pan/tilt not supported by the camera.</exception>
private PTZDevice(string name, PTZType type)
{
    var devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    var device = devices.Where(d => d.Name == name).FirstOrDefault();
    _device = device;
    _type = type;
    if (_device == null)
        throw new ApplicationException(String.Format("Couldn't find device named {0}!", name));

    IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;
    IBaseFilter filter = null;
    IMoniker i = _device.Mon as IMoniker;

    // BUG FIX: the HRESULT was previously discarded — a bind failure surfaced
    // later as confusing "Couldn't get ICamControl!" errors.
    int hr = graphBuilder.AddSourceFilterForMoniker(i, null, _device.Name, out filter);
    DsError.ThrowExceptionForHR(hr);

    _camControl = filter as IAMCameraControl;
    _ksPropertySet = filter as IKsPropertySet;

    if (_camControl == null) throw new ApplicationException("Couldn't get ICamControl!");
    if (_ksPropertySet == null) throw new ApplicationException("Couldn't get IKsPropertySet!");

    //TODO: Add Absolute
    if (type == PTZType.Relative &&
        !(SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_PAN_RELATIVE) &&
          SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_TILT_RELATIVE)))
    {
        throw new NotSupportedException("This camera doesn't appear to support Relative Pan and Tilt");
    }

    //TODO: Do I through NotSupported when methods are called or throw them now?
    //TODO: Do I check for Zoom or ignore if it's not there?
    InitZoomRanges();
}
/// <summary>
/// Enumerates the first output pin of the given capture device and returns the
/// video formats it advertises (as produced by GetResolutionsAvailable).
/// </summary>
/// <param name="dsDevice">Capture device to inspect.</param>
/// <returns>Format description strings for the device's first output pin.</returns>
static public string[] GetCameraCapability(DsDevice dsDevice)
{
    int hr;
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    IPin pPin = null;
    string[] listVideoInfo;
    try
    {
        // add the video input device
        hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        pPin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        listVideoInfo = GetResolutionsAvailable(pPin);
    }
    finally
    {
        // BUG FIX: pPin can be null when AddSourceFilterForMoniker throws, and
        // Marshal.ReleaseComObject(null) raises ArgumentNullException in the
        // finally block, masking the original failure. Also release the filter
        // and the graph, which were previously leaked.
        if (pPin != null)
        {
            Marshal.ReleaseComObject(pPin);
            pPin = null;
        }
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
            filterGraph = null;
        }
    }
    return listVideoInfo;
}
/// <summary>
/// Switches the camera's focus control between automatic and manual mode.
/// The focus value 250 is passed in both cases; in Auto mode the driver
/// ignores it and focuses continuously.
/// </summary>
/// <param name="dsDevice">Capture device to configure.</param>
/// <param name="bEnable">true = auto focus, false = manual focus.</param>
public void AutoFocus(DsDevice dsDevice, bool bEnable)
{
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    try
    {
        // add the video input device
        int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        IAMCameraControl cameraControl = capFilter as IAMCameraControl;
        if (cameraControl == null)
        {
            // BUG FIX: previously dereferenced without a null check — devices
            // without IAMCameraControl caused a NullReferenceException.
            Console.WriteLine("Device does not expose IAMCameraControl");
            return;
        }

        cameraControl.Set(CameraControlProperty.Focus, 250,
                          bEnable ? CameraControlFlags.Auto : CameraControlFlags.Manual);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    finally
    {
        // BUG FIX: the filter and graph were previously leaked.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
}
// NOTE(review): this method is truncated/non-compiling — the final two
// statements are incomplete (missing ';' after the BitmapInfoHeader
// initialisation and no value assigned to BmiHeader.Size). The missing tail
// must be restored before this can build.
private void buildGraph()
{
    int hr = 0;
    IBaseFilter captureFilter;
    AMMediaType pmt4 = new AMMediaType();
    // NOTE(review): a local filter graph is created here, but the capture graph
    // builder below is attached to m_graph — confirm the local is intentional.
    IFilterGraph2 filtergraph = new FilterGraph() as IFilterGraph2;
    ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    hr = pBuilder.SetFiltergraph(m_graph);
    DsError.ThrowExceptionForHR(hr);
    // Return value of AddSourceFilterForMoniker is not checked here; the
    // ThrowExceptionForHR below re-tests the stale hr from SetFiltergraph.
    filtergraph.AddSourceFilterForMoniker(m_capDev.Mon, null, m_capDev.Name, out captureFilter);
    m_graph.AddFilter(captureFilter, "CapFilter");
    DsError.ThrowExceptionForHR(hr);
    // Insert a sample grabber with a still-frame callback.
    IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(typeof(SampleGrabber));
    hr = m_graph.AddFilter(pSampleGrabber, "SampleGrabber");
    DsError.ThrowExceptionForHR(hr);
    hr = ((ISampleGrabber)pSampleGrabber).SetCallback(new StillGrabberCallBack(), 0);
    // Request uncompressed RGB24 video from the grabber.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    VideoInfoHeader format = new VideoInfoHeader();
    format.SrcRect = new DsRect();
    format.TargetRect = new DsRect();
    // --- truncated from here (see note at top) ---
    format.BmiHeader = new BitmapInfoHeader()
    format.BmiHeader.Size =
}
/// <summary>
/// Walks the stream capabilities of the selected device's capture pin (the
/// results are currently discarded) and returns a freshly bound IBaseFilter
/// for the device obtained via its moniker.
/// </summary>
/// <returns>A new IBaseFilter instance for the selected device.</returns>
public IBaseFilter GetVideo()
{
    IBaseFilter baseDevice = null;
    var filterGraph = new FilterGraph() as IFilterGraph2;
    IPin pin = null;
    IntPtr ptr = IntPtr.Zero;
    try
    {
        int hr = filterGraph.AddSourceFilterForMoniker(selectedDevice.Mon, null, selectedDevice.Name, out baseDevice);
        // BUG FIX: hr was previously ignored.
        DsError.ThrowExceptionForHR(hr);

        pin = DsFindPin.ByCategory(baseDevice, PinCategory.Capture, 0);
        var streamConfig = pin as IAMStreamConfig;
        AMMediaType media;
        int iC = 0, iS = 0;
        streamConfig.GetNumberOfCapabilities(out iC, out iS);
        ptr = Marshal.AllocCoTaskMem(iS);
        for (int i = 0; i < iC; i++)
        {
            streamConfig.GetStreamCaps(i, out media, ptr);
            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, v);
            // BUG FIX: each AMMediaType returned by GetStreamCaps was leaked.
            DsUtils.FreeAMMediaType(media);
        }
    }
    finally
    {
        // BUG FIX: the caps buffer, pin, filter and graph were all leaked.
        if (ptr != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(ptr);
        }
        if (pin != null)
        {
            Marshal.ReleaseComObject(pin);
        }
        if (baseDevice != null)
        {
            Marshal.ReleaseComObject(baseDevice);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }

    // Bind a fresh filter instance directly from the moniker for the caller.
    Guid iid = typeof(IBaseFilter).GUID;
    object source;
    selectedDevice.Mon.BindToObject(null, null, ref iid, out source);
    return (IBaseFilter)source;
}
/// <summary>
/// Enumerates the media types on the capture pin of the video input device at
/// <paramref name="deviceIndex"/> and returns the resolutions advertised at the
/// highest color depth seen (lower-depth entries are discarded).
/// </summary>
/// <param name="deviceIndex">Index into the system's video input device list.</param>
/// <returns>The resolutions found, or null when any step fails.</returns>
public static List <CameraResolution> GetAvailableResolutions(int deviceIndex) // DsDevice vidDev)
{
    try
    {
        DsDevice[] captureDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
        DsDevice vidDev = captureDevices[deviceIndex];
        int hr;
        int max = 0;
        int bitCount = 0;

        IBaseFilter sourceFilter = null;
        var mFilterGraph2 = new FilterGraph() as IFilterGraph2;
        hr = mFilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        var pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var AvailableResolutions = new List <CameraResolution>();
        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
        IEnumMediaTypes mediaTypeEnum;
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        IntPtr fetched = IntPtr.Zero;
        hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        while (fetched != null && mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, videoInfoHeader);
            if (videoInfoHeader.BmiHeader.Size != 0 && videoInfoHeader.BmiHeader.BitCount != 0)
            {
                // Keep only the resolutions at the deepest color format seen so far.
                if (videoInfoHeader.BmiHeader.BitCount > bitCount)
                {
                    AvailableResolutions.Clear();
                    max = 0;
                    bitCount = videoInfoHeader.BmiHeader.BitCount;
                }
                CameraResolution availableResolution = new CameraResolution();
                availableResolution.HorizontalResolution = videoInfoHeader.BmiHeader.Width;
                availableResolution.VerticalResolution = videoInfoHeader.BmiHeader.Height;
                AvailableResolutions.Add(availableResolution);
                if (videoInfoHeader.BmiHeader.Width > max || videoInfoHeader.BmiHeader.Height > max)
                {
                    max = (Math.Max(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height));
                }
            }
            // BUG FIX: each enumerated AMMediaType (CoTaskMem format block) was
            // leaked. Free it and clear the slot so the loop also terminates if
            // Next() leaves the array untouched at end of stream.
            DsUtils.FreeAMMediaType(mediaTypes[0]);
            mediaTypes[0] = null;
            hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        }
        // BUG FIX: release the COM objects created above (previously leaked).
        Marshal.ReleaseComObject(mediaTypeEnum);
        Marshal.ReleaseComObject(pRaw2);
        Marshal.ReleaseComObject(sourceFilter);
        Marshal.ReleaseComObject(mFilterGraph2);
        return AvailableResolutions;
    }
    catch (Exception)
    {
        // Preserved behavior: callers receive null on any failure.
        return null;
    }
}
/// <summary>
/// Wraps a DirectShow capture device: instantiates its source filter inside a
/// throwaway graph and registers the device's control properties.
/// </summary>
/// <param name="device">The DirectShow device to wrap.</param>
internal Camera(DsDevice device)
{
    _device = device;

    // The filter must live inside a graph before its interfaces can be used.
    IFilterGraph2 builder = new FilterGraph() as IFilterGraph2;
    IMoniker moniker = _device.Mon as IMoniker;
    builder.AddSourceFilterForMoniker(moniker, null, _device.Name, out _filter);

    RegisterProperties();
}
/// <summary>
/// Enumerates the media types on the capture pin of the video input device at
/// <paramref name="deviceId"/> and returns a newline-separated list of
/// "WxH @fps fps" strings (duplicates removed). Returns
/// "No device information available" for an invalid index, or whatever was
/// collected before a failure occurred.
/// </summary>
/// <param name="deviceId">Index into the system's video input device list.</param>
public static string GetVideoFormats(int deviceId)
{
    var AvailableFormats = "No device information available";
    var capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    if (capDevices.Length > 0 && deviceId < capDevices.Length)
    {
        var device = capDevices[deviceId];
        if (device != null)
        {
            try
            {
                int hr;
                IBaseFilter sourceFilter = null;
                // Temporary graph so the device filter (and its pins) exists.
                var m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
                hr = m_FilterGraph2.AddSourceFilterForMoniker(device.Mon, null, device.Name, out sourceFilter);
                var pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
                VideoInfoHeader v = new VideoInfoHeader();
                IEnumMediaTypes mediaTypeEnum;
                hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
                AMMediaType[] mediaTypes = new AMMediaType[1];
                // NOTE(review): IntPtr.Zero is passed as pcFetched, so
                // `fetched != null` is always true for an IntPtr; loop exit
                // relies solely on mediaTypes[0] becoming null. The enumerated
                // media types are also never freed here (leak) — verify.
                IntPtr fetched = IntPtr.Zero;
                hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
                AvailableFormats = "";
                while (fetched != null && mediaTypes[0] != null)
                {
                    Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
                    if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
                    {
                        // AvgTimePerFrame is in 100 ns units; convert to frames/sec.
                        var fps = Math.Floor((1 / ((v.AvgTimePerFrame * 100) * 0.000000001))).ToString();
                        var format = v.BmiHeader.Width.ToString() + "x" + v.BmiHeader.Height.ToString() + " @" + fps.ToString() + " fps\n";
                        // Skip duplicates (same WxH@fps at a different bit depth).
                        if (!AvailableFormats.Contains(format))
                        {
                            AvailableFormats += format;
                        }
                    }
                    hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
                }
            }
            catch
            {
                // Best-effort: return whatever was collected before the failure.
                return (AvailableFormats);
            }
        }
    }
    return (AvailableFormats);
}
/// <summary>
/// Enumerates the media types on the capture pin of <paramref name="vidDev"/>
/// and returns every advertised resolution (width, height, bit depth).
/// Unlike sibling implementations, the list is NOT cleared when a deeper color
/// format is found (the Clear call is intentionally commented out).
/// </summary>
/// <param name="vidDev">Capture device to inspect.</param>
/// <exception cref="Exception">Rethrows any failure during enumeration.</exception>
public static List <Resolution> GetAllAvailableResolution(DsDevice vidDev)
{
    try
    {
        int hr;
        int max = 0;
        int bitCount = 0;

        IBaseFilter sourceFilter = null;
        var m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
        hr = m_FilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        var pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var AvailableResolutions = new List <Resolution>();
        VideoInfoHeader v = new VideoInfoHeader();
        IEnumMediaTypes mediaTypeEnum;
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        IntPtr fetched = IntPtr.Zero;
        hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        while (fetched != null && mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
            if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
            {
                if (v.BmiHeader.BitCount > bitCount)
                {
                    //AvailableResolutions.Clear();
                    max = 0;
                    bitCount = v.BmiHeader.BitCount;
                }
                AvailableResolutions.Add(new Resolution(v.BmiHeader.Width, v.BmiHeader.Height, v.BmiHeader.BitCount));
                if (v.BmiHeader.Width > max || v.BmiHeader.Height > max)
                {
                    max = (Math.Max(v.BmiHeader.Width, v.BmiHeader.Height));
                }
            }
            hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        }
        return (AvailableResolutions);
    }
    catch (Exception)
    {
        // BUG FIX: `throw ex;` reset the stack trace — rethrow preserving it.
        throw;
        //return new List<Resolution>();
    }
}
/// <summary>
/// Enumerates the media types on the capture pin of <paramref name="vidDev"/>
/// and returns the capabilities (size, frame rate, bit rate) advertised at the
/// highest color depth seen; entries at lower depths are discarded.
/// </summary>
/// <remarks>
/// I used to use SharpDX.MediaFoundation to enumerate all camera and its supported resolution
/// however, according to https://stackoverflow.com/questions/24612174/mediafoundation-can%C2%B4t-find-video-capture-emulator-driver-but-directshow-does,
/// MediaFoundation cannot find virtual camera, so I turned to use IPin.EnumMediaTypes to fetch supported resolution
/// https://stackoverflow.com/questions/20414099/videocamera-get-supported-resolutions
/// </remarks>
/// <param name="vidDev">Capture device to inspect.</param>
private static IEnumerable <VideoCapabilities> GetAllAvailableResolution(DsDevice vidDev)
{
    int hr, bitCount = 0;
    IBaseFilter sourceFilter;
    var m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
    hr = m_FilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
    DsError.ThrowExceptionForHR(hr);
    var pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
    var availableResolutions = new List <VideoCapabilities>();
    VideoInfoHeader v = new VideoInfoHeader();
    IEnumMediaTypes mediaTypeEnum;
    hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
    DsError.ThrowExceptionForHR(hr);
    AMMediaType[] mediaTypes = new AMMediaType[1];
    IntPtr fetched = IntPtr.Zero;
    hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
    DsError.ThrowExceptionForHR(hr);
    while (fetched != null && mediaTypes[0] != null)
    {
        Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
        if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
        {
            // Keep only entries at the deepest color format seen so far.
            if (v.BmiHeader.BitCount > bitCount)
            {
                availableResolutions.Clear();
                bitCount = v.BmiHeader.BitCount;
            }
            VideoCapabilities cap = new VideoCapabilities();
            cap.Height = v.BmiHeader.Height;
            cap.Width = v.BmiHeader.Width;
            //the unit of AvgTimePerFrame is 100 nanoseconds,
            //and 10^9 nanosenconds = 1 second
            cap.FrameRate = (int)(1000_000_000 / 100 / v.AvgTimePerFrame);
            cap.BitRate = v.BitRate;
            availableResolutions.Add(cap);
        }
        // BUG FIX: each enumerated AMMediaType (CoTaskMem format block) was
        // leaked. Free it and clear the slot so the loop also terminates if
        // Next() leaves the array untouched at end of stream.
        DsUtils.FreeAMMediaType(mediaTypes[0]);
        mediaTypes[0] = null;
        hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        DsError.ThrowExceptionForHR(hr);
    }
    // BUG FIX: release the COM objects created above (previously leaked).
    Marshal.ReleaseComObject(mediaTypeEnum);
    Marshal.ReleaseComObject(pRaw2);
    Marshal.ReleaseComObject(sourceFilter);
    Marshal.ReleaseComObject(m_FilterGraph2);
    return (availableResolutions);
}
/// <summary>
/// (Re)binds the camera at <paramref name="index"/>: enumerates video input
/// devices, instantiates the chosen one in a fresh graph, caches its
/// IAMCameraControl in theCamera, refreshes properties and status, and — the
/// first time a camera becomes ready — publishes the full device list to the UI.
/// On any failure, cameraReady is cleared and an error status is reported.
/// </summary>
/// <param name="index">Index into the video input device list (default 0).</param>
public void reloadCamera(int index = 0)
{
    // Serialize camera rebinding against concurrent callers.
    lock (_locker)
    {
        camIndex = index;
        try
        {
            //
            // Get the camera devices
            // TODO:: Provide the control system with a list of cameras to select from
            // TODO:: Provide a right click menu for selecting the camera to be controlled
            // TODO:: Event for sending out-of-turn status information to the control system
            //
            DsDevice[] capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
            //
            // Get the graphbuilder object
            //
            IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;
            //
            // add the video input device
            // NOTE(review): camIndex out of range throws here and is reported as
            // "unable to bind controls" by the catch below.
            //
            IBaseFilter camFilter = null;
            int hr = graphBuilder.AddSourceFilterForMoniker(capDevices[camIndex].Mon, null, capDevices[camIndex].Name, out camFilter);
            DsError.ThrowExceptionForHR(hr);
            //
            // Camera control object
            //
            theCamera = camFilter as IAMCameraControl;
            getProperties();
            updateStatus("Camera", "Selected: " + capDevices[camIndex].Name, false);
            // Publish the camera list to the UI only on the first successful bind.
            if (!cameraReady)
            {
                cameraReady = true;
                string[] lstCameras = new string[capDevices.Length];
                for (int i = 0; i < capDevices.Length; i++)
                {
                    lstCameras[i] = capDevices[i].Name;
                }
                updateCamList(lstCameras, camIndex);
            }
        }
        catch
        {
            // Best-effort: leave the system in a "no camera" state and report it.
            cameraReady = false;
            updateStatus("Camera", "Error: unable to bind controls", false);
        }
    }
}
/// <summary>
/// Returns the maximum frame size of the given capture device, preferring the
/// dedicated still-image pin and falling back to the capture pin.
/// Returns (0, 0) when neither pin is found.
/// </summary>
/// <param name="dev">Capture device to probe.</param>
public static Point SetupGraph2(DsDevice dev)
{
    int hr;
    Point pp = new Point(0, 0);
    IBaseFilter capFilter = null;
    IFilterGraph2 m_FilterGraph2 = null;

    // Get the graphbuilder object
    m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
    try
    {
        // add the video input device
        hr = m_FilterGraph2.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // Prefer the still pin; fall back to the capture pin when absent.
        IPin mStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);
        if (mStill != null)
        {
            pp = GetMaxFrameSize(mStill);
            Marshal.ReleaseComObject(mStill);
        }
        else
        {
            IPin mCapture = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
            if (mCapture != null)
            {
                pp = GetMaxFrameSize(mCapture);
                Marshal.ReleaseComObject(mCapture);
            }
        }
    }
    finally
    {
        // BUG FIX: the finally block was empty — the filter and the graph were
        // leaked on every call.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (m_FilterGraph2 != null)
        {
            Marshal.ReleaseComObject(m_FilterGraph2);
        }
    }
    return pp;
}
/// <summary>
/// Builds and starts a capture graph for <paramref name="camDevice"/>:
/// camera → sample grabber (RGB24, callback = this) → null renderer.
/// Reads back the negotiated frame size into width/height/stride and then
/// runs the graph, after which ISampleGrabberCB callbacks begin arriving.
/// </summary>
/// <param name="camDevice">The video capture device to grab frames from.</param>
/// <exception cref="NotSupportedException">The grabber negotiated a non-VideoInfo format.</exception>
public FrameGrabber(DsDevice camDevice)
{
    IFilterGraph2 filterGraph;
    ICaptureGraphBuilder2 graphBuilder;
    IBaseFilter camBase, nullRenderer;
    ISampleGrabber sampleGrabber;
    filterGraph = new FilterGraph() as IFilterGraph2;
    mediaCtrl = filterGraph as IMediaControl;
    graphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
    HRCheck(graphBuilder.SetFiltergraph(filterGraph));
    // Add camera
    HRCheck(filterGraph.AddSourceFilterForMoniker(
                camDevice.Mon, null, camDevice.Name, out camBase));
    // Add sample grabber, constrained to uncompressed RGB24 video.
    sampleGrabber = new SampleGrabber() as ISampleGrabber;
    var mType = new AMMediaType()
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB24,
        formatType = FormatType.VideoInfo
    };
    HRCheck(sampleGrabber.SetMediaType(mType));
    DsUtils.FreeAMMediaType(mType);
    // Callback mode 1 = BufferCB receives each frame's buffer.
    HRCheck(sampleGrabber.SetCallback(this, 1));
    HRCheck(filterGraph.AddFilter(sampleGrabber as IBaseFilter, "CamGrabber"));
    // Add null renderer — frames terminate at the grabber; nothing is displayed.
    nullRenderer = new NullRenderer() as IBaseFilter;
    HRCheck(filterGraph.AddFilter(nullRenderer, "Null renderer"));
    // Render the webcam through the grabber and the renderer
    HRCheck(graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video,
                                      camBase, sampleGrabber as IBaseFilter, nullRenderer));
    // Get resulting picture size from the negotiated connection.
    mType = new AMMediaType();
    HRCheck(sampleGrabber.GetConnectedMediaType(mType));
    if (mType.formatType != FormatType.VideoInfo || mType.formatPtr == IntPtr.Zero)
    {
        throw new NotSupportedException("Unknown grabber media format");
    }
    var videoInfoHeader = Marshal.PtrToStructure(mType.formatPtr, typeof(VideoInfoHeader)) as VideoInfoHeader;
    width = videoInfoHeader.BmiHeader.Width;
    height = videoInfoHeader.BmiHeader.Height;
    Console.WriteLine("{0} x {1}", width, height);
    // Bytes per scan line (RGB24 → 3 bytes per pixel; assumes no row padding —
    // TODO confirm against the grabber's actual buffer layout).
    stride = width * (videoInfoHeader.BmiHeader.BitCount / 8);
    DsUtils.FreeAMMediaType(mType);
    // Start streaming; callbacks fire from here on.
    HRCheck(mediaCtrl.Run());
}
/// <summary>
/// Builds a graph from the first video input device and renders its output pin
/// at index 1 using the default graph-builder logic.
/// </summary>
private void Config()
{
    int hr;
    IBaseFilter pFilter;
    IFilterGraph2 ifg = new FilterGraph() as IFilterGraph2;
    IObjectWithSite iows = ifg as IObjectWithSite;
    //hr = iows.SetSite(this);
    DsDevice[] devs = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    hr = ifg.AddSourceFilterForMoniker(devs[0].Mon, null, "asdf", out pFilter);
    // BUG FIX: hr was previously ignored — a failed bind produced a null filter
    // and a confusing failure in DsFindPin below.
    DsError.ThrowExceptionForHR(hr);
    // NOTE(review): index 1 selects the device's *second* output pin — confirm
    // this is intentional (index 0 is the usual first pin).
    IPin pPin = DsFindPin.ByDirection(pFilter, PinDirection.Output, 1);
    hr = ((IGraphBuilder)ifg).Render(pPin);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Binds to the named video capture device and caches its IAMCameraControl and
/// IKsPropertySet interfaces for pan/tilt/zoom control.
/// </summary>
/// <param name="name">Exact device friendly name to bind.</param>
/// <exception cref="ApplicationException">Device not found, or a required interface is missing.</exception>
private PTZDevice(string name)
{
    var devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    var device = devices.Where(d => d.Name == name).FirstOrDefault();
    _device = device;
    if (_device == null)
    {
        throw new ApplicationException(String.Format("Couldn't find device named {0}!", name));
    }
    IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;
    IBaseFilter filter = null;
    IMoniker i = _device.Mon as IMoniker;
    // BUG FIX: the HRESULT was previously discarded — a bind failure surfaced
    // later as a confusing "Couldn't get ICamControl!" error.
    int hr = graphBuilder.AddSourceFilterForMoniker(i, null, _device.Name, out filter);
    DsError.ThrowExceptionForHR(hr);
    _camControl = filter as IAMCameraControl;
    _ksPropertySet = filter as IKsPropertySet;
    if (_camControl == null)
    {
        throw new ApplicationException("Couldn't get ICamControl!");
    }
    if (_ksPropertySet == null)
    {
        throw new ApplicationException("Couldn't get IKsPropertySet!");
    }
    //TODO: Add Absolute
    /*
     * if (type == PTZType.Relative &&
     * !(SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_PAN_RELATIVE) &&
     * SupportFor(KSProperties.CameraControlFeature.KSPROPERTY_CAMERACONTROL_TILT_RELATIVE)))
     * {
     * throw new NotSupportedException("This camera doesn't appear to support Relative Pan and Tilt");
     * }
     * /**/
    //TODO: Do I through NotSupported when methods are called or throw them now?
    //TODO: Do I check for Zoom or ignore if it's not there?
    InitZoomRanges();
}
// https://stackoverflow.com/questions/20414099/videocamera-get-supported-resolutions
/// <summary>
/// Enumerates the media types on the capture pin of <paramref name="vidDev"/>
/// and returns the resolutions advertised at the highest color depth seen
/// (entries at lower depths are discarded). Returns an empty list on failure.
/// </summary>
/// <param name="vidDev">Capture device to inspect.</param>
private List <Resolution> GetAllAvailableResolution(DsDevice vidDev)
{
    try
    {
        int hr, bitCount = 0;
        IBaseFilter sourceFilter = null;
        // NOTE(review): the graph, filter, pin and enumerator created below are
        // never released, and the enumerated AMMediaTypes are never freed — leak.
        var m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
        hr = m_FilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        var pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var AvailableResolutions = new List <Resolution>();
        VideoInfoHeader v = new VideoInfoHeader();
        IEnumMediaTypes mediaTypeEnum;
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        // NOTE(review): IntPtr.Zero is passed as pcFetched, so `fetched != null`
        // is always true for an IntPtr; loop exit relies solely on
        // mediaTypes[0] becoming null.
        IntPtr fetched = IntPtr.Zero;
        hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        while (fetched != null && mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
            if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
            {
                // Keep only resolutions at the deepest color format seen so far.
                if (v.BmiHeader.BitCount > bitCount)
                {
                    AvailableResolutions.Clear();
                    bitCount = v.BmiHeader.BitCount;
                }
                AvailableResolutions.Add(new Resolution(v.BmiHeader.Width, v.BmiHeader.Height));
            }
            hr = mediaTypeEnum.Next(1, mediaTypes, fetched);
        }
        return (AvailableResolutions);
    }
    catch (Exception ex)
    {
        //MessageBox.Show(ex.Message);
        // Best-effort: log and hand back an empty list instead of propagating.
        Console.WriteLine(ex.ToString());
        return (new List <Resolution>());
    }
}
/// <summary>
/// Binds the first video input device on the system into a fresh graph and
/// caches its IAMCameraControl interface in m_icc.
/// </summary>
private void DoSetup()
{
    m_icc = null;

    // Get the IAMCameraControl: enumerate devices, then host the first one in
    // a throwaway filter graph so its interfaces can be queried.
    DsDevice[] devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    IFilterGraph2 builder = new FilterGraph() as IFilterGraph2;

    IBaseFilter sourceFilter = null;
    int hr = builder.AddSourceFilterForMoniker(devices[0].Mon, null, devices[0].Name, out sourceFilter);
    DsError.ThrowExceptionForHR(hr);

    m_icc = sourceFilter as IAMCameraControl;
    Debug.Assert(m_icc != null);
}
/// <summary>
/// Sets a video processing amplifier property (brightness, contrast, ...) on
/// the given capture device. Failures are logged to the console.
/// </summary>
/// <param name="dsDevice">Capture device to configure.</param>
/// <param name="prop">The VideoProcAmp property to set.</param>
/// <param name="value">Property value (ignored by drivers when flag is Auto).</param>
/// <param name="flag">Auto or Manual control mode.</param>
public void SetVideoControl(DsDevice dsDevice, VideoProcAmpProperty prop, int value = 0, VideoProcAmpFlags flag = VideoProcAmpFlags.Auto)
{
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    try
    {
        // add the video input device
        int hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        IAMVideoProcAmp videoControl = capFilter as IAMVideoProcAmp;
        if (videoControl == null)
        {
            // BUG FIX: previously dereferenced without a null check — devices
            // without IAMVideoProcAmp caused a NullReferenceException.
            Console.WriteLine("Device does not expose IAMVideoProcAmp");
            return;
        }
        // BUG FIX: the Set HRESULT was ignored; failures now surface and are
        // logged by the catch below.
        hr = videoControl.Set(prop, value, flag);
        DsError.ThrowExceptionForHR(hr);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
    }
    finally
    {
        // BUG FIX: the filter and graph were previously leaked.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
}
/// <summary>
/// Probes the device identified by <paramref name="moniker"/> and returns the
/// resolutions it advertises.
/// </summary>
/// <param name="moniker">Moniker (device identification) of the camera.</param>
/// <returns>The available resolutions for the device.</returns>
public static ResolutionList GetResolutionList(IMoniker moniker)
{
    ResolutionList result = null;
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter sourceFilter = null;
    try
    {
        // Bind the moniker into a temporary graph, then query the filter.
        int hr = filterGraph.AddSourceFilterForMoniker(moniker, null, "Source Filter", out sourceFilter);
        DsError.ThrowExceptionForHR(hr);
        result = GetResolutionsAvailable(sourceFilter);
    }
    finally
    {
        // Drop COM references regardless of the outcome.
        SafeReleaseComObject(filterGraph);
        filterGraph = null;
        SafeReleaseComObject(sourceFilter);
        sourceFilter = null;
    }
    return result;
}
/// <summary>
/// Rebuilds the camSettings collection for the webcam currently selected in
/// the list box: one CamSetting per CameraControlProperty and one VideoSetting
/// per VideoProcAmpProperty, all backed by the device's filter interfaces.
/// </summary>
private void lbWebcams_SelectionChanged(object sender, SelectionChangedEventArgs e)
{
    camSettings.Clear();
    var cam = lbWebcams.SelectedItem as Webcam;
    if (cam == null)
    {
        // BUG FIX: SelectedItem is null when the selection is cleared (or the
        // item is not a Webcam) — previously this threw NullReferenceException.
        return;
    }

    // Update all settings
    IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    int hr = graphBuilder.AddSourceFilterForMoniker(cam.Moniker, null, cam.Name, out capFilter);
    // BUG FIX: the HRESULT was previously ignored.
    DsError.ThrowExceptionForHR(hr);

    foreach (var val in typeof(CameraControlProperty).GetEnumValues())
    {
        camSettings.Add(new CamSetting(capFilter as IAMCameraControl, (CameraControlProperty)(int)val));
    }
    foreach (var val in typeof(VideoProcAmpProperty).GetEnumValues())
    {
        camSettings.Add(new VideoSetting(capFilter as IAMVideoProcAmp, (VideoProcAmpProperty)(int)val));
    }
}
// CameraControl auto focus
/// <summary>
/// Sets the camera's focus: auto mode, or a manual focus value if it lies
/// within the range the driver reports. Out-of-range values are ignored.
/// </summary>
/// <param name="dev">Capture device to configure.</param>
/// <param name="focus">Manual focus value (validated against the driver's range).</param>
/// <param name="auto">true = auto focus, false = manual.</param>
public void SetupProperties(DsDevice dev, int focus, bool auto)
{
    // CameraControl
    int pMin, pMax, pSteppingDelta, pDefault;
    CameraControlFlags pFlags;
    // BUG FIX: removed an unused Mon.BindToObject(IID_IBaseFilter, out o) call —
    // the returned COM object was never used and never released (leak).

    // Get the graphbuilder object
    IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;
    // add the video input device
    IBaseFilter capFilter = null;
    int hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
    DsError.ThrowExceptionForHR(hr);

    IAMCameraControl icc = capFilter as IAMCameraControl;
    if (icc == null)
    {
        // BUG FIX: previously dereferenced without a null check — devices
        // without IAMCameraControl caused a NullReferenceException.
        return;
    }

    icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pSteppingDelta, out pDefault, out pFlags);
    pFlags = auto ? CameraControlFlags.Auto : CameraControlFlags.Manual;
    // Only apply a manual value that the driver says is valid.
    if (focus >= pMin && focus <= pMax)
    {
        icc.Set(CameraControlProperty.Focus, focus, pFlags);
    }
}
/// <summary>
/// Builds a filter graph containing the first video capture device and stores
/// its first output pin in m_IPinOut. The caller owns the returned graph.
/// </summary>
/// <returns>The populated filter graph.</returns>
/// <exception cref="Exception">No video capture devices are present.</exception>
private IFilterGraph2 BuildGraph()
{
    int hr;
    IBaseFilter ibfAVISource = null;
    IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2;

    try
    {
        DsDevice [] capDevices;

        // Get the collection of video devices
        capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
        if (capDevices.Length == 0)
        {
            throw new Exception("No video capture devices found!");
        }
        DsDevice dev = capDevices[0];

        // Add it to the graph
        hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Ds.NET CaptureDevice", out ibfAVISource);
        Marshal.ThrowExceptionForHR(hr);

        m_IPinOut = DsFindPin.ByDirection(ibfAVISource, PinDirection.Output, 0);
    }
    catch
    {
        // On failure the graph is useless — release it before propagating.
        Marshal.ReleaseComObject(graphBuilder);
        throw;
    }
    finally
    {
        // BUG FIX: ibfAVISource is null when no device exists or the bind fails;
        // ReleaseComObject(null) threw ArgumentNullException from the finally
        // block, masking the original exception.
        if (ibfAVISource != null)
        {
            Marshal.ReleaseComObject(ibfAVISource);
        }
    }
    return graphBuilder;
}
/// <summary>
/// Builds a (partially wired) preview/still-capture graph for the device:
/// inserts the source filter, locates a still/preview pin (adding a SmartTee
/// splitter when the device has neither), creates a sample grabber and a
/// default video renderer, then runs the graph. Most of the pin-connection and
/// configuration logic is currently commented out, so the graph is started
/// with the grabber/renderer unconnected — NOTE(review): verify this is the
/// intended interim state.
/// </summary>
/// <param name="dev">Capture device to build the graph around.</param>
/// <param name="iWidth">Requested width (0 = leave device default).</param>
/// <param name="iHeight">Requested height (0 = leave device default).</param>
/// <param name="iBPP">Requested bits per pixel (0 = leave device default).</param>
/// <param name="hControl">Control intended to host the video window (currently unused).</param>
private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl)
{
    int hr;
    IAMVideoControl m_VidControl = null;
    IFilterGraph2 m_FilterGraph = null;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    IPin pCaptureOut = null;
    IPin pSampleIn = null;
    IPin pRenderIn = null;
    // Get the graphbuilder object
    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
#if DEBUG
        DsROTEntry m_rot = new DsROTEntry(m_FilterGraph);
#endif
        // add the video input device
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);
        // Find the still pin
        IPin m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);
        // Didn't find one. Is there a preview pin?
        if (m_pinStill == null)
        {
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
        }
        // Still haven't found one. Need to put a splitter in so we have
        // one stream to capture the bitmap from, and one to display. Ok, we
        // don't *have* to do it that way, but we are going to anyway.
        if (m_pinStill == null)
        {
            IPin pRaw = null;
            IPin pSmart = null;
            // There is no still pin
            m_VidControl = null;
            // Add a splitter
            IBaseFilter iSmartTee = (IBaseFilter) new SmartTee();
            try
            {
                hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                DsError.ThrowExceptionForHR(hr);
                // Find the find the capture pin from the video device and the
                // input pin for the splitter, and connnect them
                pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);
                hr = m_FilterGraph.Connect(pRaw, pSmart);
                DsError.ThrowExceptionForHR(hr);
                // Now set the capture and still pins (from the splitter)
                m_pinStill = DsFindPin.ByName(iSmartTee, "Preview");
                pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");
                // If any of the default config items are set, perform the config
                // on the actual video device (rather than the splitter)
                if (iHeight + iWidth + iBPP > 0)
                {
                    //SetConfigParms(pRaw, iWidth, iHeight, iBPP);
                }
            }
            finally
            {
                // NOTE(review): pSmart/iSmartTee are compared against pRaw (a
                // different object) rather than null-checked — if a lookup
                // failed above, ReleaseComObject could be called on null here.
                if (pRaw != null)
                {
                    Marshal.ReleaseComObject(pRaw);
                }
                if (pRaw != pSmart)
                {
                    Marshal.ReleaseComObject(pSmart);
                }
                if (pRaw != iSmartTee)
                {
                    Marshal.ReleaseComObject(iSmartTee);
                }
            }
        }
        else
        {
            // Get a control pointer (used in Click())
            m_VidControl = capFilter as IAMVideoControl;
            pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
            // If any of the default config items are set
            if (iHeight + iWidth + iBPP > 0)
            {
                //SetConfigParms(m_pinStill, iWidth, iHeight, iBPP);
            }
        }
        // Get the SampleGrabber interface
        sampGrabber = new SampleGrabber() as ISampleGrabber;
        // Configure the sample grabber
        IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
        //ConfigureSampleGrabber(sampGrabber);
        pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
        // Get the default video renderer
        IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
        DsError.ThrowExceptionForHR(hr);
        pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);
        // Add the sample grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);
        /*if (m_VidControl == null)
         * {
         * // Connect the Still pin to the sample grabber
         * hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
         * DsError.ThrowExceptionForHR(hr);
         *
         * // Connect the capture pin to the renderer
         * hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
         * DsError.ThrowExceptionForHR(hr);
         * }
         * else
         * {
         * // Connect the capture pin to the renderer
         * hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
         * DsError.ThrowExceptionForHR(hr);
         *
         * // Connect the Still pin to the sample grabber
         * hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
         * DsError.ThrowExceptionForHR(hr);
         * }*/
        // Learn the video properties
        //SaveSizeInfo(sampGrabber);
        //ConfigVideoWindow(hControl);
        // Start the graph
        IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
        hr = mediaCtrl.Run();
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // Release the local pin/grabber references; the graph itself (and
        // capFilter, held alive by the graph) stay alive in m_FilterGraph.
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (pCaptureOut != null)
        {
            Marshal.ReleaseComObject(pCaptureOut);
            pCaptureOut = null;
        }
        if (pRenderIn != null)
        {
            Marshal.ReleaseComObject(pRenderIn);
            pRenderIn = null;
        }
        if (pSampleIn != null)
        {
            Marshal.ReleaseComObject(pSampleIn);
            pSampleIn = null;
        }
    }
}
/// <summary>
/// Rewrites the format of the device's first output pin: keeps the current
/// format block but overrides frame rate / width / height from the camFPS,
/// camWidth and camHeight fields (when positive) and forces the pixel format
/// to <paramref name="formatGUID"/>.
/// </summary>
/// <param name="dsDevice">Capture device to configure.</param>
/// <param name="formatGUID">Media subtype GUID to apply (e.g. MediaSubType.RGB24).</param>
public void SetVideoFormat(DsDevice dsDevice, Guid formatGUID)
{
    int hr;
    IFilterGraph2 filterGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter capFilter = null;
    IPin pPin = null;

    try
    {
        // add the video input device
        hr = filterGraph.AddSourceFilterForMoniker(dsDevice.Mon, null, "Source Filter", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        pPin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        IAMStreamConfig videoStreamConfig = pPin as IAMStreamConfig;

        // Get the existing format block
        AMMediaType mediaType = null;
        hr = videoStreamConfig.GetFormat(out mediaType);
        DsError.ThrowExceptionForHR(hr);

        // copy out the videoinfoheader
        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(mediaType.formatPtr, videoInfoHeader);

        // if overriding the framerate, set the frame rate (100 ns units)
        if (camFPS > 0)
        {
            videoInfoHeader.AvgTimePerFrame = 10000000 / camFPS;
        }
        // if overriding the width, set the width
        if (camWidth > 0)
        {
            videoInfoHeader.BmiHeader.Width = camWidth;
        }
        // if overriding the Height, set the Height
        if (camHeight > 0)
        {
            videoInfoHeader.BmiHeader.Height = camHeight;
        }

        // Copy the media structure back
        Marshal.StructureToPtr(videoInfoHeader, mediaType.formatPtr, false);
        mediaType.subType = formatGUID;

        // Set the new format
        hr = videoStreamConfig.SetFormat(mediaType);
        DsError.ThrowExceptionForHR(hr);

        DsUtils.FreeAMMediaType(mediaType);
        mediaType = null;
    }
    finally
    {
        // BUG FIX: pPin is null when AddSourceFilterForMoniker throws, and
        // Marshal.ReleaseComObject(null) raised ArgumentNullException from the
        // finally block, masking the original failure. The filter and graph
        // were also leaked.
        if (pPin != null)
        {
            Marshal.ReleaseComObject(pPin);
            pPin = null;
        }
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
}
/// <summary>
/// Returns available resolutions with RGB color system for device moniker
/// </summary>
/// <param name="moniker">Moniker (device identification) of camera.</param>
/// <returns>List of resolutions with RGB color system of device</returns>
public static ResolutionList GetResolutionList(IMoniker moniker)
{
    // The device filter must be hosted in a graph before it can be probed.
    IFilterGraph2 hostGraph = new FilterGraph() as IFilterGraph2;
    IBaseFilter deviceFilter = null;
    ResolutionList found = null;

    try
    {
        int hr = hostGraph.AddSourceFilterForMoniker(moniker, null, "Source Filter", out deviceFilter);
        DsError.ThrowExceptionForHR(hr);

        found = GetResolutionsAvailable(deviceFilter);
    }
    finally
    {
        // Release COM references whether or not probing succeeded.
        SafeReleaseComObject(hostGraph);
        hostGraph = null;
        SafeReleaseComObject(deviceFilter);
        deviceFilter = null;
    }

    return found;
}
/// <summary>
/// Walks the stream capabilities of the selected device's capture pin (the
/// results are currently discarded) and returns a freshly bound IBaseFilter
/// for the device obtained via its moniker.
/// </summary>
/// <returns>A new IBaseFilter instance for the selected device.</returns>
public IBaseFilter GetVideo()
{
    IBaseFilter baseDevice = null;
    var filterGraph = new FilterGraph() as IFilterGraph2;
    IPin pin = null;
    IntPtr ptr = IntPtr.Zero;
    try
    {
        int hr = filterGraph.AddSourceFilterForMoniker(selectedDevice.Mon, null, selectedDevice.Name, out baseDevice);
        // BUG FIX: hr was previously ignored.
        DsError.ThrowExceptionForHR(hr);

        pin = DsFindPin.ByCategory(baseDevice, PinCategory.Capture, 0);
        var streamConfig = pin as IAMStreamConfig;
        AMMediaType media;
        int iC = 0, iS = 0;
        streamConfig.GetNumberOfCapabilities(out iC, out iS);
        ptr = Marshal.AllocCoTaskMem(iS);
        for (int i = 0; i < iC; i++)
        {
            streamConfig.GetStreamCaps(i, out media, ptr);
            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, v);
            // BUG FIX: each AMMediaType returned by GetStreamCaps was leaked.
            DsUtils.FreeAMMediaType(media);
        }
    }
    finally
    {
        // BUG FIX: the caps buffer, pin, filter and graph were all leaked.
        if (ptr != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(ptr);
        }
        if (pin != null)
        {
            Marshal.ReleaseComObject(pin);
        }
        if (baseDevice != null)
        {
            Marshal.ReleaseComObject(baseDevice);
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }

    // Bind a fresh filter instance directly from the moniker for the caller.
    Guid iid = typeof(IBaseFilter).GUID;
    object source;
    selectedDevice.Mon.BindToObject(null, null, ref iid, out source);
    return (IBaseFilter)source;
}