/// <summary>
/// Binds the currently selected capture device's moniker to an <see cref="IBaseFilter"/>.
/// A temporary filter graph is used only to probe the device's stream capabilities.
/// </summary>
/// <returns>The capture device as an <see cref="IBaseFilter"/>; the caller owns the reference.</returns>
public IBaseFilter GetVideo()
{
    IBaseFilter baseDevice = null;
    IPin pin = null;
    IntPtr ptr = IntPtr.Zero;
    var filterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
        filterGraph.AddSourceFilterForMoniker(selectedDevice.Mon, null, selectedDevice.Name, out baseDevice);
        pin = DsFindPin.ByCategory(baseDevice, PinCategory.Capture, 0);
        var streamConfig = pin as IAMStreamConfig;
        int iC = 0, iS = 0;
        streamConfig.GetNumberOfCapabilities(out iC, out iS);
        ptr = Marshal.AllocCoTaskMem(iS);
        for (int i = 0; i < iC; i++)
        {
            AMMediaType media;
            streamConfig.GetStreamCaps(i, out media, ptr);
            var v = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, v);
            // BUG FIX: media types returned by GetStreamCaps must be freed by the caller.
            DsUtils.FreeAMMediaType(media);
        }
    }
    finally
    {
        // BUG FIX: the caps buffer and the probing COM objects used to leak on every call.
        if (ptr != IntPtr.Zero) { Marshal.FreeCoTaskMem(ptr); }
        if (pin != null) { Marshal.ReleaseComObject(pin); }
        if (baseDevice != null) { Marshal.ReleaseComObject(baseDevice); }
        if (filterGraph != null) { Marshal.ReleaseComObject(filterGraph); }
    }
    Guid iid = typeof(IBaseFilter).GUID;
    object source;
    selectedDevice.Mon.BindToObject(null, null, ref iid, out source);
    return (IBaseFilter)source;
}
/// <summary>
/// Connects the capture filter's output pin to a VMR9 renderer's input pin inside
/// <paramref name="builder"/>, rendering video into <paramref name="target"/>.
/// </summary>
/// <param name="builder">The filter graph being assembled.</param>
/// <param name="capfiler">Video capture source filter (parameter-name typo kept for caller compatibility).</param>
/// <param name="renderer">A VMR9 filter instance; must implement <see cref="IVMRFilterConfig9"/>.</param>
/// <param name="target">Control that will host the rendered video.</param>
public void SetupVideoStream(IFilterGraph2 builder, IBaseFilter capfiler, IBaseFilter renderer, Control target)
{
    // Chain up the pins of the downstream filters.
    // No frame capture is done here, so no SmartTee splitter is needed.
    IPin pin_out = null;
    IPin pin_in = null;
    //==== Streaming Circuit ====
    try
    {
        // From Win7 onward VideoMixingRenderer7 is the preferred default, and
        // VideoMixingRenderer9 is even better; the legacy VideoRenderer cannot
        // use the GPU's special capabilities.
        //renderer = (IBaseFilter)new VideoMixingRenderer9();
        SetupRenderWindow((IVMRFilterConfig9)renderer, target);
        builder.AddFilter(renderer, "Video Mixing Renderer 9");
        pin_in = DsFindPin.ByDirection(renderer, PinDirection.Input, 0);

        // After the filter is installed, find the matching pin on the capture
        // device and hook it up — same idea as soldering a circuit.
        pin_out = DsFindPin.ByCategory(capfiler, PinCategory.Capture, 0);
        builder.Connect(pin_out, pin_in);
    }
    finally
    {
        // TODO: this should be done via Dispose instead of calling Marshal directly.
        if (null != pin_out)
        {
            Marshal.ReleaseComObject(pin_out);
        }
        if (null != pin_in)
        {
            Marshal.ReleaseComObject(pin_in);
        }
    }
}
/// <summary>
/// Enumerates the resolutions exposed on the capture pin of the video input device
/// at <paramref name="deviceIndex"/>, keeping only entries with the highest reported bit depth.
/// </summary>
/// <param name="deviceIndex">Index into the system's video input device list.</param>
/// <returns>The supported resolutions, or null when enumeration fails (kept for caller compatibility).</returns>
public static List <CameraResolution> GetAvailableResolutions(int deviceIndex) // DsDevice vidDev)
{
    IBaseFilter sourceFilter = null;
    IPin pRaw2 = null;
    IEnumMediaTypes mediaTypeEnum = null;
    IFilterGraph2 mFilterGraph2 = null;
    try
    {
        DsDevice[] captureDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
        DsDevice vidDev = captureDevices[deviceIndex];
        int hr;
        int max = 0;
        int bitCount = 0;
        mFilterGraph2 = new FilterGraph() as IFilterGraph2;
        hr = mFilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var AvailableResolutions = new List <CameraResolution>();
        VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
        // BUG FIX: the old "fetched != null" guard compared an IntPtr to null, which is
        // always true — mediaTypes[0] alone terminates the loop.
        while (mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, videoInfoHeader);
            if (videoInfoHeader.BmiHeader.Size != 0 && videoInfoHeader.BmiHeader.BitCount != 0)
            {
                // A higher bit depth supersedes everything collected so far.
                if (videoInfoHeader.BmiHeader.BitCount > bitCount)
                {
                    AvailableResolutions.Clear();
                    max = 0;
                    bitCount = videoInfoHeader.BmiHeader.BitCount;
                }
                CameraResolution availableResolution = new CameraResolution();
                availableResolution.HorizontalResolution = videoInfoHeader.BmiHeader.Width;
                availableResolution.VerticalResolution = videoInfoHeader.BmiHeader.Height;
                AvailableResolutions.Add(availableResolution);
                if (videoInfoHeader.BmiHeader.Width > max || videoInfoHeader.BmiHeader.Height > max)
                {
                    max = (Math.Max(videoInfoHeader.BmiHeader.Width, videoInfoHeader.BmiHeader.Height));
                }
            }
            // BUG FIX: media types handed out by IEnumMediaTypes must be freed by the caller.
            DsUtils.FreeAMMediaType(mediaTypes[0]);
            mediaTypes[0] = null;
            hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
        }
        return (AvailableResolutions);
    }
    catch (Exception)
    {
        return (null);
    }
    finally
    {
        // BUG FIX: release the COM objects that used to leak on every call.
        if (mediaTypeEnum != null) { Marshal.ReleaseComObject(mediaTypeEnum); }
        if (pRaw2 != null) { Marshal.ReleaseComObject(pRaw2); }
        if (sourceFilter != null) { Marshal.ReleaseComObject(sourceFilter); }
        if (mFilterGraph2 != null) { Marshal.ReleaseComObject(mFilterGraph2); }
    }
}
/// <summary>
/// Builds a newline-separated, de-duplicated list of the video formats ("WxH @fps")
/// exposed on a capture device's capture pin.
/// </summary>
/// <param name="deviceId">Index into the system's video input device list.</param>
/// <returns>The format list, or "No device information available" when the device is absent or fails early.</returns>
public static string GetVideoFormats(int deviceId)
{
    var AvailableFormats = "No device information available";
    var capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);
    if (capDevices.Length > 0 && deviceId < capDevices.Length)
    {
        var device = capDevices[deviceId];
        if (device != null)
        {
            IBaseFilter sourceFilter = null;
            IPin pRaw2 = null;
            IEnumMediaTypes mediaTypeEnum = null;
            IFilterGraph2 m_FilterGraph2 = null;
            try
            {
                int hr;
                m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
                hr = m_FilterGraph2.AddSourceFilterForMoniker(device.Mon, null, device.Name, out sourceFilter);
                pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
                VideoInfoHeader v = new VideoInfoHeader();
                hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
                AMMediaType[] mediaTypes = new AMMediaType[1];
                hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
                AvailableFormats = "";
                // BUG FIX: the old "fetched != null" compared an IntPtr to null
                // (always true); mediaTypes[0] alone terminates the loop.
                while (mediaTypes[0] != null)
                {
                    Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
                    if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
                    {
                        // AvgTimePerFrame is in 100 ns units, so fps = 10^7 / AvgTimePerFrame.
                        var fps = Math.Floor((1 / ((v.AvgTimePerFrame * 100) * 0.000000001))).ToString();
                        var format = v.BmiHeader.Width.ToString() + "x" + v.BmiHeader.Height.ToString() + " @" + fps.ToString() + " fps\n";
                        if (!AvailableFormats.Contains(format))
                        {
                            AvailableFormats += format;
                        }
                    }
                    // BUG FIX: free each media type handed out by the enumerator.
                    DsUtils.FreeAMMediaType(mediaTypes[0]);
                    mediaTypes[0] = null;
                    hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
                }
            }
            catch
            {
                return (AvailableFormats);
            }
            finally
            {
                // BUG FIX: release the COM objects that used to leak.
                if (mediaTypeEnum != null) { Marshal.ReleaseComObject(mediaTypeEnum); }
                if (pRaw2 != null) { Marshal.ReleaseComObject(pRaw2); }
                if (sourceFilter != null) { Marshal.ReleaseComObject(sourceFilter); }
                if (m_FilterGraph2 != null) { Marshal.ReleaseComObject(m_FilterGraph2); }
            }
        }
    }
    return (AvailableFormats);
}
/// <summary>
/// Enumerates a device's capture-pin media types and returns the supported
/// resolutions/frame rates, keeping only those with the highest reported bit depth.
/// </summary>
/// <param name="vidDev">The video input device to probe.</param>
/// <exception cref="COMException">Any DirectShow call fails (via DsError.ThrowExceptionForHR).</exception>
private static IEnumerable <VideoCapabilities> GetAllAvailableResolution(DsDevice vidDev)
{
    //I used to use SharpDX.MediaFoundation to enumerate all camera and its supported resolution
    //however, according to https://stackoverflow.com/questions/24612174/mediafoundation-can%C2%B4t-find-video-capture-emulator-driver-but-directshow-does,
    //MediaFoundation cannot find virtual camera, so I turned to use IPin.EnumMediaTypes to fetch supported resolution
    //https://stackoverflow.com/questions/20414099/videocamera-get-supported-resolutions
    int hr, bitCount = 0;
    IBaseFilter sourceFilter = null;
    IPin pRaw2 = null;
    IEnumMediaTypes mediaTypeEnum = null;
    var m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
    try
    {
        hr = m_FilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        DsError.ThrowExceptionForHR(hr);
        pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var availableResolutions = new List <VideoCapabilities>();
        VideoInfoHeader v = new VideoInfoHeader();
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        DsError.ThrowExceptionForHR(hr);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
        DsError.ThrowExceptionForHR(hr);
        // BUG FIX: the old "fetched != null" compared an IntPtr to null (always true).
        while (mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
            if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
            {
                if (v.BmiHeader.BitCount > bitCount)
                {
                    availableResolutions.Clear();
                    bitCount = v.BmiHeader.BitCount;
                }
                VideoCapabilities cap = new VideoCapabilities();
                cap.Height = v.BmiHeader.Height;
                cap.Width = v.BmiHeader.Width;
                //the unit of AvgTimePerFrame is 100 nanoseconds,
                //and 10^9 nanosenconds = 1 second
                cap.FrameRate = (int)(1000_000_000 / 100 / v.AvgTimePerFrame);
                cap.BitRate = v.BitRate;
                availableResolutions.Add(cap);
            }
            // BUG FIX: free each media type handed out by the enumerator.
            DsUtils.FreeAMMediaType(mediaTypes[0]);
            mediaTypes[0] = null;
            hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);
        }
        return (availableResolutions);
    }
    finally
    {
        // BUG FIX: release the COM objects that used to leak on every call.
        if (mediaTypeEnum != null) { Marshal.ReleaseComObject(mediaTypeEnum); }
        if (pRaw2 != null) { Marshal.ReleaseComObject(pRaw2); }
        if (sourceFilter != null) { Marshal.ReleaseComObject(sourceFilter); }
        if (m_FilterGraph2 != null) { Marshal.ReleaseComObject(m_FilterGraph2); }
    }
}
/// <summary>
/// Enumerates the stream capabilities of the currently selected capture device and
/// fills the List/Type collections with "W x H [subtype]" entries ("Auto" first).
/// Errors are logged rather than thrown.
/// </summary>
public void Find()
{
    try
    {
        _class.Debug.Log("[0] Find video device resolution");
        List = new List <string> { "Auto" };
        Type = new List <AMMediaType> { null };
        var dev = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice)[_class.Capture.CurrentDevice];
        // ReSharper disable once SuspiciousTypeConversion.Global
        var filterGraph = (IFilterGraph2) new FilterGraph();
        IBaseFilter baseDev;
        filterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out baseDev);
        var pin = DsFindPin.ByCategory(baseDev, PinCategory.Capture, 0);
        // ReSharper disable once SuspiciousTypeConversion.Global
        var streamConfig = (IAMStreamConfig)pin;
        int iC, iS;
        streamConfig.GetNumberOfCapabilities(out iC, out iS);
        var ptr = Marshal.AllocCoTaskMem(iS);
        try
        {
            for (var i = 0; i < iC; i++)
            {
                AMMediaType media;
                streamConfig.GetStreamCaps(i, out media, ptr);
                var v = new VideoInfoHeader();
                Marshal.PtrToStructure(media.formatPtr, v);
                if (v.BmiHeader.Width == 0)
                {
                    continue;
                }
                var strRes = v.BmiHeader.Width + " x " + v.BmiHeader.Height;
                strRes += " [" + CheckMediaType(media) + "]";
                // NOTE: media is intentionally NOT freed here — it is stored in Type
                // and consumed later when a resolution is selected.
                Type.Add(media);
                List.Add(strRes);
                _class.Debug.Log("->" + strRes);
            }
        }
        finally
        {
            // BUG FIX: the caps buffer used to leak on every call.
            Marshal.FreeCoTaskMem(ptr);
        }
        _class.System.strCurrentResolution = List[Current];
        _class.Debug.Log("");
    }
    catch (Exception e)
    {
        _class.Debug.Log("[ERR] fail find video resolution : " + e);
    }
}
/// <summary>
/// Returns every resolution reported on the device's capture pin.
/// Entries with a higher bit depth reset the "max" tracker but the list itself is
/// deliberately not cleared (see commented-out line, kept from the original).
/// </summary>
/// <param name="vidDev">The video input device to probe.</param>
public static List <Resolution> GetAllAvailableResolution(DsDevice vidDev)
{
    IBaseFilter sourceFilter = null;
    IPin pRaw2 = null;
    IEnumMediaTypes mediaTypeEnum = null;
    IFilterGraph2 m_FilterGraph2 = null;
    try
    {
        int hr;
        int max = 0;
        int bitCount = 0;
        m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
        hr = m_FilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var AvailableResolutions = new List <Resolution>();
        VideoInfoHeader v = new VideoInfoHeader();
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
        // BUG FIX: "fetched != null" compared an IntPtr to null (always true).
        while (mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
            if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
            {
                if (v.BmiHeader.BitCount > bitCount)
                {
                    //AvailableResolutions.Clear();
                    max = 0;
                    bitCount = v.BmiHeader.BitCount;
                }
                AvailableResolutions.Add(new Resolution(v.BmiHeader.Width, v.BmiHeader.Height, v.BmiHeader.BitCount));
                if (v.BmiHeader.Width > max || v.BmiHeader.Height > max)
                {
                    max = (Math.Max(v.BmiHeader.Width, v.BmiHeader.Height));
                }
            }
            // BUG FIX: free each media type handed out by the enumerator.
            DsUtils.FreeAMMediaType(mediaTypes[0]);
            mediaTypes[0] = null;
            hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
        }
        return (AvailableResolutions);
    }
    catch (Exception)
    {
        // BUG FIX: "throw ex;" reset the stack trace; a bare "throw;" preserves it.
        throw;
        //return new List<Resolution>();
    }
    finally
    {
        // BUG FIX: release the COM objects that used to leak on every call.
        if (mediaTypeEnum != null) { Marshal.ReleaseComObject(mediaTypeEnum); }
        if (pRaw2 != null) { Marshal.ReleaseComObject(pRaw2); }
        if (sourceFilter != null) { Marshal.ReleaseComObject(sourceFilter); }
        if (m_FilterGraph2 != null) { Marshal.ReleaseComObject(m_FilterGraph2); }
    }
}
/// <summary>
/// Queries a capture device for its maximum frame size, preferring the Still pin
/// and falling back to the Capture pin.
/// </summary>
/// <param name="dev">The capture device to probe.</param>
/// <returns>Maximum width/height as a Point, or (0, 0) when neither pin exists.</returns>
public static Point SetupGraph2(DsDevice dev)
{
    // GlobalVariable.DebugMessage("winmate", "SetupGraph2 start", GlobalVariable.bDebug);// brian add
    int hr;
    Point pp = new Point(0, 0);
    IBaseFilter capFilter = null;
    IFilterGraph2 m_FilterGraph2 = null;
    // Get the graphbuilder object
    m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
    try
    {
        // add the video input device
        hr = m_FilterGraph2.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);
        IPin mStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);
        if (mStill != null)
        {
            pp = GetMaxFrameSize(mStill);
            // GlobalVariable.DebugMessage("winmate", "still max=" + pp.X + "x" + pp.Y, GlobalVariable.bDebug);
            // BUG FIX: the nested "if (mStill != null)" re-check was redundant — we are
            // already inside that branch.
            Marshal.ReleaseComObject(mStill);
        }
        else
        {
            IPin mCapture = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
            if (mCapture != null)
            {
                pp = GetMaxFrameSize(mCapture);
                // GlobalVariable.DebugMessage("winmate", "capture max=" + pp.X + "x" + pp.Y, GlobalVariable.bDebug);
                Marshal.ReleaseComObject(mCapture);
            }
        }
    }
    finally
    {
        // BUG FIX: the finally block was empty; the source filter and the graph leaked.
        if (capFilter != null) { Marshal.ReleaseComObject(capFilter); }
        if (m_FilterGraph2 != null) { Marshal.ReleaseComObject(m_FilterGraph2); }
    }
    // GlobalVariable.DebugMessage("winmate", "SetupGraph2 end", GlobalVariable.bDebug);// brian add
    return (pp);
}
/// <summary>
/// Connects the capture filter's output pin to a VMR9 renderer's input pin inside
/// <paramref name="builder"/>, rendering into the window identified by
/// <paramref name="render_handle"/> clipped to <paramref name="render_rect"/>.
/// </summary>
/// <param name="builder">The filter graph being assembled.</param>
/// <param name="capfiler">Video capture source filter (parameter-name typo kept for caller compatibility).</param>
/// <param name="renderer">A VMR9 filter instance; must implement <see cref="IVMRFilterConfig9"/>.</param>
/// <param name="render_handle">Native window handle that will host the video.</param>
/// <param name="render_rect">Target rectangle inside that window.</param>
private void SetupVideoStream(IFilterGraph2 builder, IBaseFilter capfiler, IBaseFilter renderer, IntPtr render_handle, Rectangle render_rect)
{
    // Chain up the pins of the downstream filters.
    // No frame capture is done here, so no SmartTee splitter is needed.
    IPin pin_out = null;
    IPin pin_in = null;
    //==== Streaming Circuit ====
    try
    {
        // From Win7 onward VideoMixingRenderer7 is the preferred default, and
        // VideoMixingRenderer9 is even better; the legacy VideoRenderer cannot
        // use the GPU's special capabilities.
        //renderer = (IBaseFilter)new VideoMixingRenderer9();
        SetupRenderWindow((IVMRFilterConfig9)renderer, render_handle, render_rect);
        builder.AddFilter(renderer, "Video Mixing Renderer 9");
        pin_in = DsFindPin.ByDirection(renderer, PinDirection.Input, 0);

        // After the filter is installed, find the matching pin on the capture
        // device and hook it up — same idea as soldering a circuit.
        pin_out = DsFindPin.ByCategory(capfiler, PinCategory.Capture, 0);

        //todo: setup capture format
        //IAMStreamConfig pin_cfg = (IAMStreamConfig) pin_out;
        //AMMediaType type = null;
        //int count = 0, size = 0;
        //pin_cfg.GetNumberOfCapabilities(out count, out size);
        //for (int i = 0; i < count; i++)
        //{
        //    //IntPtr scc = Marshal.AllocHGlobal(Marshal.SizeOf<VideoStreamConfigCaps>());
        //    VideoStreamConfigCaps cap = new VideoStreamConfigCaps();
        //    GCHandle handle = GCHandle.Alloc(cap, GCHandleType.Pinned);
        //    IntPtr scc = handle.AddrOfPinnedObject();
        //    //Marshal.StructureToPtr<VideoStreamConfigCaps>(cap, scc, false);
        //    pin_cfg.GetStreamCaps(i, out type, scc);
        //    handle.Free();
        //}

        builder.Connect(pin_out, pin_in);
    }
    finally
    {
        // TODO: this should be done via Dispose instead of calling Marshal directly.
        if (null != pin_out)
        {
            Marshal.ReleaseComObject(pin_out);
        }
        if (null != pin_in)
        {
            Marshal.ReleaseComObject(pin_in);
        }
    }
}
/// <summary>
/// Collects every valid (non-zero width/height/bpp) video media type exposed on the
/// capture pin of the current capture filter, sorted using this instance as comparer.
/// </summary>
/// <returns>The sorted media types.</returns>
/// <exception cref="NullReferenceException">The capture filter or its capture pin is missing.</exception>
/// <exception cref="COMException">Media type enumeration fails.</exception>
public List <VideoInfoHeader> GetMediaTypes()
{
    if (this.videoCapture == null)
    {
        throw new NullReferenceException("IBaseFilter::VideoCapture");
    }
    List <VideoInfoHeader> mediaTypes = new List <VideoInfoHeader>();
    IPin pin = DsFindPin.ByCategory(this.videoCapture, PinCategory.Capture, 0);
    if (pin == null)
    {
        throw new NullReferenceException("IPin");
    }
    IEnumMediaTypes enumMediaTypes = null;
    try
    {
        int hr = pin.EnumMediaTypes(out enumMediaTypes);
        if (hr < 0)
        {
            throw new COMException("IPin::EnumMediaTypes", hr);
        }
        AMMediaType[] nextMediaTypes = new AMMediaType[1];
        hr = enumMediaTypes.Next(1, nextMediaTypes, IntPtr.Zero);
        if (hr < 0)
        {
            throw new COMException("IEnumMediaTypes::Next", hr);
        }
        AMMediaType nextMediaType = nextMediaTypes[0];
        while (nextMediaType != null)
        {
            VideoInfoHeader mediaType = new VideoInfoHeader();
            Marshal.PtrToStructure(nextMediaType.formatPtr, mediaType);
            DsUtils.FreeAMMediaType(nextMediaType);
            if (mediaType.BmiHeader.Width > 0 && mediaType.BmiHeader.Height > 0 && mediaType.BmiHeader.BitCount > 0)
            {
                mediaTypes.Add(mediaType);
            }
            hr = enumMediaTypes.Next(1, nextMediaTypes, IntPtr.Zero);
            if (hr < 0)
            {
                throw new COMException("IEnumMediaTypes::Next", hr);
            }
            nextMediaType = nextMediaTypes[0];
        }
    }
    finally
    {
        // BUG FIX: the pin and the enumerator COM objects used to leak.
        if (enumMediaTypes != null) { Marshal.ReleaseComObject(enumMediaTypes); }
        Marshal.ReleaseComObject(pin);
    }
    mediaTypes.Sort(this);
    return (mediaTypes);
}
// https://stackoverflow.com/questions/20414099/videocamera-get-supported-resolutions
/// <summary>
/// Returns the resolutions on the device's capture pin, keeping only the entries
/// with the highest reported bit depth.
/// </summary>
/// <param name="vidDev">The video input device to probe.</param>
/// <returns>The supported resolutions; empty on failure (kept for caller compatibility).</returns>
private List <Resolution> GetAllAvailableResolution(DsDevice vidDev)
{
    IBaseFilter sourceFilter = null;
    IPin pRaw2 = null;
    IEnumMediaTypes mediaTypeEnum = null;
    IFilterGraph2 m_FilterGraph2 = null;
    try
    {
        int hr, bitCount = 0;
        m_FilterGraph2 = new FilterGraph() as IFilterGraph2;
        hr = m_FilterGraph2.AddSourceFilterForMoniker(vidDev.Mon, null, vidDev.Name, out sourceFilter);
        pRaw2 = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        var AvailableResolutions = new List <Resolution>();
        VideoInfoHeader v = new VideoInfoHeader();
        hr = pRaw2.EnumMediaTypes(out mediaTypeEnum);
        AMMediaType[] mediaTypes = new AMMediaType[1];
        hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
        // BUG FIX: "fetched != null" compared an IntPtr to null (always true).
        while (mediaTypes[0] != null)
        {
            Marshal.PtrToStructure(mediaTypes[0].formatPtr, v);
            if (v.BmiHeader.Size != 0 && v.BmiHeader.BitCount != 0)
            {
                // A higher bit depth supersedes everything collected so far.
                if (v.BmiHeader.BitCount > bitCount)
                {
                    AvailableResolutions.Clear();
                    bitCount = v.BmiHeader.BitCount;
                }
                AvailableResolutions.Add(new Resolution(v.BmiHeader.Width, v.BmiHeader.Height));
            }
            // BUG FIX: free each media type handed out by the enumerator.
            DsUtils.FreeAMMediaType(mediaTypes[0]);
            mediaTypes[0] = null;
            hr = mediaTypeEnum.Next(1, mediaTypes, IntPtr.Zero);
        }
        return (AvailableResolutions);
    }
    catch (Exception ex)
    {
        //MessageBox.Show(ex.Message);
        Console.WriteLine(ex.ToString());
        return (new List <Resolution>());
    }
    finally
    {
        // BUG FIX: release the COM objects that used to leak on every call.
        if (mediaTypeEnum != null) { Marshal.ReleaseComObject(mediaTypeEnum); }
        if (pRaw2 != null) { Marshal.ReleaseComObject(pRaw2); }
        if (sourceFilter != null) { Marshal.ReleaseComObject(sourceFilter); }
        if (m_FilterGraph2 != null) { Marshal.ReleaseComObject(m_FilterGraph2); }
    }
}
/// <summary>
/// Builds the filter graph for the given device: adds the source filter, caches its
/// capture and still pins, then configures both pins with the requested format.
/// Errors are reported on the console rather than thrown.
/// </summary>
/// <param name="dev">The capture device to add to the graph.</param>
/// <param name="RequestedHeight">Desired frame height.</param>
/// <param name="RequestedWidth">Desired frame width.</param>
/// <param name="iBPP">Desired bits per pixel.</param>
private void SetupGraph(DsDevice dev, int RequestedHeight, int RequestedWidth, short iBPP)
{
    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
        // Add the physical device as the graph's source filter.
        int hresult = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hresult);

        // Cache both pins before configuring them.
        m_pinCapture = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
        m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);

        SetupCapturePin(RequestedHeight, RequestedWidth, iBPP);
        SetupStillPin(RequestedHeight, RequestedWidth, iBPP);
    }
    catch (Exception ex)
    {
        Console.WriteLine("Setup Graph Error: " + ex.Message);
    }
}
/// <summary>
/// Queries the capture graph for the TV tuner, crossbar and TV-audio control
/// interfaces of the current video capture filter, and builds the crossbar helper
/// from the filter's analog-video input pin. Best-effort: any interface that is
/// not found simply stays null.
/// </summary>
private void GetControlInterface()
{
    this.tuner = null;
    this.crossbar = null;
    object o;
    int hr = this.captureGraphBuilder.FindInterface(null, null, this.videoCaptureFilter, typeof(IAMTVTuner).GUID, out o);
    if (hr >= 0)
    {
        this.tuner = o as IAMTVTuner;
        o = null;
        // NOTE(review): the crossbar is stored as IBaseFilter even though
        // IAMCrossbar was requested — confirm this is intentional.
        hr = this.captureGraphBuilder.FindInterface(null, null, this.videoCaptureFilter, typeof(IAMCrossbar).GUID, out o);
        if (hr >= 0)
        {
            this.crossbar = o as IBaseFilter;
            o = null;
        }
        // Use the crossbar class to help us sort out all the possible video inputs
        // The class needs to be given the capture filters ANALOGVIDEO input pin
        IPin pinVideo = DsFindPin.ByCategory(this.videoCaptureFilter, PinCategory.AnalogVideoIn, 0);
        if (pinVideo != null)
        {
            try
            {
                this.crossbarHelper = new CrossbarHelper(pinVideo);
            }
            // Best-effort: a device without a usable crossbar is acceptable.
            catch {}
            Marshal.ReleaseComObject(pinVideo);
        }
        hr = this.captureGraphBuilder.FindInterface(null, null, this.videoCaptureFilter, typeof(IAMTVAudio).GUID, out o);
        if (hr >= 0)
        {
            this.amTVAudio = o as IAMTVAudio;
            o = null;
        }
    }
}
/// <summary>
/// Probes the camera's capture pin via IAMStreamConfig and returns every supported
/// configuration (size, bpp, media subtype) whose bit depth passes BPPIsValid, sorted.
/// </summary>
/// <returns>The sorted supported configurations.</returns>
internal override WebcamConfiguration[] QueryFormats()
{
    List <WebcamConfiguration> result = new List <WebcamConfiguration>();
    // BUG FIX: removed the duplicated "cameraDevice = cameraDevice =" assignment.
    DsDevice cameraDevice = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice)[m_cameraDeviceIndex];
    IFilterGraph2 filterGraph = null;
    IBaseFilter cam = null;
    IPin camOutPin = null;
    try
    {
        filterGraph = (IFilterGraph2) new FilterGraph();
        DsError.ThrowExceptionForHR(filterGraph.AddSourceFilterForMoniker(cameraDevice.Mon, null, cameraDevice.Name, out cam));
        camOutPin = DsFindPin.ByCategory(cam, PinCategory.Capture, 0);
        if (camOutPin != null)
        {
            IAMStreamConfig config = (IAMStreamConfig)camOutPin;
            int piCount, piSize;
            config.GetNumberOfCapabilities(out piCount, out piSize);
            // GetStreamCaps needs a caller-provided caps buffer; pin a managed one.
            byte[] temp = new byte[piSize];
            GCHandle tempHandle = GCHandle.Alloc(temp, GCHandleType.Pinned);
            try
            {
                for (int x = 0; x < piCount; x++)
                {
                    AMMediaType mediaType = null;
                    try
                    {
                        DsError.ThrowExceptionForHR(config.GetStreamCaps(x, out mediaType, tempHandle.AddrOfPinnedObject()));
                        VideoInfoHeader v = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
                        if (BPPIsValid(v.BmiHeader.BitCount))
                        {
                            result.Add(new WebcamConfiguration(new Size(v.BmiHeader.Width, v.BmiHeader.Height), v.BmiHeader.BitCount, mediaType.subType));
                        }
                        else
                        {
                            //System.Diagnostics.Debug.WriteLine("BPP " + v.BmiHeader.BitCount + " was not accepted!");
                        }
                    }
                    finally
                    {
                        if (mediaType != null)
                        {
                            DsUtils.FreeAMMediaType(mediaType);
                            mediaType = null;
                        }
                    }
                }
            }
            finally
            {
                tempHandle.Free();
            }
        }
    }
    finally
    {
        if (camOutPin != null)
        {
            Marshal.ReleaseComObject(camOutPin);
            camOutPin = null;
        }
        // BUG FIX: the source filter itself was never released and leaked.
        if (cam != null)
        {
            Marshal.ReleaseComObject(cam);
            cam = null;
        }
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
            filterGraph = null;
        }
    }
    result.Sort();
    return (result.ToArray());
}
/// <summary>
/// Builds the capture graph: camera (forced to 1280x720 MJPG @ ~30 fps) -> MJPG
/// decompressor -> color space converter -> sample grabber (ARGB32) with this
/// instance registered as the buffer callback.
/// </summary>
/// <param name="dsDevice">The capture device to build the graph around.</param>
private void BuildGraph(DirectShowLib.DsDevice dsDevice)
{
    int hr = 0;
    pGraph = new FilterGraph() as IFilterGraph2;

    //graph builder
    ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

    try
    {
        hr = pBuilder.SetFiltergraph(pGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add camera
        IBaseFilter camera;
        //hr = pGraph.FindFilterByName(dsDevice.Name, out camera);
        hr = ((IFilterGraph2)pGraph).AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out camera);
        DsError.ThrowExceptionForHR(hr);
        hr = pGraph.AddFilter(camera, "camera");
        DsError.ThrowExceptionForHR(hr);

        // Set format for camera: 1280x720 MJPG ('MJPG' fourcc = 1196444237),
        // AvgTimePerFrame is in 100 ns units so 333333 is ~30 fps.
        AMMediaType pmt = new AMMediaType();
        pmt.majorType = MediaType.Video;
        pmt.subType = MediaSubType.MJPG;
        pmt.formatType = FormatType.VideoInfo;
        pmt.fixedSizeSamples = true;
        pmt.formatSize = 88;
        pmt.sampleSize = 2764800;
        pmt.temporalCompression = false;
        VideoInfoHeader format = new VideoInfoHeader();
        format.SrcRect = new DsRect();
        format.TargetRect = new DsRect();
        format.BitRate = 663552000;
        format.AvgTimePerFrame = 333333;
        format.BmiHeader = new BitmapInfoHeader();
        format.BmiHeader.Size = 40;
        format.BmiHeader.Width = 1280;
        format.BmiHeader.Height = 720;
        format.BmiHeader.Planes = 1;
        format.BmiHeader.BitCount = 24;
        format.BmiHeader.Compression = 1196444237;
        format.BmiHeader.ImageSize = 2764800;
        pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(format));
        Marshal.StructureToPtr(format, pmt.formatPtr, false);
        hr = ((IAMStreamConfig)DsFindPin.ByCategory(camera, PinCategory.Capture, 0)).SetFormat(pmt);
        //hr = ((IAMStreamConfig)GetPin(pUSB20Camera, "Capture")).SetFormat(pmt);
        DsUtils.FreeAMMediaType(pmt);
        DsError.ThrowExceptionForHR(hr);

        //add MJPG Decompressor
        IBaseFilter pMJPGDecompressor = (IBaseFilter)new MjpegDec();
        hr = pGraph.AddFilter(pMJPGDecompressor, "MJPG Decompressor");
        DsError.ThrowExceptionForHR(hr);

        //add color space converter
        IBaseFilter pColorSpaceConverter = (IBaseFilter)new Colour();
        hr = pGraph.AddFilter(pColorSpaceConverter, "Color space converter");
        DsError.ThrowExceptionForHR(hr);

        // Connect camera and MJPEG Decomp
        //hr = pGraph.ConnectDirect(GetPin(pUSB20Camera, "Capture"), GetPin(pMJPGDecompressor, "XForm In"), null);
        hr = pGraph.ConnectDirect(DsFindPin.ByCategory(camera, PinCategory.Capture, 0), DsFindPin.ByName(pMJPGDecompressor, "XForm In"), null);
        DsError.ThrowExceptionForHR(hr);

        // Connect MJPG Decomp and color space converter
        hr = pGraph.ConnectDirect(DsFindPin.ByName(pMJPGDecompressor, "XForm Out"), DsFindPin.ByName(pColorSpaceConverter, "Input"), null);
        DsError.ThrowExceptionForHR(hr);

        //add SampleGrabber
        IBaseFilter sampleGrabber = new SampleGrabber() as IBaseFilter;
        hr = pGraph.AddFilter(sampleGrabber, "Sample grabber");
        DsError.ThrowExceptionForHR(hr);

        // Configure the samplegrabber for 1280x720 ARGB32 frames.
        AMMediaType pSampleGrabber_pmt = new AMMediaType();
        pSampleGrabber_pmt.majorType = MediaType.Video;
        pSampleGrabber_pmt.subType = MediaSubType.ARGB32;
        pSampleGrabber_pmt.formatType = FormatType.VideoInfo;
        pSampleGrabber_pmt.fixedSizeSamples = true;
        pSampleGrabber_pmt.formatSize = 88;
        pSampleGrabber_pmt.sampleSize = 3686400;
        pSampleGrabber_pmt.temporalCompression = false;
        VideoInfoHeader pSampleGrabber_format = new VideoInfoHeader();
        pSampleGrabber_format.SrcRect = new DsRect();
        pSampleGrabber_format.TargetRect = new DsRect();
        pSampleGrabber_format.BitRate = 884736885;
        pSampleGrabber_format.AvgTimePerFrame = 333333;
        pSampleGrabber_format.BmiHeader = new BitmapInfoHeader();
        pSampleGrabber_format.BmiHeader.Size = 40;
        pSampleGrabber_format.BmiHeader.Width = 1280;
        pSampleGrabber_format.BmiHeader.Height = 720;
        pSampleGrabber_format.BmiHeader.Planes = 1;
        pSampleGrabber_format.BmiHeader.BitCount = 32;
        //pSampleGrabber_format.BmiHeader.Compression = 1196444237;
        pSampleGrabber_format.BmiHeader.ImageSize = 3686400;
        pSampleGrabber_pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(pSampleGrabber_format));
        Marshal.StructureToPtr(pSampleGrabber_format, pSampleGrabber_pmt.formatPtr, false);
        hr = ((ISampleGrabber)sampleGrabber).SetMediaType(pSampleGrabber_pmt);
        // BUG FIX: this media type (and its CoTaskMem formatPtr) used to leak.
        DsUtils.FreeAMMediaType(pSampleGrabber_pmt);
        DsError.ThrowExceptionForHR(hr);

        // Connect color space converter and SampleGrabber
        //hr = pGraph.ConnectDirect(GetPin(pMJPGDecompressor, "XForm Out"), GetPin(pSampleGrabber, "Input"), null);
        hr = pGraph.ConnectDirect(DsFindPin.ByName(pColorSpaceConverter, "XForm Out"), DsFindPin.ByName(sampleGrabber, "Input"), null);
        DsError.ThrowExceptionForHR(hr);

        //set callback (1 = BufferCB)
        hr = ((ISampleGrabber)sampleGrabber).SetCallback(this, 1);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // Clean this mess up!
        // TODO(review): the intermediate pins and filters obtained above are still
        // not released here; the graph keeps them alive, but explicit release
        // would be cleaner.
    }
}
/// <summary>
/// Queries an audio capture device for its supported channel-count and
/// sample-frequency ranges via IAMStreamConfig.
/// </summary>
/// <param name="device">The audio input device to probe.</param>
/// <returns>The capabilities plus a success flag (false when any step failed).</returns>
private (LocalAudioSourceCapability[] caps, bool success) GetCapabilities(DsDevice device)
{
    Log.Information($"Audio ({device.Name}): Getting Caps");
    var list = new List <LocalAudioSourceCapability>();
    bool failed = false;
    IntPtr pCaps = IntPtr.Zero;
    IFilterGraph2 filterGraph2 = null;
    IBaseFilter sourceFilter = null;
    IAMStreamConfig streamConfig = null;
    object pin = null;
    int count = 0;
    int size = 0;
    try
    {
        filterGraph2 = new FilterGraph() as IFilterGraph2;
        if (filterGraph2 == null)
        {
            throw new NotSupportedException("filter2 is null");
        }
        LocalVideoSourceManager.AddCaptureFilter(filterGraph2, device, out sourceFilter);
        pin = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        if (pin == null)
        {
            Log.Information($"Audio ({device.Name}): First pin is null");
            // Fall back to the filter itself — some audio filters expose
            // IAMStreamConfig directly.
            pin = sourceFilter;
        }
        streamConfig = pin as IAMStreamConfig;
        if (streamConfig == null)
        {
            throw new NotSupportedException("pin is null");
        }
        LocalVideoSourceManager.Checked(() => streamConfig.GetNumberOfCapabilities(out count, out size), "GetNumberOfCapabilities", null);
        if (count <= 0)
        {
            // BUG FIX: this is the audio path — the copy-pasted message said "video".
            throw new NotSupportedException("This audio source does not report capabilities.");
        }
        if (size != Marshal.SizeOf(typeof(AudioStreamConfigCaps)))
        {
            throw new NotSupportedException("Unable to retrieve audio source capabilities. This audio source requires a larger AudioStreamConfigCaps structure.");
        }
        // Alloc memory for the caps structure that GetStreamCaps fills in.
        pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(AudioStreamConfigCaps)));
        for (int i = 0; i < count; i++)
        {
            AMMediaType mediaType = null;
            LocalVideoSourceManager.Checked(() => streamConfig.GetStreamCaps(i, out mediaType, pCaps), "GetStreamCaps", null);
            AudioStreamConfigCaps caps = (AudioStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(AudioStreamConfigCaps));
            var result = new LocalAudioSourceCapability()
            {
                MinimumChannels = caps.MinimumChannels,
                MaximumChannels = caps.MaximumChannels,
                MinimumSampleFrequency = caps.MinimumSampleFrequency,
                MaximumSampleFrequency = caps.MaximumSampleFrequency
            };
            list.Add(result);
            // BUG FIX: the media type returned by GetStreamCaps used to leak.
            if (mediaType != null)
            {
                DsUtils.FreeAMMediaType(mediaType);
            }
        }
    }
    catch (Exception e)
    {
        Log.Error(e, $"Error during retreiving caps for '{device.Name}'");
        failed = true;
    }
    finally
    {
        if (pCaps != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(pCaps);
        }
    }
    Log.Information($"Audio ({device.Name}): Releasing");
    try
    {
        // BUG FIX: when no capture pin exists, pin aliases sourceFilter, and
        // streamConfig is always the same RCW as pin — releasing each variable
        // separately over-released the same COM object.
        if (!ReferenceEquals(pin, sourceFilter))
        {
            LocalVideoSourceManager.ReleaseComObject(pin);
        }
        LocalVideoSourceManager.ReleaseComObject(sourceFilter);
        LocalVideoSourceManager.ReleaseComObject(filterGraph2);
    }
    catch (Exception e)
    {
        Log.Error(e, $"ReleaseComObject({device.Name}) failed");
    }
    Log.Information($"Caps {device.Name}: Count: {list.Count}/{count}, Str={size} ({string.Join("; ", list.Where(s => !s.IsStandart()).Select(s => s.ToString()))})");
    return (list.ToArray(), !failed);
}
/// <summary>
/// Queries a video capture device for its supported formats (size, fps range,
/// pixel format) via IAMStreamConfig.
/// </summary>
/// <param name="device">The video input device to probe.</param>
/// <returns>The capabilities plus the resulting device state (Ready/Locked/Failed).</returns>
private (LocalVideoSourceCapability[] caps, InputDeviceState state) GetCapabilities(DsDevice device)
{
    if (_initialLogging)
    {
        Log.Information($"Caps {device.Name}: getting");
    }
    var list = new List <LocalVideoSourceCapability>();
    IntPtr pCaps = IntPtr.Zero;
    IFilterGraph2 filterGraph2 = null;
    IBaseFilter sourceFilter = null;
    IAMStreamConfig streamConfig = null;
    object pin = null;
    InputDeviceState state = InputDeviceState.Ready;
    try
    {
        filterGraph2 = new FilterGraph() as IFilterGraph2;
        if (filterGraph2 == null)
        {
            throw new NotSupportedException("filter2 is null");
        }
        LocalVideoSourceManager.AddCaptureFilter(filterGraph2, device, out sourceFilter);
        pin = DsFindPin.ByCategory(sourceFilter, PinCategory.Capture, 0);
        if (pin == null)
        {
            // Fall back to the filter itself — some filters expose IAMStreamConfig directly.
            pin = sourceFilter;
        }
        streamConfig = pin as IAMStreamConfig;
        if (streamConfig == null)
        {
            throw new NotSupportedException("pin is null");
        }
        int count = 0;
        int size = 0;
        Checked(() => streamConfig.GetNumberOfCapabilities(out count, out size), "GetNumberOfCapabilities", null);
        if (count <= 0)
        {
            throw new NotSupportedException("This video source does not report capabilities.");
        }
        if (size != Marshal.SizeOf(typeof(VideoStreamConfigCaps)))
        {
            throw new NotSupportedException("Unable to retrieve video source capabilities. This video source requires a larger VideoStreamConfigCaps structure.");
        }
        // Alloc memory for the caps structure that GetStreamCaps fills in.
        pCaps = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(VideoStreamConfigCaps)));
        for (int i = 0; i < count; i++)
        {
            AMMediaType mediaType = null;
            Checked(() => streamConfig.GetStreamCaps(i, out mediaType, pCaps), "GetStreamCaps", null);
            VideoStreamConfigCaps caps = (VideoStreamConfigCaps)Marshal.PtrToStructure(pCaps, typeof(VideoStreamConfigCaps));
            var format = GetMediaTypeInfo(mediaType, out var height, out var width, out var compression, out var videoInfoHeader, out var videoInfoHeader2);
            var result = new LocalVideoSourceCapability()
            {
                // Frame intervals are inverted: the max fps comes from the min interval.
                MaxF = GetFps(caps.MinFrameInterval),
                MinF = GetFps(caps.MaxFrameInterval),
                Fmt = format,
                W = width,
                H = height,
            };
            list.Add(result);
            // BUG FIX: the media type returned by GetStreamCaps used to leak.
            if (mediaType != null)
            {
                DsUtils.FreeAMMediaType(mediaType);
            }
        }
    }
    catch (UnauthorizedAccessException e)
    {
        Log.Warning(e, $"Error during retreiving caps for '{device.Name}' (Locked)");
        state = InputDeviceState.Locked;
    }
    catch (Exception e)
    {
        Log.Error(e, $"Error during retreiving caps for '{device.Name}'");
        state = InputDeviceState.Failed;
    }
    finally
    {
        if (pCaps != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(pCaps);
        }
    }
    try
    {
        // BUG FIX: when no capture pin exists, pin aliases sourceFilter, and
        // streamConfig is always the same RCW as pin — releasing each variable
        // separately over-released the same COM object.
        if (!ReferenceEquals(pin, sourceFilter))
        {
            ReleaseComObject(pin);
        }
        ReleaseComObject(sourceFilter);
        ReleaseComObject(filterGraph2);
    }
    catch (Exception e)
    {
        Log.Error(e, $"ReleaseComObject('{device.Name}') failed");
    }
    if (_initialLogging)
    {
        Log.Information($"Caps {device.Name}: {string.Join("; ", list.Select(s => s.ToString()))}");
    }
    return (list.ToArray(), state);
}
/// <summary>
/// Finds a pin on <paramref name="filter"/> matching the given direction and media
/// type, trying in order: pin category, preferred pin name, then full enumeration.
/// </summary>
/// <returns>A matching pin (caller must release) or null when none matches.</returns>
private IPin FindPin(IBaseFilter filter, PinDirection direction, Guid mediaType, Guid pinCategory, string preferredName)
{
    if (Guid.Empty != pinCategory)
    {
        int idx = 0;
        do
        {
            IPin pinByCategory = DsFindPin.ByCategory(filter, pinCategory, idx);
            if (pinByCategory != null)
            {
                if (IsMatchingPin(pinByCategory, direction, mediaType))
                {
                    return (pinByCategory);
                }
                Marshal.ReleaseComObject(pinByCategory);
            }
            else
            {
                break;
            }
            idx++;
        }while (true);
    }
    if (!string.IsNullOrEmpty(preferredName))
    {
        IPin pinByName = DsFindPin.ByName(filter, preferredName);
        // BUG FIX: when no pin had the preferred name, the old code fell through to
        // Marshal.ReleaseComObject(null), which throws ArgumentNullException.
        if (pinByName != null)
        {
            if (IsMatchingPin(pinByName, direction, mediaType))
            {
                return (pinByName);
            }
            Marshal.ReleaseComObject(pinByName);
        }
    }
    IEnumPins pinsEnum;
    IPin[] pins = new IPin[1];
    int hr = filter.EnumPins(out pinsEnum);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        while (pinsEnum.Next(1, pins, IntPtr.Zero) == 0)
        {
            IPin pin = pins[0];
            if (pin != null)
            {
                if (IsMatchingPin(pin, direction, mediaType))
                {
                    return (pin);
                }
                Marshal.ReleaseComObject(pin);
            }
        }
    }
    finally
    {
        // BUG FIX: the pin enumerator used to leak.
        Marshal.ReleaseComObject(pinsEnum);
    }
    return (null);
}
/// <summary>
/// Builds and starts the capture graph: source filter, a still/preview pin
/// (inserting a SmartTee when the device has neither), a sample grabber for
/// snapshots and a default video renderer for live preview.
/// </summary>
/// <param name="dev">The capture device.</param>
/// <param name="iWidth">Requested width (0 to skip format configuration).</param>
/// <param name="iHeight">Requested height.</param>
/// <param name="iBPP">Requested bits per pixel.</param>
private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP)
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    IPin pCaptureOut = null;
    IPin pSampleIn = null;
    IPin pRenderIn = null;
    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
#if DEBUG
        m_rot = new DsROTEntry(m_FilterGraph);
#endif
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);
        // Prefer a dedicated Still pin; fall back to Preview.
        m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);
        if (m_pinStill == null)
        {
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
        }
        if (m_pinStill == null)
        {
            // No still/preview pin: split the capture pin with a SmartTee instead.
            IPin pRaw = null;
            IPin pSmart = null;
            m_VidControl = null;
            IBaseFilter iSmartTee = (IBaseFilter) new SmartTee();
            try
            {
                hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                DsError.ThrowExceptionForHR(hr);
                pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);
                hr = m_FilterGraph.Connect(pRaw, pSmart);
                DsError.ThrowExceptionForHR(hr);
                m_pinStill = DsFindPin.ByName(iSmartTee, "Preview");
                pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");
                if (iHeight + iWidth + iBPP > 0)
                {
                    SetConfigParms(pRaw, iWidth, iHeight, iBPP);
                }
            }
            finally
            {
                // BUG FIX: the old cleanup compared unrelated RCWs ("pRaw != pSmart",
                // "pRaw != iSmartTee") and could call ReleaseComObject(null) when a
                // pin lookup failed. Use plain null guards; the graph keeps its own
                // reference to the SmartTee filter.
                if (pRaw != null)
                {
                    Marshal.ReleaseComObject(pRaw);
                }
                if (pSmart != null)
                {
                    Marshal.ReleaseComObject(pSmart);
                }
                Marshal.ReleaseComObject(iSmartTee);
            }
        }
        else
        {
            m_VidControl = capFilter as IAMVideoControl;
            pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
            if (iHeight + iWidth + iBPP > 0)
            {
                SetConfigParms(m_pinStill, iWidth, iHeight, iBPP);
            }
        }
        sampGrabber = new SampleGrabber() as ISampleGrabber;
        IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);
        pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
        IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
        DsError.ThrowExceptionForHR(hr);
        pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);
        // Connection order depends on whether the device supports IAMVideoControl
        // (hardware still-trigger): trigger-capable devices connect capture first.
        if (m_VidControl == null)
        {
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);
        }
        else
        {
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
        }
        SaveSizeInfo(sampGrabber);
        IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
        hr = mediaCtrl.Run();
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (pCaptureOut != null)
        {
            Marshal.ReleaseComObject(pCaptureOut);
            pCaptureOut = null;
        }
        if (pRenderIn != null)
        {
            Marshal.ReleaseComObject(pRenderIn);
            pRenderIn = null;
        }
        if (pSampleIn != null)
        {
            Marshal.ReleaseComObject(pSampleIn);
            pSampleIn = null;
        }
    }
}
/// <summary>
/// Initializes the graph for which the capture data will be piped into
/// </summary>
/// <param name="p_capDev">The device to be captured</param>
private void buildGraph(DsDevice p_capDev)
{
    int hr = 0; //For error checking

    // NOTE(review): the previous graph (if any) is only dropped, not COM-released;
    // the RCW is left for the GC/finalizer. Kept as-is since other fields may
    // still reference the same RCW.
    if (m_graph != null)
    {
        m_graph = null;
    }
    m_graph = (IGraphBuilder)new FilterGraph();

    IBaseFilter captureFilter; //Filter for the captureDevice
    ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); //Rendering portion

    //Add the graph to the builder, like adding canvas to the stand
    hr = pBuilder.SetFiltergraph(m_graph);
    DsError.ThrowExceptionForHR(hr);

    //Initialize captureFilter with the unique identifier from capDev and add it to the graph
    captureFilter = createFilterByDevice(p_capDev);
    hr = m_graph.AddFilter(captureFilter, "CapFilter");
    DsError.ThrowExceptionForHR(hr);

    //Create a sample grabber and add it to the graph
    m_sampleGrabber = (IBaseFilter)Activator.CreateInstance(typeof(CamSampleGrabber));
    hr = m_graph.AddFilter(m_sampleGrabber, "SampleGrabber");
    DsError.ThrowExceptionForHR(hr);

    //Set the callback function for the sample grabber. It will be CamCaptureGrabberCallBack.bufferCB()
    // this is because sampleCB only support single image getting.
    hr = ((CamSampleGrabber)m_sampleGrabber).SetCallback(new CamCaptureGrabberCallBack(), 1);
    DsError.ThrowExceptionForHR(hr);
    hr = ((ISampleGrabber)m_sampleGrabber).SetOneShot(false);
    DsError.ThrowExceptionForHR(hr);

    //Get pins
    IPin capPin = null;
    IPin samPin = null;
    try
    {
        capPin = DsFindPin.ByCategory(captureFilter, PinCategory.Capture, 0);
        samPin = DsFindPin.ByDirection(m_sampleGrabber, PinDirection.Input, 0);
        m_camControl = captureFilter as IAMCameraControl;

        //Create the media type, just a video RGB24 with VideoInfo formatType
        AMMediaType media = null;
        hr = getMedia(capPin, out media);
        DsError.ThrowExceptionForHR(hr);
        media.majorType = MediaType.Video;
        hr = ((IAMStreamConfig)capPin).SetFormat(media);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(media);

        //Connect capture device to the sample grabber
        hr = m_graph.Connect(capPin, samPin);
        DsError.ThrowExceptionForHR(hr);

        //Render video
        // For a filter with only an output filter (ie. m_sample) then the first two
        // parameters are null. The 4 and 5 parameter could not be null, however the 4th
        // is an intermediate filter which i don't want and the 5th is the sink if not defined
        // will end up being a default filter.
        hr = pBuilder.RenderStream(null, null, m_sampleGrabber, null, null);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // BUGFIX: the pin interfaces were previously never released (COM
        // reference leak on every graph rebuild).
        if (capPin != null)
        {
            Marshal.ReleaseComObject(capPin);
        }
        if (samPin != null)
        {
            Marshal.ReleaseComObject(samPin);
        }
    }
}
/// <summary>
/// Builds and runs the full TV/webcam capture graph: reads sizing/tuner options
/// from the INI, finds the video (and optionally separate audio) capture device,
/// wires preview or capture pins through the capture graph builder, configures the
/// TV tuner input and channel range, then starts playback. Any failure is shown to
/// the user and the application exits.
/// </summary>
public void CaptureVideo()
{
    int hr = 0;
    try
    {
        // Pull user-configurable capture settings from the INI file.
        int width = INI.Default["DirectShow Player"]["Video Player/Device Source Width", "352"].Integer;
        int Height = INI.Default["DirectShow Player"]["Video Player/Device Source Height", "240"].Integer;
        int fps = INI.Default["DirectShow Player"]["Video Player/Frames Per Second (0 unlimited)", "30"].Integer;
        bool antenna_input = INI.Default[Options.ProgramName]["Video Player/Capture Tuner from antenna", "true"].Bool;
        bool capture_TV = INI.Default[Options.ProgramName]["Video Player/Capture Tuner", "true"].Bool;
        bool capture_is_audio = INI.Default[Options.ProgramName]["Video Player/Capture Video is Audio also", "true"].Bool;
        IPin cap_pin = null;
        IPin cap_audio_pin;
        crossbar_to_tuner = capture_TV;
        // Get DirectShow interfaces
        GetInterfaces();
        // Attach the filter graph to the capture graph
        hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        DsError.ThrowExceptionForHR(hr);
        // Use the system device enumerator and class enumerator to find
        // a video capture/preview device, such as a desktop USB video camera.
        sourceFilter = this.DeviceFinder.FindVideoCaptureDevice(false);
        if (sourceFilter == null)
        {
            // No device: report status to the network peer and bail out quietly.
            Network.SendStatus(channel, volume, false);
            return;
        }
#if use_bda
        bda_filter = FindVideoCaptureDevice(true);
#endif
        // The video device may also supply audio; otherwise find a separate one.
        if (!capture_is_audio)
        {
            sourceAudioFilter = this.DeviceFinder.FindAudioCaptureDevice();
        }
        else
        {
            sourceAudioFilter = sourceFilter;
        }
        //reclock_video_filter = FindVideoRenderDevice();
        //scale_filter = FindVideoScaleDevice();
        reclock_filter = this.DeviceFinder.FindAudioRenderDevice();
        // Force the analog decoder to NTSC-M if it reports a different standard.
        IAMAnalogVideoDecoder decoder = sourceFilter as IAMAnalogVideoDecoder;
        if (decoder != null)
        {
            AnalogVideoStandard oldStandard;
            decoder.get_TVFormat(out oldStandard);
            if (oldStandard != AnalogVideoStandard.NTSC_M)
            {
                decoder.put_TVFormat(AnalogVideoStandard.NTSC_M);
            }
            decoder = null;
        }
        // this is really for which input - the tuner we shouldn't adjust
        //if( !capture_TV )
        // Add Capture filter to our graph.
        hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture");
        DsError.ThrowExceptionForHR(hr);
        if (scale_filter != null)
        {
            hr = this.graphBuilder.AddFilter(scale_filter, "Video Scaler");
            DsError.ThrowExceptionForHR(hr);
        }
        // NOTE(review): Connect(null, null) looks like a no-op/probe call;
        // its HRESULT is deliberately ignored here.
        this.graphBuilder.Connect(null, null);
#if use_bda
        if (bda_filter != null)
        {
            hr = this.graphBuilder.AddFilter(bda_filter, "Video Tuner");
            DsError.ThrowExceptionForHR(hr);
        }
#endif
        if (capture_TV && !capture_is_audio)
        {
            if (sourceAudioFilter != null)
            {
                hr = this.graphBuilder.AddFilter(sourceAudioFilter, "Audio Capture");
                DsError.ThrowExceptionForHR(hr);
            }
        }
        if (reclock_filter != null)
        {
            Log.log("Adding 'reclock' which is the audio output device?");
            hr = this.graphBuilder.AddFilter(reclock_filter, "Audio Renderer");
            DsError.ThrowExceptionForHR(hr);
        }
        //this.graphBuilder.AddFilter(
        AdjustCrossbarPin();
        bool cap_is_preview;
        {
            // set the video input size on the preview pin.
            // Prefer Preview pins; fall back to Capture pins when absent.
            cap_audio_pin = DsFindPin.ByCategory((IBaseFilter)sourceAudioFilter, PinCategory.Preview, 0);
            cap_pin = DsFindPin.ByCategory((IBaseFilter)sourceFilter, PinCategory.Preview, 0);
            if (cap_pin == null)
            {
                cap_is_preview = false;
                cap_audio_pin = DsFindPin.ByCategory((IBaseFilter)sourceAudioFilter, PinCategory.Capture, 0);
                cap_pin = DsFindPin.ByCategory((IBaseFilter)sourceFilter, PinCategory.Capture, 0);
            }
            else
            {
                cap_is_preview = true;
            }
            //Log.log( "Cap pin + " + cap_pin );
        }
        // Render the preview pin on the video capture filter
        // Use this instead of this.graphBuilder.RenderFile
        if (cap_is_preview)
        {
            //hr = this.captureGraphBuilder.RenderStream( PinCategory.Preview, MediaType.Video, scale_filter, null, null );
            //DsError.ThrowExceptionForHR( hr );
            hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, null, null);
            DsError.ThrowExceptionForHR(hr);
            if (sourceAudioFilter != null)
            {
                hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Audio, sourceAudioFilter, null, reclock_filter);
                DsError.ThrowExceptionForHR(hr);
            }
        }
        else
        {
            //hr = this.captureGraphBuilder.RenderStream( PinCategory.Capture, MediaType.Video, scale_filter, null, null );
            //DsError.ThrowExceptionForHR( hr );
            hr = this.captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, sourceFilter, null, null);
            DsError.ThrowExceptionForHR(hr);
            if (sourceAudioFilter != null)
            {
                //IBaseFilter renderer = null;
                //Log.log( "reclock is " + reclock_filter );
                // Audio failure is non-fatal: log and continue with video only.
                hr = this.captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Audio, sourceAudioFilter, null, reclock_filter);
                if (hr != 0)
                {
                    Log.log("Bad audio stream");
                }
                //DsError.ThrowExceptionForHR( hr );
            }
        }
        // Override the frame rate on the capture/preview pin format, if the pin
        // supports IAMStreamConfig. Width/height overrides are commented out.
        IAMStreamConfig stream = cap_pin as IAMStreamConfig;
        if (stream != null)
        {
            // 352x240
            AMMediaType media;
            VideoInfoHeader vih = new VideoInfoHeader();
            stream.GetFormat(out media);
            Marshal.PtrToStructure(media.formatPtr, vih);
            //vih.BmiHeader.Width = width;
            //vih.BmiHeader.Height = Height;
            if (fps > 0)
            {
                // AvgTimePerFrame is in 100ns units.
                vih.AvgTimePerFrame = (10000000L / fps);
            }
            //Log.log( "set the bitmap override..." );
            Marshal.StructureToPtr(vih, media.formatPtr, false);
            hr = stream.SetFormat(media);
            if (hr != 0)
            {
                Log.log("Failed to set format (preview)." + hr);
            }
        }
        else
        {
            Log.log("Failed to get stream config from source filter");
        }
        //graph_filter.SetSyncSource( ref_clock );
        // Pick a reference clock: prefer the video source, fall back to audio.
        object o;
        hr = captureGraphBuilder.FindInterface(null, null, sourceFilter, typeof(IReferenceClock).GUID, out o);
        if (hr == 0)
        {
            ref_clock = (IReferenceClock)o;
        }
        if (ref_clock == null)
        {
            hr = captureGraphBuilder.FindInterface(null, null, sourceAudioFilter, typeof(IReferenceClock).GUID, out o);
            ref_clock = (IReferenceClock)o;
        }
        hr = captureGraphBuilder.FindInterface(null, null, sourceFilter, typeof(IAMTVTuner).GUID, out o);
        //graphBuilder.sa.
        if (hr >= 0)
        {
            tuner = (IAMTVTuner)o;
            o = null;
        }
        // Configure the tuner input (antenna vs. cable) and channel range.
        if (tuner != null)
        {
            if (antenna_input)
            {
                TunerInputType type;
                hr = tuner.get_InputType(0, out type);
                if (type != TunerInputType.Antenna)
                {
                    tuner.put_InputType(0, TunerInputType.Antenna);
                    hr = tuner.get_InputType(0, out type);
                }
            }
            else
            {
                if (tuner != null)
                {
                    TunerInputType type;
                    hr = tuner.get_InputType(0, out type);
                    if (type != TunerInputType.Cable)
                    {
                        tuner.put_InputType(0, TunerInputType.Cable);
                        hr = tuner.get_InputType(0, out type);
                    }
                }
            }
            tuner.ChannelMinMax(out min_channel, out max_channel);
            // INI values (defaulted to the tuner-reported range) take precedence.
            min_channel = INI.Default["DirectShow Player"]["Video Player/Minimum Channel", min_channel.ToString()].Integer;
            max_channel = INI.Default["DirectShow Player"]["Video Player/Maximum Channel", max_channel.ToString()].Integer;
        }
        // Now that the filter has been added to the graph and we have
        // rendered its stream, we can release this reference to the filter.
        if (sourceAudioFilter != null)
        {
            //hr = captureGraphBuilder.FindInterface( null, null, sourceFilter, typeof( IAMTVAudio ).GUID, out o );
            hr = captureGraphBuilder.FindInterface(null, null, sourceAudioFilter, typeof(IBasicAudio).GUID, out o);
            if (hr >= 0)
            {
                audio_mixer = (IBasicAudio)o;
                o = null;
            }
        }
        Marshal.ReleaseComObject(sourceFilter);
        if (audio_mixer != null)
        {
            audio_mixer.get_Volume(out volume);
        }
        if (tuner != null)
        {
            tuner.get_Channel(out channel, out sub_channel, out sub_channel2);
        }
        //this.graphBuilder.SetDefaultSyncSource();
        if (ref_clock != null)
        {
            this.graph_filter.SetSyncSource(ref_clock);
        }
        graph_streams.SyncUsingStreamOffset(true);
        // Set video window style and position
        SetupVideoWindow();
        // Add our graph to the running object table, which will allow
        // the GraphEdit application to "spy" on our graph
        rot = new DsROTEntry(this.graphBuilder);
        //this.mediaControl.set
        // Start previewing video data
        hr = this.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);
        // Remember current state
        this.currentState = PlayState.Running;
        Network.SendStatus(channel, volume, (this.currentState == PlayState.Running));
    }
    catch (Exception e)
    {
        // Any graph-building failure is treated as fatal for the application.
        MessageBox.Show("An unrecoverable error has occurred : " + e.Message);
        this.DialogResult = DialogResult.Abort;
        this.Close();
        Application.Exit();
    }
}
/// <summary>
/// Start using the camera: opens the first available video device, builds a
/// capture graph (inserting a SmartTee splitter when the device has no
/// preview pin), runs the graph and starts the frame timer.
/// </summary>
/// <returns>Indicate if the webcam was able to start</returns>
public bool Start()
{
    if (!started)
    {
        DsDevice[] devices = GetDevices();
        if (devices.Length > 0)
        {
            DsDevice dev = devices[0];

            // Initialize camera
            int hr;
            IBaseFilter capFilter = null;
            ISampleGrabber sampGrabber = null;
            IPin pCaptureOut = null;
            IPin pSampleIn = null;
            IPin pRenderIn = null;

            m_FilterGraph = new FilterGraph() as IFilterGraph2;
            try
            {
                hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
                // BUGFIX: this HRESULT was previously ignored; a failed device
                // open surfaced later as a confusing NullReferenceException.
                DsError.ThrowExceptionForHR(hr);

                if (m_pinStill == null)
                {
                    m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
                }

                // Still haven't found one. Need to put a splitter in so we have
                // one stream to capture the bitmap from, and one to display. Ok, we
                // don't *have* to do it that way, but we are going to anyway.
                if (m_pinStill == null)
                {
                    IPin pRaw = null;
                    IPin pSmart = null;

                    // There is no still pin
                    m_VidControl = null;

                    // Add a splitter
                    IBaseFilter iSmartTee = (IBaseFilter)new SmartTee();
                    try
                    {
                        hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                        DsError.ThrowExceptionForHR(hr);

                        // Find the capture pin from the video device and the
                        // input pin for the splitter, and connect them
                        pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                        pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);
                        hr = m_FilterGraph.Connect(pRaw, pSmart);
                        DsError.ThrowExceptionForHR(hr);

                        // Now set the capture and still pins (from the splitter)
                        m_pinStill = DsFindPin.ByName(iSmartTee, "Preview");
                        pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");

                        // If any of the default config items are set, perform the config
                        // on the actual video device (rather than the splitter)
                        if (captureHeight + captureWidth > 0)
                        {
                            SetConfigParms(pRaw, captureWidth, captureHeight, 24);
                        }
                    }
                    finally
                    {
                        // BUGFIX: the old cleanup compared unrelated COM pointers
                        // (pRaw != pSmart / pRaw != iSmartTee), which could pass
                        // null to Marshal.ReleaseComObject and throw from the
                        // finally block, masking the real failure. Use plain
                        // null checks.
                        if (pRaw != null)
                        {
                            Marshal.ReleaseComObject(pRaw);
                        }
                        if (pSmart != null)
                        {
                            Marshal.ReleaseComObject(pSmart);
                        }
                        if (iSmartTee != null)
                        {
                            Marshal.ReleaseComObject(iSmartTee);
                        }
                    }
                }
                else
                {
                    // Get a control pointer (used in Click())
                    m_VidControl = capFilter as IAMVideoControl;
                    pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);

                    // If any of the default config items are set
                    if (captureHeight + captureWidth > 0)
                    {
                        SetConfigParms(m_pinStill, captureWidth, captureHeight, 24);
                    }
                }

                // Get the SampleGrabber interface
                sampGrabber = new SampleGrabber() as ISampleGrabber;

                // Configure the sample grabber
                IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
                ConfigureSampleGrabber(sampGrabber);
                pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

                // Get the default video renderer
                IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
                hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
                DsError.ThrowExceptionForHR(hr);
                pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);

                // Add the sample grabber to the graph
                hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
                DsError.ThrowExceptionForHR(hr);

                if (m_VidControl == null)
                {
                    // Connect the Still pin to the sample grabber
                    hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
                    DsError.ThrowExceptionForHR(hr);

                    // Connect the capture pin to the renderer
                    hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
                    DsError.ThrowExceptionForHR(hr);
                }
                else
                {
                    // Connect the capture pin to the renderer
                    hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
                    DsError.ThrowExceptionForHR(hr);

                    // Connect the Still pin to the sample grabber
                    hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
                    DsError.ThrowExceptionForHR(hr);
                }

                SaveSizeInfo(sampGrabber);
                ConfigVideoWindow(pictureBox);

                // Start the graph.
                IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
                hr = mediaCtrl.Run();
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (sampGrabber != null)
                {
                    Marshal.ReleaseComObject(sampGrabber);
                    sampGrabber = null;
                }
                if (pCaptureOut != null)
                {
                    Marshal.ReleaseComObject(pCaptureOut);
                    pCaptureOut = null;
                }
                if (pRenderIn != null)
                {
                    Marshal.ReleaseComObject(pRenderIn);
                    pRenderIn = null;
                }
                if (pSampleIn != null)
                {
                    Marshal.ReleaseComObject(pSampleIn);
                    pSampleIn = null;
                }
            }

            m_PictureReady = new ManualResetEvent(false);
            timer.Interval = (int)(1000 / framesPerSecond);
            timer.Start();
            return (true);
        }
    }
    else
    {
        // Already started: report success without rebuilding the graph.
        return (true);
    }
    return (false);
}
// Set the Framerate, and video size private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight) { int hr; object o; AMMediaType media; IAMStreamConfig videoStreamConfig; IAMVideoControl videoControl = capFilter as IAMVideoControl; // Find the stream config interface hr = capGraph.FindInterface( PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o); videoStreamConfig = o as IAMStreamConfig; try { if (videoStreamConfig == null) { throw new Exception("Failed to get IAMStreamConfig"); } hr = videoStreamConfig.GetFormat(out media); DsError.ThrowExceptionForHR(hr); // copy out the videoinfoheader VideoInfoHeader v = new VideoInfoHeader(); Marshal.PtrToStructure(media.formatPtr, v); // if overriding the framerate, set the frame rate if (iFrameRate > 0) { v.AvgTimePerFrame = 10000000 / iFrameRate; } // if overriding the width, set the width if (iWidth > 0) { v.BmiHeader.Width = iWidth; } // if overriding the Height, set the Height if (iHeight > 0) { v.BmiHeader.Height = iHeight; } // Copy the media structure back Marshal.StructureToPtr(v, media.formatPtr, false); // Set the new format hr = videoStreamConfig.SetFormat(media); DsError.ThrowExceptionForHR(hr); DsUtils.FreeAMMediaType(media); media = null; // Fix upsidedown video if (videoControl != null) { VideoControlFlags pCapsFlags; IPin pPin = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); hr = videoControl.GetCaps(pPin, out pCapsFlags); DsError.ThrowExceptionForHR(hr); if ((pCapsFlags & VideoControlFlags.FlipVertical) > 0) { hr = videoControl.GetMode(pPin, out pCapsFlags); DsError.ThrowExceptionForHR(hr); hr = videoControl.SetMode(pPin, 0); } } } finally { Marshal.ReleaseComObject(videoStreamConfig); } }
/// <summary>
/// Prepares (but does not run) a capture graph for the stored device
/// (m_dev): adds the source filter, selects the capture pin, applies the
/// requested format, connects a sample grabber and records the connected
/// media type and video info.
/// </summary>
/// <param name="i_width">Requested frame width.</param>
/// <param name="i_height">Requested frame height.</param>
/// <param name="i_frame_rate">Requested frame rate in frames per second.</param>
/// <exception cref="Exception">Thrown when a capture is already active.</exception>
public void PrepareCapture(int i_width, int i_height, float i_frame_rate)
{
    const int BBP = 32;
    // Give up if a capture is already in progress.
    if (this.m_graphi_active)
    {
        throw new Exception();
    }
    // Release all graph-related objects currently held.
    CleanupGraphiObjects();
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    IPin pSampleIn = null;
    // Create the graph builder.
    this.m_FilterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
        // Add the capture device to the filter graph and receive its filter in capFilter.
        hr = m_FilterGraph.AddSourceFilterForMoniker(this.m_dev.Mon, null, this.m_dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);
        // Look for the still pin.
        m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);
        // If there is no still pin, look for a preview pin.
        if (m_pinStill == null)
        {
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
        }
        // Still haven't found one. Need to put a splitter in so we have
        // one stream to capture the bitmap from, and one to display. Ok, we
        // don't *have* to do it that way, but we are going to anyway.
        // NOTE(review): both branches below overwrite m_pinStill with the
        // Capture pin, so the Still/Preview search above only decides whether
        // m_VidControl is set — confirm this is intentional.
        if (m_pinStill == null)
        {
            // There is no still pin
            m_VidControl = null;
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
        }
        else
        {
            // Get a control pointer (used in Click())
            m_VidControl = capFilter as IAMVideoControl;
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
        }
        // BBP is a non-zero const, so this condition is always true as written.
        if (i_height + i_width + BBP > 0)
        {
            SetConfigParms(m_pinStill, i_width, i_height, i_frame_rate, BBP);
        }
        // Get the SampleGrabber interface
        sampGrabber = new SampleGrabber() as ISampleGrabber;
        // Configure sampGrabber.
        IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);
        pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
        // Add the sample grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);
        if (m_VidControl == null)
        {
            // Connect the Still pin to the sample grabber
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
        }
        else
        {
            // Connect the Still pin to the sample grabber
            // (both branches currently perform the same connection)
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
        }
        // Record the media type actually negotiated on the grabber connection.
        hr = sampGrabber.GetConnectedMediaType(this._capture_mediatype);
        DsError.ThrowExceptionForHR(hr);
        // Update the cached video format information etc.
        upateVideoInfo(sampGrabber);
    }
    finally
    {
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (pSampleIn != null)
        {
            Marshal.ReleaseComObject(pSampleIn);
            pSampleIn = null;
        }
    }
}
/// <summary> build the capture graph for grabber. </summary>
/// <param name="dev">Video capture device to open.</param>
/// <param name="iWidth">Requested frame width, or 0 to keep the device default.</param>
/// <param name="iHeight">Requested frame height, or 0 to keep the device default.</param>
/// <param name="iBPP">Requested bits per pixel, or 0 to keep the device default.</param>
/// <param name="hControl">Control that hosts the video window.</param>
private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl)
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    IPin pCaptureOut = null;
    IPin pSampleIn = null;
    IPin pRenderIn = null;

    // Get the graphbuilder object
    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    try
    {
#if DEBUG
        // Register in the ROT so GraphEdit can inspect the graph while debugging.
        m_rot = new DsROTEntry(m_FilterGraph);
#endif
        // add the video input device
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // Find the still pin
        m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0);

        // Didn't find one. Is there a preview pin?
        if (m_pinStill == null)
        {
            m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
        }

        // Still haven't found one. Need to put a splitter in so we have
        // one stream to capture the bitmap from, and one to display. Ok, we
        // don't *have* to do it that way, but we are going to anyway.
        if (m_pinStill == null)
        {
            IPin pRaw = null;
            IPin pSmart = null;

            // There is no still pin
            m_VidControl = null;

            // Add a splitter
            IBaseFilter iSmartTee = (IBaseFilter)new SmartTee();
            try
            {
                hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee");
                DsError.ThrowExceptionForHR(hr);

                // Find the capture pin from the video device and the
                // input pin for the splitter, and connect them
                pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
                pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0);
                hr = m_FilterGraph.Connect(pRaw, pSmart);
                DsError.ThrowExceptionForHR(hr);

                // Now set the capture and still pins (from the splitter)
                m_pinStill = DsFindPin.ByName(iSmartTee, "Preview");
                pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture");

                // If any of the default config items are set, perform the config
                // on the actual video device (rather than the splitter)
                if (iHeight + iWidth + iBPP > 0)
                {
                    SetConfigParms(pRaw, iWidth, iHeight, iBPP);
                }
            }
            finally
            {
                // BUGFIX: the old cleanup compared unrelated COM pointers
                // (pRaw != pSmart, pRaw != iSmartTee), which could pass null
                // to Marshal.ReleaseComObject and throw ArgumentNullException
                // from the finally block, masking the original error. Use
                // plain null checks instead.
                if (pRaw != null)
                {
                    Marshal.ReleaseComObject(pRaw);
                }
                if (pSmart != null)
                {
                    Marshal.ReleaseComObject(pSmart);
                }
                if (iSmartTee != null)
                {
                    Marshal.ReleaseComObject(iSmartTee);
                }
            }
        }
        else
        {
            // Get a control pointer (used in Click())
            m_VidControl = capFilter as IAMVideoControl;
            pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);

            // If any of the default config items are set
            if (iHeight + iWidth + iBPP > 0)
            {
                SetConfigParms(m_pinStill, iWidth, iHeight, iBPP);
            }
        }

        // Get the SampleGrabber interface
        sampGrabber = new SampleGrabber() as ISampleGrabber;

        // Configure the sample grabber
        IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);
        pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

        // Get the default video renderer
        IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(pRenderer, "Renderer");
        DsError.ThrowExceptionForHR(hr);
        pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0);

        // Add the sample grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        if (m_VidControl == null)
        {
            // Connect the Still pin to the sample grabber
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);

            // Connect the capture pin to the renderer
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);
        }
        else
        {
            // Connect the capture pin to the renderer
            hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn);
            DsError.ThrowExceptionForHR(hr);

            // Connect the Still pin to the sample grabber
            hr = m_FilterGraph.Connect(m_pinStill, pSampleIn);
            DsError.ThrowExceptionForHR(hr);
        }

        // Learn the video properties
        SaveSizeInfo(sampGrabber);
        ConfigVideoWindow(hControl);

        // Start the graph
        IMediaControl mediaCtrl = m_FilterGraph as IMediaControl;
        hr = mediaCtrl.Run();
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (pCaptureOut != null)
        {
            Marshal.ReleaseComObject(pCaptureOut);
            pCaptureOut = null;
        }
        if (pRenderIn != null)
        {
            Marshal.ReleaseComObject(pRenderIn);
            pRenderIn = null;
        }
        if (pSampleIn != null)
        {
            Marshal.ReleaseComObject(pSampleIn);
            pSampleIn = null;
        }
    }
}
/// <summary>
/// Takes a single still picture from the configured camera device: builds a
/// one-shot graph (camera capture pin -> sample grabber with BufferCB
/// callback), runs it, and waits up to 15 seconds for the callback to signal
/// that a frame was captured. Returns null if a capture is already pending
/// or raced in between; the whole graph is torn down in the finally block.
/// </summary>
/// <returns>The captured bitmap, or null.</returns>
/// <exception cref="Exception">Thrown on timeout while waiting for the picture.</exception>
internal override Bitmap TakePicture()
{
    // A non-null event means another capture is already in flight.
    if (m_callbackCompleted != null)
    {
        return (null);
    }
    // m_pictureControl = pictureControl;
    m_takePictureEnd = false;
    DsDevice cameraDevice = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice)[m_cameraDeviceIndex];
    IFilterGraph2 filterGraph = null;
    IBaseFilter cam = null;
    IPin camCapture = null; // cam
    ISampleGrabber sg = null;
    IPin sgIn = null; // samplegrabber
    try
    {
        // setup filterGraph & connect camera
        filterGraph = (IFilterGraph2)new FilterGraph();
        DsError.ThrowExceptionForHR(filterGraph.AddSourceFilterForMoniker(cameraDevice.Mon, null, cameraDevice.Name, out cam));
        // setup smarttee and connect so that cam(PinCategory.Capture)->st(PinDirection.Input)
        camCapture = DsFindPin.ByCategory(cam, PinCategory.Capture, 0); // output
        ConfStreamDimensions((IAMStreamConfig)camCapture);
        // connect Camera output to SampleGrabber input
        sg = (ISampleGrabber)new SampleGrabber();
        // configure
        AMMediaType media = new AMMediaType();
        try
        {
            media.majorType = MediaType.Video;
            media.subType = BPP2MediaSubtype(m_configuration.BPP); // this will ask samplegrabber to do convertions for us
            media.formatType = FormatType.VideoInfo;
            DsError.ThrowExceptionForHR(sg.SetMediaType(media));
        }
        finally
        {
            DsUtils.FreeAMMediaType(media);
            media = null;
        }
        DsError.ThrowExceptionForHR(sg.SetCallback(this, 1)); // 1 = BufferCB
        DsError.ThrowExceptionForHR(filterGraph.AddFilter((IBaseFilter)sg, "SG"));
        sgIn = DsFindPin.ByDirection((IBaseFilter)sg, PinDirection.Input, 0); // input
        DsError.ThrowExceptionForHR(filterGraph.Connect(camCapture, sgIn));
        GetSizeInfo(sg);
        // wait until timeout - or picture has been taken
        if (m_callbackCompleted == null)
        {
            m_callbackCompleted = new ManualResetEvent(false);
            // start filter
            DsError.ThrowExceptionForHR(((IMediaControl)filterGraph).Run());
            m_callbackState = 5;
            //if (m_pictureControl != null)
            //{
            //    m_callbackCompleted.WaitOne();
            //}
            //else
            //{
            // Block until BufferCB signals a frame, or time out after 15s.
            if (!m_callbackCompleted.WaitOne(15000, false))
            {
                throw new Exception(); //"Timeout while waiting for Picture");
            }
            //}
            // The return value is captured before the finally block nulls
            // m_capturedBitmap, so the bitmap is still handed to the caller.
            return (m_capturedBitmap);
        }
        else
        {
            return (null);
        }
    }
    finally
    {
        // release allocated objects
        if (m_callbackCompleted != null)
        {
            m_callbackCompleted.Close();
            m_callbackCompleted = null;
        }
        if (sgIn != null)
        {
            Marshal.ReleaseComObject(sgIn);
            sgIn = null;
        }
        if (sg != null)
        {
            Marshal.ReleaseComObject(sg);
            sg = null;
        }
        if (camCapture != null)
        {
            Marshal.ReleaseComObject(camCapture);
            camCapture = null;
        }
        if (cam != null)
        {
            Marshal.ReleaseComObject(cam);
            cam = null;
        }
        if (filterGraph != null)
        {
            // NOTE(review): the graph is stopped only after the pins/filters
            // above were released — confirm this ordering is safe for the
            // devices in use; stopping first would be the conventional order.
            try
            {
                ((IMediaControl)filterGraph).Stop();
            }
            catch (Exception)
            {
            }
            Marshal.ReleaseComObject(filterGraph);
            filterGraph = null;
        }
        m_capturedBitmap = null;
        m_callbackCompleted = null;
    }
}