private SampleGrabber BuildGrabber(string filterNamePrefix, IPin srcOutputPin, Guid majorType, Guid subType, Guid formatType, SampleGrabberCallback.BufferCBEventHandler callback) { // Create Filter SampleGrabber sampleGrabber = CreateSampleGrabber(majorType, subType, formatType, callback); NullRenderer nullRenderer = CreateNullRenderer(); // Add Filter GraphBuilder.AddFilter(sampleGrabber as IBaseFilter, filterNamePrefix + " Sample Grabber"); GraphBuilder.AddFilter(nullRenderer as IBaseFilter, filterNamePrefix + " Null Renderer"); // Connect srcOutput -> grabberInput, grabberOutput -> rendererInput IPin grabberIn = Util.FindInputPin(sampleGrabber as IBaseFilter); IPin grabberOut = Util.FindOutputPin(sampleGrabber as IBaseFilter); IPin rendererIn = Util.FindInputPin(nullRenderer as IBaseFilter); GraphBuilder.Connect(srcOutputPin, grabberIn); GraphBuilder.Connect(grabberOut, rendererIn); Util.FreePin(rendererIn); Util.FreePin(grabberOut); Util.FreePin(grabberIn); Marshal.ReleaseComObject(nullRenderer); return(sampleGrabber); }
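The Util pin helpers referenced in BuildGrabber are not part of this listing; a minimal sketch of what they might look like on top of DirectShowLib's DsFindPin (the class name Util and the first-pin-by-index behaviour are assumptions):

using System.Runtime.InteropServices;
using DirectShowLib;

// Hypothetical pin helpers matching the calls in BuildGrabber above.
internal static class Util
{
    // First input pin of a filter (index 0).
    public static IPin FindInputPin(IBaseFilter filter)
    {
        return DsFindPin.ByDirection(filter, PinDirection.Input, 0);
    }

    // First output pin of a filter (index 0).
    public static IPin FindOutputPin(IBaseFilter filter)
    {
        return DsFindPin.ByDirection(filter, PinDirection.Output, 0);
    }

    // Release the COM reference that DsFindPin handed out.
    public static void FreePin(IPin pin)
    {
        if (pin != null)
        {
            Marshal.ReleaseComObject(pin);
        }
    }
}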
/// <summary> /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice. /// </summary> /// <param name="FileName">The video file to open</param> /// <param name="graphicsDevice">XNA Graphics Device</param> public VideoPlayer(string FileName, GraphicsDevice graphicsDevice) { try { currentState = VideoState.Stopped; filename = FileName; InitInterfaces(); SampleGrabber sg = new SampleGrabber(); ISampleGrabber sampleGrabber = (ISampleGrabber)sg; DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber")); AMMediaType mt = new AMMediaType(); mt.majorType = MEDIATYPE_Video; // Video mt.subType = MEDIASUBTYPE_RGB24; // RGB24 mt.formatType = FORMAT_VideoInfo; // VideoInfo DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt)); DsError.ThrowExceptionForHR(gb.RenderFile(filename, null)); DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true)); DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false)); DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1)); IVideoWindow pVideoWindow = (IVideoWindow)gb; DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False)); AMMediaType MediaType = new AMMediaType(); DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType)); VideoInfoHeader pVideoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader); videoHeight = pVideoHeader.BmiHeader.Height; videoWidth = pVideoHeader.BmiHeader.Width; avgTimePerFrame = pVideoHeader.AvgTimePerFrame; bitRate = pVideoHeader.BitRate; DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration)); videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel) bgrData = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel) outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color); } catch { throw new Exception("Unable to Load or Play the video file"); } }
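InitInterfaces() is not shown in these player snippets; judging from the calls on gb and ms, it presumably creates the filter graph and queries the standard control interfaces. A minimal sketch, assuming field names gb, mc, ms and me (only gb and ms appear in the constructor above):

private IGraphBuilder gb;  // filter graph builder (AddFilter / RenderFile)
private IMediaControl mc;  // run / pause / stop
private IMediaSeeking ms;  // duration and position (GetDuration)
private IMediaEventEx me;  // end-of-stream notifications

private void InitInterfaces()
{
    // A single FilterGraph object exposes all four interfaces.
    gb = (IGraphBuilder)new FilterGraph();
    mc = (IMediaControl)gb;
    ms = (IMediaSeeking)gb;
    me = (IMediaEventEx)gb;
}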
public DSVideoPlayer(string filename, GraphicsDevice graphicsDevice) { try { // Open DirectShow Interfaces InitInterfaces(); Info = new DSVideoInfo(); // Create a SampleGrabber Filter and add it to the FilterGraph SampleGrabber sg = new SampleGrabber(); ISampleGrabber sampleGrabber = (ISampleGrabber)sg; DsError.ThrowExceptionForHR(FG_GraphBuilder.AddFilter((IBaseFilter)sg, "Grabber")); // Setup Media type info for the SampleGrabber AMMediaType mt = new AMMediaType(); mt.majorType = DSVideoInfo.MEDIATYPE_Video; // Video mt.subType = DSVideoInfo.MEDIASUBTYPE_RGB24; // RGB24 mt.formatType = DSVideoInfo.FORMAT_VideoInfo; // VideoInfo DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt)); //// Construct the rest of the FilterGraph DsError.ThrowExceptionForHR(FG_GraphBuilder.RenderFile(filename, null)); Info.FileName = filename; //// Set SampleGrabber Properties DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true)); DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false)); DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1)); // Hide Default Video Window IVideoWindow pVideoWindow = (IVideoWindow)FG_GraphBuilder; DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False)); //// Create AMMediaType to capture video information AMMediaType MediaType = new AMMediaType(); DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType)); VideoInfoHeader pVideoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader); // Store video information Info.Height = pVideoHeader.BmiHeader.Height; Info.Width = pVideoHeader.BmiHeader.Width; Info.AvgTimePerFrame = pVideoHeader.AvgTimePerFrame; Info.BitRate = pVideoHeader.BitRate; DsError.ThrowExceptionForHR(FG_MediaSeeking.GetDuration(out Info.Duration)); // Create byte arrays to hold video data videoFrameBytes = new byte[(Info.Height * Info.Width) * 4]; // RGBA format (4 bytes per pixel) bgrData = new byte[(Info.Height * Info.Width) * 3]; // BGR24 format (3 bytes per pixel) // Create Output Frame Texture2D with the height and width of the video outputFrame = new Texture2D(graphicsDevice, Info.Width, Info.Height, 1, TextureUsage.None, SurfaceFormat.Color); } catch (Exception ex) { throw new Exception("Unable to load or play the video: " + ex.Message); } }
/// <summary> /// Initializes the devices and prepares for capture /// </summary> public void Initialize() { _filterGraph = new FilterGraph() as IFilterGraph2; _mediaCtrl = _filterGraph as IMediaControl; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; int status; try { capGraph = new CaptureGraphBuilder2() as ICaptureGraphBuilder2; sampGrabber = new SampleGrabber() as ISampleGrabber; status = capGraph.SetFiltergraph(_filterGraph); DsError.ThrowExceptionForHR(status); //Adds the video device status = _filterGraph.AddSourceFilterForMoniker(_device.DsDevice.Mon, null, "Video input", out capFilter); DsError.ThrowExceptionForHR(status); IBaseFilter baseGrabFilter = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph status = _filterGraph.AddFilter(baseGrabFilter, ".net grabber"); DsError.ThrowExceptionForHR(status); //TODO: Set Framerate, height and width here status = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFilter); DsError.ThrowExceptionForHR(status); SaveSizeInfo(sampGrabber); _bitmapMemory = Marshal.AllocCoTaskMem(_stride * _videoHeight); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }
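ConfigureSampleGrabber and SaveSizeInfo are called throughout these examples but never shown. A plausible sketch of both, assuming the class implements ISampleGrabberCB and keeps the frame geometry in _videoWidth, _videoHeight and _stride (the last two names match the Initialize method above; everything else is an assumption):

private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    // Ask for uncompressed RGB24 video so BufferCB receives plain pixel data.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    int hr = sampGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    DsUtils.FreeAMMediaType(media);

    // 1 = call BufferCB for every sample (0 would select SampleCB instead).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}

private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    // Read the media type actually negotiated on the grabber's input pin.
    AMMediaType media = new AMMediaType();
    int hr = sampGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    if (media.formatType != FormatType.VideoInfo || media.formatPtr == IntPtr.Zero)
    {
        throw new NotSupportedException("Unknown grabber media format");
    }

    VideoInfoHeader videoInfo =
        (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
    _videoWidth = videoInfo.BmiHeader.Width;
    _videoHeight = videoInfo.BmiHeader.Height;
    _stride = _videoWidth * (videoInfo.BmiHeader.BitCount / 8);

    DsUtils.FreeAMMediaType(media);
}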
public void InitDevice(DsDevice device, int iWidth, int iHeight) { int hr; object camDevice; Guid iid = typeof(IBaseFilter).GUID; device.Mon.BindToObject(null, null, ref iid, out camDevice); IBaseFilter camFilter = camDevice as IBaseFilter; m_CameraControl = camFilter as IAMCameraControl; m_VideoControl = camFilter as IAMVideoProcAmp; ISampleGrabber sampGrabber = null; graphBuilder = (IGraphBuilder) new FilterGraph(); //Create the Capture Graph Builder ICaptureGraphBuilder2 captureGraphBuilder = null; captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Attach the filter graph to the capture graph hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder); DsError.ThrowExceptionForHR(hr); //Add the Video input device to the graph hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber); DsError.ThrowExceptionForHR(hr); // Configure the sample grabber sampGrabber = new SampleGrabber() as ISampleGrabber; ConfigureSampleGrabber(sampGrabber); IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter; //Add the Video compressor filter to the graph hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber); DsError.ThrowExceptionForHR(hr); IBaseFilter nullRender = new NullRenderer() as IBaseFilter; graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber); InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight); hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampGrabber); Marshal.ReleaseComObject(sampGrabber); Marshal.ReleaseComObject(captureGraphBuilder); }
private void PlatformDispose(bool disposing) { if (_topology != null) { _topology.Dispose(); _topology = null; } if (SampleGrabber != null) { SampleGrabber.Dispose(); SampleGrabber = null; } }
public FrameGrabber(DsDevice camDevice) { IFilterGraph2 filterGraph; ICaptureGraphBuilder2 graphBuilder; IBaseFilter camBase, nullRenderer; ISampleGrabber sampleGrabber; filterGraph = new FilterGraph() as IFilterGraph2; mediaCtrl = filterGraph as IMediaControl; graphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2; HRCheck(graphBuilder.SetFiltergraph(filterGraph)); // Add camera HRCheck(filterGraph.AddSourceFilterForMoniker( camDevice.Mon, null, camDevice.Name, out camBase)); // Add sample grabber sampleGrabber = new SampleGrabber() as ISampleGrabber; var mType = new AMMediaType() { majorType = MediaType.Video, subType = MediaSubType.RGB24, formatType = FormatType.VideoInfo }; HRCheck(sampleGrabber.SetMediaType(mType)); DsUtils.FreeAMMediaType(mType); HRCheck(sampleGrabber.SetCallback(this, 1)); HRCheck(filterGraph.AddFilter(sampleGrabber as IBaseFilter, "CamGrabber")); // Add null renderer nullRenderer = new NullRenderer() as IBaseFilter; HRCheck(filterGraph.AddFilter(nullRenderer, "Null renderer")); // Render the webcam through the grabber and the renderer HRCheck(graphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camBase, sampleGrabber as IBaseFilter, nullRenderer)); // Get resulting picture size mType = new AMMediaType(); HRCheck(sampleGrabber.GetConnectedMediaType(mType)); if (mType.formatType != FormatType.VideoInfo || mType.formatPtr == IntPtr.Zero) { throw new NotSupportedException("Unknown grabber media format"); } var videoInfoHeader = Marshal.PtrToStructure(mType.formatPtr, typeof(VideoInfoHeader)) as VideoInfoHeader; width = videoInfoHeader.BmiHeader.Width; height = videoInfoHeader.BmiHeader.Height; Console.WriteLine("{0} x {1}", width, height); stride = width * (videoInfoHeader.BmiHeader.BitCount / 8); DsUtils.FreeAMMediaType(mType); HRCheck(mediaCtrl.Run()); }
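HRCheck above is presumably just a guard around the returned HRESULT; a one-line sketch using DirectShowLib's helper:

// Assumed helper: throw for any failing HRESULT.
private static void HRCheck(int hr)
{
    DsError.ThrowExceptionForHR(hr);
}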
private static SampleGrabber CreateSampleGrabber(Guid majorType, Guid subType, Guid formatType, SampleGrabberGraph.SampleGrabberCallback.BufferCBEventHandler callback) { SampleGrabber sampleGrabber = new SampleGrabber(); ISampleGrabber grabber = sampleGrabber as ISampleGrabber; grabber.SetMediaType(new AMMediaType { majorType = majorType, subType = subType, formatType = formatType }); grabber.SetBufferSamples(false); grabber.SetOneShot(false); grabber.SetCallback(new SampleGrabberCallback() { OnBuffer = callback }, 1); // 0 = Sample, 1 = Buffer return(sampleGrabber); }
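The SampleGrabberCallback class wired up here is not included in the listing; its likely shape is a thin ISampleGrabberCB implementation that forwards BufferCB to a delegate. The delegate signature below is inferred from how OnBuffer is assigned and is therefore an assumption:

public class SampleGrabberCallback : ISampleGrabberCB
{
    // Signature inferred from the OnBuffer assignment in CreateSampleGrabber.
    public delegate void BufferCBEventHandler(double sampleTime, IntPtr buffer, int bufferLen);

    public BufferCBEventHandler OnBuffer;

    // Not used here: SetCallback(..., 1) selects BufferCB rather than SampleCB.
    public int SampleCB(double sampleTime, IMediaSample sample)
    {
        Marshal.ReleaseComObject(sample);
        return 0;
    }

    public int BufferCB(double sampleTime, IntPtr buffer, int bufferLen)
    {
        if (OnBuffer != null)
        {
            OnBuffer(sampleTime, buffer, bufferLen);
        }
        return 0;
    }
}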
public void GetInterfaces() { graph = (IGraphBuilder)(new FilterGraph()); pGraphBuilder = (ICaptureGraphBuilder2)(new CaptureGraphBuilder2()); mediaControl = (IMediaControl)graph; videoWindow = (IVideoWindow)graph; mediaEventEx = (IMediaEventEx)graph; renderFilter = (IBaseFilter) new VideoMixingRenderer9(); pSampleGrabber = new SampleGrabber(); i_grabber = pSampleGrabber as ISampleGrabber; var mt = new AMMediaType(); mt.majorType = MediaType.Video; mt.subType = MediaSubType.RGB24; i_grabber.SetMediaType(mt); //send notification messages to the control window int hr = mediaEventEx.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero); DsError.ThrowExceptionForHR(hr); DsUtils.FreeAMMediaType(mt); }
public void DeclareClasses() { Audio = new Audio(this); Capture = new Capture(this); Crossbar = new Crossbar(this); Debug = new Debug(this); Resolution = new Resolution(this); Var = new Variables(this); User = new User(this); Graph = new Graph(); GraphPin = new Pin(this); GraphFilter = new Filter(this); AviRender = new AviRender(this); SampleGrabber = new SampleGrabber(this); SmartTee = new GraphSmartTee(this); GraphCrossbar = new GraphCrossbar(this); Close = new Close(this); Display = new Display(this); GraphBuild = new GraphMap(this); GraphResolution = new GraphResolution(this); }
private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin pCaptureOut = null; IPin pSampleIn = null; IPin pRenderIn = null; m_FilterGraph = new FilterGraph() as IFilterGraph2; try { #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0); if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } if (m_pinStill == null) { IPin pRaw = null; IPin pSmart = null; m_VidControl = null; IBaseFilter iSmartTee = (IBaseFilter) new SmartTee(); try { hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee"); DsError.ThrowExceptionForHR(hr); pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0); hr = m_FilterGraph.Connect(pRaw, pSmart); DsError.ThrowExceptionForHR(hr); m_pinStill = DsFindPin.ByName(iSmartTee, "Preview"); pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture"); if (iHeight + iWidth + iBPP > 0) { SetConfigParms(pRaw, iWidth, iHeight, iBPP); } } finally { if (pRaw != null) { Marshal.ReleaseComObject(pRaw); } if (pRaw != pSmart) { Marshal.ReleaseComObject(pSmart); } if (pRaw != iSmartTee) { Marshal.ReleaseComObject(iSmartTee); } } } else { m_VidControl = capFilter as IAMVideoControl; pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); if (iHeight + iWidth + iBPP > 0) { SetConfigParms(m_pinStill, iWidth, iHeight, iBPP); } } sampGrabber = new SampleGrabber() as ISampleGrabber; IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter; hr = m_FilterGraph.AddFilter(pRenderer, "Renderer"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0); hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); } else { hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); } SaveSizeInfo(sampGrabber); IMediaControl mediaCtrl = m_FilterGraph as IMediaControl; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); pCaptureOut = null; } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); pRenderIn = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } }
/// <summary> /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice. /// </summary> /// <param name="FileName">The video file to open</param> /// <param name="graphicsDevice">XNA Graphics Device</param> public XNAPlayer(Feel feel, string FileName, GraphicsDevice graphicsDevice, Action callback) { Utils.RunAsynchronously(() => { try { // Set video state currentState = VideoState.Stopped; // Store Filename filename = FileName; // Open DirectShow Interfaces InitInterfaces(); // Create a SampleGrabber Filter and add it to the FilterGraph SampleGrabber sg = new SampleGrabber(); ISampleGrabber sampleGrabber = (ISampleGrabber)sg; DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber")); // Setup Media type info for the SampleGrabber AMMediaType mt = new AMMediaType(); mt.majorType = MEDIATYPE_Video; // Video mt.subType = MEDIASUBTYPE_RGB24; // RGB24 mt.formatType = FORMAT_VideoInfo; // VideoInfo DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt)); // Construct the rest of the FilterGraph DsError.ThrowExceptionForHR(gb.RenderFile(filename, null)); // Set SampleGrabber Properties DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true)); DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false)); DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1)); // Hide Default Video Window IVideoWindow pVideoWindow = (IVideoWindow)gb; DsError.ThrowExceptionForHR(pVideoWindow.put_MessageDrain(IntPtr.Zero)); DsError.ThrowExceptionForHR(pVideoWindow.put_WindowState(WindowState.Hide)); DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False)); // Create AMMediaType to capture video information AMMediaType MediaType = new AMMediaType(); DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType)); VideoInfoHeader pVideoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader); // Store video information videoHeight = pVideoHeader.BmiHeader.Height; videoWidth = pVideoHeader.BmiHeader.Width; avgTimePerFrame = pVideoHeader.AvgTimePerFrame; bitRate = pVideoHeader.BitRate; DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration)); // Create byte arrays to hold video data videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel) bgrData = new byte[(videoHeight * videoWidth) * 4]; // BGR24 format (3 bytes per pixel + 1 for safety) // Create Output Frame Texture2D with the height and width of the video outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color); feel.RunOnUIThread(callback); } catch { feel.ShowToast("Unable to Load or Play the video file"); } }, () => { }); }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP) { ISampleGrabber sampGrabber = null; IPin pCaptureOut = null; IPin pRenderIn = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2)new FilterGraph(); try { // add the video input device IBaseFilter capFilter; int hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); //if (iHeight + iWidth + iBPP > 0) //{ //SetConfigParms(pRaw, iWidth, iHeight, iBPP); //} pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure the sample grabber IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ares Video Grabber"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); // Learn the video properties SaveSizeInfo(sampGrabber); // Start the graph IMediaControl mediaCtrl = (IMediaControl)m_FilterGraph; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); } } }
public void CaptureVideo() { pictureBox1.Image = null; int hr = 0; IBaseFilter sourceFilter = null; ISampleGrabber sampleGrabber = null; try { // Get DirectShow interfaces GetInterfaces(); // Attach the filter graph to the capture graph hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder); DsError.ThrowExceptionForHR(hr); // Use the system device enumerator and class enumerator to find // a video capture/preview device, such as a desktop USB video camera. sourceFilter = FindCaptureDevice(); // Add Capture filter to graph. hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture"); DsError.ThrowExceptionForHR(hr); // Initialize SampleGrabber. sampleGrabber = new SampleGrabber() as ISampleGrabber; // Configure SampleGrabber. Add preview callback. ConfigureSampleGrabber(sampleGrabber); // Add SampleGrabber to graph. hr = this.graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "Frame Callback"); DsError.ThrowExceptionForHR(hr); // Configure preview settings. SetConfigParams(this.captureGraphBuilder, sourceFilter, _previewFPS, _previewWidth, _previewHeight); // Render the preview hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (sampleGrabber as IBaseFilter), null); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampleGrabber); // Set video window style and position SetupVideoWindow(); // Add our graph to the running object table, which will allow // the GraphEdit application to "spy" on our graph rot = new DsROTEntry(this.graphBuilder); // Start previewing video data hr = this.mediaControl.Run(); DsError.ThrowExceptionForHR(hr); } catch { MessageBox.Show("An unrecoverable error has occurred."); } finally { if (sourceFilter != null) { Marshal.ReleaseComObject(sourceFilter); sourceFilter = null; } if (sampleGrabber != null) { Marshal.ReleaseComObject(sampleGrabber); sampleGrabber = null; } } }
/// <summary> /// Creates a new Video Player. Automatically creates the required Texture2D on the specified GraphicsDevice. /// </summary> /// <param name="FileName">The video file to open</param> /// <param name="graphicsDevice">XNA Graphics Device</param> public VideoPlayer(string FileName, GraphicsDevice graphicsDevice) { try { // Set video state currentState = VideoState.Stopped; // Store Filename filename = FileName; // Open DirectShow Interfaces InitInterfaces(); // Create a SampleGrabber Filter and add it to the FilterGraph SampleGrabber sg = new SampleGrabber(); ISampleGrabber sampleGrabber = (ISampleGrabber)sg; DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber")); // Setup Media type info for the SampleGrabber AMMediaType mt = new AMMediaType(); //mt.majorType = MediaType.Video; // Video mt.majorType = MEDIATYPE_Video; mt.subType = MediaSubType.RGB24; // RGB24 mt.formatType = FormatType.VideoInfo; // VideoInfo DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt)); // Construct the rest of the FilterGraph DsError.ThrowExceptionForHR(gb.RenderFile(filename, null)); // Set SampleGrabber Properties DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true)); DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false)); DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1)); // Hide Default Video Window IVideoWindow pVideoWindow = (IVideoWindow)gb; DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False)); // Create AMMediaType to capture video information AMMediaType MediaType = new AMMediaType(); DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType)); VideoInfoHeader pVideoHeader = new VideoInfoHeader(); Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader); // Store video information videoHeight = pVideoHeader.BmiHeader.Height; videoWidth = pVideoHeader.BmiHeader.Width; avgTimePerFrame = pVideoHeader.AvgTimePerFrame; bitRate = pVideoHeader.BitRate; DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration)); // Create byte arrays to hold video data videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel) bgrData = new byte[(videoHeight * videoWidth) * 3]; // BGR24 format (3 bytes per pixel) // Create Output Frame Texture2D with the height and width of the video outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, false, SurfaceFormat.Color); } catch { throw new Exception("Unable to Load or Play the video file"); } }
private void SetupSampleGrabber() { if (_graph == null) { return; } int hr; //Get directsound filter IBaseFilter directSoundFilter; hr = _graph.FindFilterByName(DEFAULT_AUDIO_RENDERER_NAME, out directSoundFilter); DsError.ThrowExceptionForHR(hr); IPin rendererPinIn = DsFindPin.ByConnectionStatus(directSoundFilter, PinConnectedStatus.Connected, 0); if (rendererPinIn != null) { IPin audioPinOut; hr = rendererPinIn.ConnectedTo(out audioPinOut); DsError.ThrowExceptionForHR(hr); if (audioPinOut != null) { //Disconect audio decoder to directsound renderer hr = audioPinOut.Disconnect(); DsError.ThrowExceptionForHR(hr); hr = _graph.RemoveFilter(directSoundFilter); DsError.ThrowExceptionForHR(hr); //Add Sample Grabber ISampleGrabber sampleGrabber = new SampleGrabber() as ISampleGrabber; hr = sampleGrabber.SetCallback(this, 1); DsError.ThrowExceptionForHR(hr); AMMediaType media; media = new AMMediaType(); media.majorType = MediaType.Audio; media.subType = MediaSubType.PCM; media.formatType = FormatType.WaveEx; hr = sampleGrabber.SetMediaType(media); DsError.ThrowExceptionForHR(hr); IPin sampleGrabberPinIn = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Input, 0); IPin sampleGrabberPinOut = DsFindPin.ByDirection((IBaseFilter)sampleGrabber, PinDirection.Output, 0); hr = _graph.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber"); DsError.ThrowExceptionForHR(hr); PinInfo pinInfo; hr = audioPinOut.QueryPinInfo(out pinInfo); DsError.ThrowExceptionForHR(hr); FilterInfo filterInfo; hr = pinInfo.filter.QueryFilterInfo(out filterInfo); DsError.ThrowExceptionForHR(hr); hr = _graph.Connect(audioPinOut, sampleGrabberPinIn); DsError.ThrowExceptionForHR(hr); //Add null renderer NullRenderer nullRenderer = new NullRenderer(); hr = _graph.AddFilter((IBaseFilter)nullRenderer, "NullRenderer"); DsError.ThrowExceptionForHR(hr); IPin nullRendererPinIn = DsFindPin.ByDirection((IBaseFilter)nullRenderer, PinDirection.Input, 0); hr = _graph.Connect(sampleGrabberPinOut, nullRendererPinIn); DsError.ThrowExceptionForHR(hr); _audioEngine.Setup(this.GetSampleGrabberFormat(sampleGrabber)); } } }
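GetSampleGrabberFormat, used on the last line above, is not shown; it presumably reads the PCM format that was negotiated on the grabber pin. A sketch, assuming it returns DirectShowLib's WaveFormatEx:

// Assumed helper: read the negotiated PCM format so the audio engine knows
// channels, sample rate and bit depth.
private WaveFormatEx GetSampleGrabberFormat(ISampleGrabber sampleGrabber)
{
    AMMediaType media = new AMMediaType();
    int hr = sampleGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    if (media.formatType != FormatType.WaveEx || media.formatPtr == IntPtr.Zero)
    {
        throw new NotSupportedException("Unexpected sample grabber media format");
    }

    WaveFormatEx format =
        (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));
    DsUtils.FreeAMMediaType(media);
    return format;
}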
private void BuildGraph(DirectShowLib.DsDevice dsDevice) { int hr = 0; pGraph = new FilterGraph() as IFilterGraph2; //graph builder ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); try { hr = pBuilder.SetFiltergraph(pGraph); DsError.ThrowExceptionForHR(hr); // Add camera IBaseFilter camera; //hr = pGraph.FindFilterByName(dsDevice.Name, out camera); hr = ((IFilterGraph2)pGraph).AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out camera); DsError.ThrowExceptionForHR(hr); hr = pGraph.AddFilter(camera, "camera"); DsError.ThrowExceptionForHR(hr); // Set format for camera AMMediaType pmt = new AMMediaType(); pmt.majorType = MediaType.Video; pmt.subType = MediaSubType.MJPG; pmt.formatType = FormatType.VideoInfo; pmt.fixedSizeSamples = true; pmt.formatSize = 88; pmt.sampleSize = 2764800; pmt.temporalCompression = false; VideoInfoHeader format = new VideoInfoHeader(); format.SrcRect = new DsRect(); format.TargetRect = new DsRect(); format.BitRate = 663552000; format.AvgTimePerFrame = 333333; format.BmiHeader = new BitmapInfoHeader(); format.BmiHeader.Size = 40; format.BmiHeader.Width = 1280; format.BmiHeader.Height = 720; format.BmiHeader.Planes = 1; format.BmiHeader.BitCount = 24; format.BmiHeader.Compression = 1196444237; format.BmiHeader.ImageSize = 2764800; pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(format)); Marshal.StructureToPtr(format, pmt.formatPtr, false); hr = ((IAMStreamConfig)DsFindPin.ByCategory(camera, PinCategory.Capture, 0)).SetFormat(pmt); //hr = ((IAMStreamConfig)GetPin(pUSB20Camera, "Capture")).SetFormat(pmt); DsUtils.FreeAMMediaType(pmt); DsError.ThrowExceptionForHR(hr); //add MJPG Decompressor IBaseFilter pMJPGDecompressor = (IBaseFilter)new MjpegDec(); hr = pGraph.AddFilter(pMJPGDecompressor, "MJPG Decompressor"); DsError.ThrowExceptionForHR(hr); //add color space converter IBaseFilter pColorSpaceConverter = (IBaseFilter)new Colour(); hr = pGraph.AddFilter(pColorSpaceConverter, "Color space converter"); DsError.ThrowExceptionForHR(hr); // Connect camera and MJPEG Decomp //hr = pGraph.ConnectDirect(GetPin(pUSB20Camera, "Capture"), GetPin(pMJPGDecompressor, "XForm In"), null); hr = pGraph.ConnectDirect(DsFindPin.ByCategory(camera, PinCategory.Capture, 0), DsFindPin.ByName(pMJPGDecompressor, "XForm In"), null); DsError.ThrowExceptionForHR(hr); // Connect MJPG Decomp and color space converter hr = pGraph.ConnectDirect(DsFindPin.ByName(pMJPGDecompressor, "XForm Out"), DsFindPin.ByName(pColorSpaceConverter, "Input"), null); DsError.ThrowExceptionForHR(hr); //add SampleGrabber //IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber)); //hr = pGraph.AddFilter(pSampleGrabber, "SampleGrabber"); IBaseFilter sampleGrabber = new SampleGrabber() as IBaseFilter; hr = pGraph.AddFilter(sampleGrabber, "Sample grabber"); DsError.ThrowExceptionForHR(hr); // Configure the samplegrabber AMMediaType pSampleGrabber_pmt = new AMMediaType(); pSampleGrabber_pmt.majorType = MediaType.Video; pSampleGrabber_pmt.subType = MediaSubType.ARGB32; pSampleGrabber_pmt.formatType = FormatType.VideoInfo; pSampleGrabber_pmt.fixedSizeSamples = true; pSampleGrabber_pmt.formatSize = 88; pSampleGrabber_pmt.sampleSize = 3686400; pSampleGrabber_pmt.temporalCompression = false; VideoInfoHeader pSampleGrabber_format = new VideoInfoHeader(); pSampleGrabber_format.SrcRect = new DsRect(); pSampleGrabber_format.TargetRect = new DsRect(); pSampleGrabber_format.BitRate = 884736885; 
pSampleGrabber_format.AvgTimePerFrame = 333333; pSampleGrabber_format.BmiHeader = new BitmapInfoHeader(); pSampleGrabber_format.BmiHeader.Size = 40; pSampleGrabber_format.BmiHeader.Width = 1280; pSampleGrabber_format.BmiHeader.Height = 720; pSampleGrabber_format.BmiHeader.Planes = 1; pSampleGrabber_format.BmiHeader.BitCount = 32; //pSampleGrabber_format.BmiHeader.Compression = 1196444237; pSampleGrabber_format.BmiHeader.ImageSize = 3686400; pSampleGrabber_pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(pSampleGrabber_format)); Marshal.StructureToPtr(pSampleGrabber_format, pSampleGrabber_pmt.formatPtr, false); hr = ((ISampleGrabber)sampleGrabber).SetMediaType(pSampleGrabber_pmt); DsError.ThrowExceptionForHR(hr); //connect MJPG dec and SampleGrabber //hr = pGraph.ConnectDirect(GetPin(pMJPGDecompressor, "XForm Out"), GetPin(pSampleGrabber, "Input"), null); hr = pGraph.ConnectDirect(DsFindPin.ByName(pColorSpaceConverter, "XForm Out"), DsFindPin.ByName(sampleGrabber, "Input"), null); DsError.ThrowExceptionForHR(hr); //set callback hr = ((ISampleGrabber)sampleGrabber).SetCallback(this, 1); DsError.ThrowExceptionForHR(hr); } finally { // Clean this mess up! } }
public void CaptureVideo(DsDevice device, Resolution resolution) { int hr = 0; IBaseFilter sourceFilter = null; ISampleGrabber sampleGrabber = null; try { // Get DirectShow interfaces GetInterfaces(); // Attach the filter graph to the capture graph hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder); DsError.ThrowExceptionForHR(hr); // Use the system device enumerator and class enumerator to find // a video capture/preview device, such as a desktop USB video camera. sourceFilter = SelectCaptureDevice(device); // Get and set camera focus IAMCameraControl aMCameraControl = (IAMCameraControl)sourceFilter; if (aMCameraControl == null) { throw new ApplicationException("Cannot get IAMCameraControl"); } int iValue = 0; CameraControlFlags flag; aMCameraControl.Get(CameraControlProperty.Focus, out iValue, out flag); int min, max, pDelta, pDefault; aMCameraControl.GetRange(CameraControlProperty.Focus, out min, out max, out pDelta, out pDefault, out flag); aMCameraControl.Set(CameraControlProperty.Focus, 100, CameraControlFlags.Auto); // Add Capture filter to graph. hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture"); DsError.ThrowExceptionForHR(hr); // Initialize SampleGrabber. sampleGrabber = new SampleGrabber() as ISampleGrabber; // Configure SampleGrabber. Add preview callback. ConfigureSampleGrabber(sampleGrabber); // Add SampleGrabber to graph. hr = this.graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "Frame Callback"); DsError.ThrowExceptionForHR(hr); // Configure preview settings. _previewWidth = resolution.Width; _previewHeight = resolution.Height; SetConfigParams(this.captureGraphBuilder, sourceFilter, _previewFPS, _previewWidth, _previewHeight); // Render the preview hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (sampleGrabber as IBaseFilter), null); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampleGrabber); // Set video window style and position SetupVideoWindow(); // Add our graph to the running object table, which will allow // the GraphEdit application to "spy" on our graph rot = new DsROTEntry(this.graphBuilder); // Start previewing video data hr = this.mediaControl.Run(); DsError.ThrowExceptionForHR(hr); } catch { Console.WriteLine("An unrecoverable error has occurred."); } finally { if (sourceFilter != null) { Marshal.ReleaseComObject(sourceFilter); sourceFilter = null; } if (sampleGrabber != null) { Marshal.ReleaseComObject(sampleGrabber); sampleGrabber = null; } } }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(string FileName, Control hWin) { int hr; IBaseFilter ibfRenderer = null; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin iPinInFilter = null; IPin iPinOutFilter = null; IPin iPinInDest = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif try { // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Add the video source hr = m_FilterGraph.AddSourceFilter(FileName, "Ds.NET FileFilter", out capFilter); DsError.ThrowExceptionForHR(hr); // Hopefully this will be the video pin IPin iPinOutSource = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); iPinInFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); iPinOutFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); hr = m_FilterGraph.Connect(iPinOutSource, iPinInFilter); DsError.ThrowExceptionForHR(hr); // Get the default video renderer ibfRenderer = (IBaseFilter) new VideoRendererDefault(); // Add it to the graph hr = m_FilterGraph.AddFilter(ibfRenderer, "Ds.NET VideoRendererDefault"); DsError.ThrowExceptionForHR(hr); iPinInDest = DsFindPin.ByDirection(ibfRenderer, PinDirection.Input, 0); // Connect the graph. Many other filters automatically get added here hr = m_FilterGraph.Connect(iPinOutFilter, iPinInDest); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampGrabber); // Set the output window IVideoWindow videoWindow = m_FilterGraph as IVideoWindow; hr = videoWindow.put_Owner(hWin.Handle); DsError.ThrowExceptionForHR(hr); hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings); DsError.ThrowExceptionForHR(hr); hr = videoWindow.put_Visible(OABool.True); DsError.ThrowExceptionForHR(hr); Rectangle rc = hWin.ClientRectangle; hr = videoWindow.SetWindowPosition(0, 0, rc.Right, rc.Bottom); DsError.ThrowExceptionForHR(hr); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (ibfRenderer != null) { Marshal.ReleaseComObject(ibfRenderer); ibfRenderer = null; } if (iPinInFilter != null) { Marshal.ReleaseComObject(iPinInFilter); iPinInFilter = null; } if (iPinOutFilter != null) { Marshal.ReleaseComObject(iPinOutFilter); iPinOutFilter = null; } if (iPinInDest != null) { Marshal.ReleaseComObject(iPinInDest); iPinInDest = null; } } }
/// <summary> /// Start using the camera /// </summary> /// <returns>Indicate if the webcam was able to start</returns> public bool Start() { if (!started) { DsDevice[] devices = GetDevices(); if (devices.Length > 0) { DsDevice dev = devices[0]; // Initialize camera int hr; IBaseFilter capFilter = null; ISampleGrabber sampGrabber = null; IPin pCaptureOut = null; IPin pSampleIn = null; IPin pRenderIn = null; m_FilterGraph = new FilterGraph() as IFilterGraph2; try { hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } // Still haven't found one. Need to put a splitter in so we have // one stream to capture the bitmap from, and one to display. Ok, we // don't *have* to do it that way, but we are going to anyway. if (m_pinStill == null) { IPin pRaw = null; IPin pSmart = null; // There is no still pin m_VidControl = null; // Add a splitter IBaseFilter iSmartTee = (IBaseFilter) new SmartTee(); try { hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee"); DsError.ThrowExceptionForHR(hr); // Find the find the capture pin from the video device and the // input pin for the splitter, and connnect them pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0); hr = m_FilterGraph.Connect(pRaw, pSmart); DsError.ThrowExceptionForHR(hr); // Now set the capture and still pins (from the splitter) m_pinStill = DsFindPin.ByName(iSmartTee, "Preview"); pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture"); // If any of the default config items are set, perform the config // on the actual video device (rather than the splitter) if (captureHeight + captureWidth > 0) { SetConfigParms(pRaw, captureWidth, captureHeight, 24); } } finally { if (pRaw != null) { Marshal.ReleaseComObject(pRaw); } if (pRaw != pSmart) { Marshal.ReleaseComObject(pSmart); } if (pRaw != iSmartTee) { Marshal.ReleaseComObject(iSmartTee); } } } else { // Get a control pointer (used in Click()) m_VidControl = capFilter as IAMVideoControl; pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); // If any of the default config items are set if (captureHeight + captureWidth > 0) { SetConfigParms(m_pinStill, captureWidth, captureHeight, 24); } } // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure the sample grabber IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Get the default video renderer IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter; hr = m_FilterGraph.AddFilter(pRenderer, "Renderer"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); } else { // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); 
DsError.ThrowExceptionForHR(hr); } SaveSizeInfo(sampGrabber); ConfigVideoWindow(pictureBox); IMediaControl mediaCtrl = m_FilterGraph as IMediaControl; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); pCaptureOut = null; } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); pRenderIn = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } m_PictureReady = new ManualResetEvent(false); timer.Interval = (int)(1000 / framesPerSecond); timer.Start(); return(true); } } else { return(true); } return(false); }
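The Start() method above waits on m_PictureReady, but the ISampleGrabberCB members that signal it are not shown. A hedged sketch of what they might look like; m_frameBytes is a hypothetical field, only m_PictureReady comes from the original code:

private byte[] m_frameBytes;  // assumed field: managed copy of the latest frame

int ISampleGrabberCB.SampleCB(double sampleTime, IMediaSample sample)
{
    // Not used: the grabber is configured for BufferCB.
    Marshal.ReleaseComObject(sample);
    return 0;
}

int ISampleGrabberCB.BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
{
    // Lazily (re)allocate a managed copy of the frame, then copy and signal.
    if (m_frameBytes == null || m_frameBytes.Length != bufferLen)
    {
        m_frameBytes = new byte[bufferLen];
    }
    Marshal.Copy(pBuffer, m_frameBytes, 0, bufferLen);
    m_PictureReady.Set();  // wakes whoever is waiting for the next frame
    return 0;
}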
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iSampleRate, int iChannels) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; IBaseFilter baseGrabFlt = null; IBaseFilter nullrenderer = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2) new FilterGraph(); m_mediaCtrl = m_FilterGraph as IMediaControl; try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Get the SampleGrabber interface sampGrabber = (ISampleGrabber) new SampleGrabber(); // Start building the graph hr = capGraph.SetFiltergraph(m_FilterGraph); DsError.ThrowExceptionForHR(hr); // Add the audio device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Audio input", out capFilter); DsError.ThrowExceptionForHR(hr); // If any of the default config items are set if (iSampleRate + iChannels > 0) { SetConfigParms(capGraph, capFilter, iSampleRate, iChannels); } // Configure the sample grabber baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the capture filter to the sample grabber // Hopefully this will be the audio pin, we could check by reading its mediatype IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); // Get the input pin from the sample grabber IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Add the null renderer to the graph nullrenderer = new NullRenderer() as IBaseFilter; hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the sample grabber to the null renderer iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Read and cache the resulting settings SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }
/// <summary> /// Create a new filter graph and add filters (devices, compressors, misc), /// but leave the filters unconnected. Call RenderGraph() /// to connect the filters. /// </summary> void CreateGraph() { //Skip if already created if ((int)_actualGraphState < (int)GraphState.Created) { // Make a new filter graph _graphBuilder = (IGraphBuilder) new FilterGraph(); // Get the Capture Graph Builder _captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); // Link the CaptureGraphBuilder to the filter graph var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } var comObj = new SampleGrabber(); _sampGrabber = (ISampleGrabber)comObj; _baseGrabFlt = (IBaseFilter)_sampGrabber; var media = new AMMediaType(); // Get the video device and add it to the filter graph if (VideoDevice != null) { _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString); hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device"); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } media.majorType = MediaType.Video; media.subType = MediaSubType.RGB32; //RGB24; media.formatType = FormatType.VideoInfo; media.temporalCompression = true; //New hr = _sampGrabber.SetMediaType(media); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber"); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } } // Retrieve the stream control interface for the video device // FindInterface will also add any required filters // (WDM devices in particular may need additional // upstream filters to function). // Try looking for an interleaved media type var cat = PinCategory.Capture; var med = MediaType.Interleaved; var iid = typeof(IAMStreamConfig).GUID; hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out var o); if (hr != 0) { // If not found, try looking for a video media type med = MediaType.Video; hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out o); if (hr != 0) { // ReSharper disable once RedundantAssignment o = null; } } //VideoStreamConfig = o as IAMStreamConfig; // Retreive the media control interface (for starting/stopping graph) _mediaControl = (IMediaControl)_graphBuilder; // Reload any video crossbars //if (videoSources != null) videoSources.Dispose(); videoSources = null; _videoInfoHeader = Marshal.PtrToStructure <VideoInfoHeader>(media.formatPtr); Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero; hr = _sampGrabber.SetBufferSamples(true); if (hr == 0) { hr = _sampGrabber.SetOneShot(false); } if (hr == 0) { hr = _sampGrabber.SetCallback(null, 0); } if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } } // Update the state now that we are done _actualGraphState = GraphState.Created; }
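The comment at the top of CreateGraph defers connecting the filters to RenderGraph(), which is not part of this listing. A minimal sketch of the video path it might build, reusing the fields created above (the real library also handles compressors, audio and the preview window):

private void RenderGraph()
{
    // Connect capture device -> sample grabber -> renderer
    // (passing null lets RenderStream pick a default renderer).
    var hr = _captureGraphBuilder.RenderStream(
        PinCategory.Capture, MediaType.Video,
        _videoDeviceFilter, _baseGrabFlt, null);
    if (hr < 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }
}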
private void SetupGraph(string FileName) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter baseGrabFlt = null; IBaseFilter capFilter = null; IBaseFilter nullrenderer = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; m_mediaCtrl = m_FilterGraph as IMediaControl; m_MediaEvent = m_FilterGraph as IMediaEvent; IMediaFilter mediaFilt = m_FilterGraph as IMediaFilter; try { // Add the video source hr = m_FilterGraph.AddSourceFilter(FileName, "Ds.NET FileFilter", out capFilter); DsError.ThrowExceptionForHR(hr); // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the file filter to the sample grabber // Hopefully this will be the audio pin, we could check by reading it's mediatype IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); // Get the input pin from the sample grabber IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Add the null renderer to the graph nullrenderer = new NullRenderer() as IBaseFilter; hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the sample grabber to the null renderer iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Turn off the clock. This causes the frames to be sent // thru the graph as fast as possible hr = mediaFilt.SetSyncSource(null); DsError.ThrowExceptionForHR(hr); // Read and cache the image sizes SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (nullrenderer != null) { Marshal.ReleaseComObject(nullrenderer); nullrenderer = null; } } }
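After SetupGraph has removed the sync source, a caller typically runs the graph and waits for EC_Complete; a usage sketch built on the m_mediaCtrl and m_MediaEvent fields above (the method name ProcessFile is hypothetical):

public void ProcessFile()
{
    int hr = m_mediaCtrl.Run();
    DsError.ThrowExceptionForHR(hr);

    // With the sync source removed, the graph runs as fast as the CPU allows;
    // EC_Complete fires once the file has been pushed through the grabber.
    EventCode evCode;
    hr = m_MediaEvent.WaitForCompletion(-1, out evCode);
    DsError.ThrowExceptionForHR(hr);

    hr = m_mediaCtrl.Stop();
    DsError.ThrowExceptionForHR(hr);
}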
private void BuildGraph(DirectShowLib.DsDevice dsDevice) { int hr = 0; pGraph = new FilterGraph() as IFilterGraph2; //graph builder ICaptureGraphBuilder2 pBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); try { hr = pBuilder.SetFiltergraph(pGraph); DsError.ThrowExceptionForHR(hr); // Add camera IBaseFilter camera; //hr = pGraph.FindFilterByName(dsDevice.Name, out camera); hr = ((IFilterGraph2)pGraph).AddSourceFilterForMoniker(dsDevice.Mon, null, dsDevice.Name, out camera); DsError.ThrowExceptionForHR(hr); hr = pGraph.AddFilter(camera, "camera"); DsError.ThrowExceptionForHR(hr); // Set format for camera AMMediaType pmt = new AMMediaType(); pmt.majorType = MediaType.Video; pmt.subType = MediaSubType.YUY2; pmt.formatType = FormatType.VideoInfo; pmt.fixedSizeSamples = true; pmt.formatSize = 88; pmt.sampleSize = 829440; pmt.temporalCompression = false; VideoInfoHeader format = new VideoInfoHeader(); format.SrcRect = new DsRect(); format.TargetRect = new DsRect(); format.BitRate = 20736000; format.AvgTimePerFrame = 400000; format.BmiHeader = new BitmapInfoHeader(); format.BmiHeader.Size = 40; format.BmiHeader.Width = 720; format.BmiHeader.Height = 576; format.BmiHeader.Planes = 1; format.BmiHeader.BitCount = 24; format.BmiHeader.Compression = 844715353; format.BmiHeader.ImageSize = 827440; pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(format)); Marshal.StructureToPtr(format, pmt.formatPtr, false); hr = ((IAMStreamConfig)DsFindPin.ByCategory(camera, PinCategory.Capture, 0)).SetFormat(pmt); //hr = ((IAMStreamConfig)GetPin(pUSB20Camera, "Capture")).SetFormat(pmt); DsUtils.FreeAMMediaType(pmt); DsError.ThrowExceptionForHR(hr); IAMCrossbar crossBar = null; object dummy; hr = pBuilder.FindInterface(PinCategory.Capture, MediaType.Video, camera, typeof(IAMCrossbar).GUID, out dummy); if( hr >=0) { crossBar = (IAMCrossbar)dummy; int oPin, iPin; int ovLink, ivLink; ovLink = ivLink = 0; crossBar.get_PinCounts(out oPin, out iPin); int pIdxRel; PhysicalConnectorType physicalConType; for (int i = 0; i < iPin; i++) { crossBar.get_CrossbarPinInfo(true, i, out pIdxRel, out physicalConType); if (physicalConType == PhysicalConnectorType.Video_Composite) ivLink = i; } for (int i = 0; i < oPin; i++) { crossBar.get_CrossbarPinInfo(false, i, out pIdxRel, out physicalConType); if (physicalConType == PhysicalConnectorType.Video_VideoDecoder) ovLink = i; } try { crossBar.Route(ovLink, ivLink); } catch { throw new Exception("Failed to get IAMCrossbar"); } } //add AVI Decompressor IBaseFilter pAVIDecompressor = (IBaseFilter)new AVIDec(); hr = pGraph.AddFilter(pAVIDecompressor, "AVI Decompressor"); //add color space converter IBaseFilter pColorSpaceConverter = (IBaseFilter)new Colour(); hr = pGraph.AddFilter(pColorSpaceConverter, "Color space converter"); DsError.ThrowExceptionForHR(hr); // Connect camera and AVI Decomp hr = pGraph.ConnectDirect(DsFindPin.ByCategory(camera, PinCategory.Capture, 0), DsFindPin.ByName(pAVIDecompressor, "XForm In"), null); DsError.ThrowExceptionForHR(hr); // Connect AVI Decomp and color space converter hr = pGraph.ConnectDirect(DsFindPin.ByName(pAVIDecompressor, "XForm Out"), DsFindPin.ByName(pColorSpaceConverter, "Input"), null); DsError.ThrowExceptionForHR(hr); //add SampleGrabber //IBaseFilter pSampleGrabber = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber)); //hr = pGraph.AddFilter(pSampleGrabber, "SampleGrabber"); IBaseFilter sampleGrabber = new SampleGrabber() as IBaseFilter; hr = pGraph.AddFilter(sampleGrabber, "Sample grabber"); 
DsError.ThrowExceptionForHR(hr); // Configure the samplegrabber AMMediaType pSampleGrabber_pmt = new AMMediaType(); pSampleGrabber_pmt.majorType = MediaType.Video; pSampleGrabber_pmt.subType = MediaSubType.ARGB32; pSampleGrabber_pmt.formatType = FormatType.VideoInfo; pSampleGrabber_pmt.fixedSizeSamples = true; pSampleGrabber_pmt.formatSize = 88; pSampleGrabber_pmt.sampleSize = 1658880; pSampleGrabber_pmt.temporalCompression = false; VideoInfoHeader pSampleGrabber_format = new VideoInfoHeader(); pSampleGrabber_format.SrcRect = new DsRect(); pSampleGrabber_format.SrcRect.right = 720; pSampleGrabber_format.SrcRect.bottom = 576; pSampleGrabber_format.TargetRect = new DsRect(); pSampleGrabber_format.TargetRect.right = 720; pSampleGrabber_format.TargetRect.bottom = 576; pSampleGrabber_format.BitRate = 331776000; pSampleGrabber_format.AvgTimePerFrame = 400000; pSampleGrabber_format.BmiHeader = new BitmapInfoHeader(); pSampleGrabber_format.BmiHeader.Size = 40; pSampleGrabber_format.BmiHeader.Width = 720; pSampleGrabber_format.BmiHeader.Height = 576; pSampleGrabber_format.BmiHeader.Planes = 1; pSampleGrabber_format.BmiHeader.BitCount = 32; pSampleGrabber_format.BmiHeader.ImageSize = 1658880; pSampleGrabber_pmt.formatPtr = Marshal.AllocCoTaskMem(Marshal.SizeOf(pSampleGrabber_format)); Marshal.StructureToPtr(pSampleGrabber_format, pSampleGrabber_pmt.formatPtr, false); hr = ((ISampleGrabber)sampleGrabber).SetMediaType(pSampleGrabber_pmt); DsError.ThrowExceptionForHR(hr); //connect MJPG dec and SampleGrabber //hr = pGraph.ConnectDirect(GetPin(pMJPGDecompressor, "XForm Out"), GetPin(pSampleGrabber, "Input"), null); hr = pGraph.ConnectDirect(DsFindPin.ByName(pColorSpaceConverter, "XForm Out"), DsFindPin.ByName(sampleGrabber, "Input"), null); DsError.ThrowExceptionForHR(hr); //set callback hr = ((ISampleGrabber)sampleGrabber).SetCallback(this, 1); DsError.ThrowExceptionForHR(hr); } finally { // Clean this mess up! } }
public void PrepareCapture(int i_width, int i_height,float i_frame_rate) { const int BBP = 32; // Give up if a capture is already in progress. if (this.m_graphi_active) { throw new Exception(); } // Release every graph instance currently held. CleanupGraphiObjects(); int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin pSampleIn = null; // Create the graph builder. this.m_FilterGraph = new FilterGraph() as IFilterGraph2; try { // Add the capture device to the filter graph and receive its filter in capFilter. hr = m_FilterGraph.AddSourceFilterForMoniker(this.m_dev.Mon, null, this.m_dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); // Look for the still pin. m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0); // If there is no still pin, look for a preview pin. if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } // Still haven't found one. Need to put a splitter in so we have // one stream to capture the bitmap from, and one to display. Ok, we // don't *have* to do it that way, but we are going to anyway. if (m_pinStill == null) { // There is no still pin m_VidControl = null; m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); }else{ // Get a control pointer (used in Click()) m_VidControl = capFilter as IAMVideoControl; m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); } if (i_height + i_width + BBP > 0) { SetConfigParms(m_pinStill, i_width, i_height,i_frame_rate, BBP); } // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure sampGrabber. IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); }else{ // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); } hr = sampGrabber.GetConnectedMediaType(this._capture_mediatype); DsError.ThrowExceptionForHR(hr); // Update the video format information. upateVideoInfo(sampGrabber); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iSampleRate, int iChannels) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; ICaptureGraphBuilder2 capGraph = null; IBaseFilter baseGrabFlt = null; IBaseFilter nullrenderer = null; // Get the graphbuilder object m_FilterGraph = (IFilterGraph2)new FilterGraph(); m_mediaCtrl = m_FilterGraph as IMediaControl; try { // Get the ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); // Start building the graph hr = capGraph.SetFiltergraph(m_FilterGraph); DsError.ThrowExceptionForHR(hr); // Add the audio device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Audio input", out capFilter); DsError.ThrowExceptionForHR(hr); // If any of the default config items are set if (iSampleRate + iChannels > 0) { SetConfigParms(capGraph, capFilter, iSampleRate, iChannels); } // Get the SampleGrabber interface (create it only once) sampGrabber = new SampleGrabber() as ISampleGrabber; baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the capture filter to the sample grabber // Hopefully this will be the audio pin; we could check by reading its media type IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); // Get the input pin from the sample grabber IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Add the null renderer to the graph nullrenderer = new NullRenderer() as IBaseFilter; hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the sample grabber to the null renderer iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Read and cache the resulting settings SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (nullrenderer != null) { Marshal.ReleaseComObject(nullrenderer); nullrenderer = null; } if (capGraph != null) { Marshal.ReleaseComObject(capGraph); capGraph = null; } } }
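SaveSizeInfo is called here to "read and cache the resulting settings", but its body is not shown. For an audio graph it would typically read the negotiated WAVEFORMATEX back from the grabber. A sketch under that assumption; the m_SampleRate, m_Channels and m_BitsPerSample fields are invented names for illustration.

// Sketch only; requires using System; using System.Runtime.InteropServices; using DirectShowLib;
private void SaveSizeInfo(ISampleGrabber sampGrabber)
{
    // Ask the grabber which format the pins actually agreed on.
    AMMediaType media = new AMMediaType();
    int hr = sampGrabber.GetConnectedMediaType(media);
    DsError.ThrowExceptionForHR(hr);

    if (media.formatType != FormatType.WaveEx || media.formatPtr == IntPtr.Zero)
    {
        DsUtils.FreeAMMediaType(media);
        throw new NotSupportedException("Expected a WAVEFORMATEX audio format");
    }

    // A WAVEFORMATEX structure sits behind formatPtr for FormatType.WaveEx.
    WaveFormatEx wf = (WaveFormatEx)Marshal.PtrToStructure(media.formatPtr, typeof(WaveFormatEx));
    m_SampleRate = wf.nSamplesPerSec;     // invented member names
    m_Channels = wf.nChannels;
    m_BitsPerSample = wf.wBitsPerSample;

    DsUtils.FreeAMMediaType(media);
}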
public void InitDevice(DsDevice device, int iWidth, int iHeight) { int hr; object camDevice; Guid iid = typeof(IBaseFilter).GUID; device.Mon.BindToObject(null, null, ref iid, out camDevice); IBaseFilter camFilter = camDevice as IBaseFilter; m_CameraControl = camFilter as IAMCameraControl; m_VideoControl = camFilter as IAMVideoProcAmp; ISampleGrabber sampGrabber = null; graphBuilder = (IGraphBuilder)new FilterGraph(); //Create the Capture Graph Builder ICaptureGraphBuilder2 captureGraphBuilder = null; captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); // Attach the filter graph to the capture graph hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder); DsError.ThrowExceptionForHR(hr); //Add the Video input device to the graph hr = graphBuilder.AddFilter(camFilter, "WebCam" + deviceNumber); DsError.ThrowExceptionForHR(hr); // Configure the sample grabber sampGrabber = new SampleGrabber() as ISampleGrabber; ConfigureSampleGrabber(sampGrabber); IBaseFilter sampGrabberBF = sampGrabber as IBaseFilter; //Add the sample grabber filter to the graph hr = graphBuilder.AddFilter(sampGrabberBF, "SampleGrabber" + deviceNumber); DsError.ThrowExceptionForHR(hr); IBaseFilter nullRender = new NullRenderer() as IBaseFilter; hr = graphBuilder.AddFilter(nullRender, "NullRenderer" + deviceNumber); DsError.ThrowExceptionForHR(hr); InitResolution(captureGraphBuilder, camFilter, iWidth, iHeight); hr = captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, camDevice, sampGrabberBF, nullRender); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampGrabber); Marshal.ReleaseComObject(sampGrabber); Marshal.ReleaseComObject(nullRender); Marshal.ReleaseComObject(captureGraphBuilder); }
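InitDevice calls InitResolution to ask the camera for the requested frame size, but that helper is not included in this listing. One plausible implementation uses IAMStreamConfig on the capture pin; the sketch below is an assumption about how that could look, not the original method.

// Sketch only; requires using System; using System.Runtime.InteropServices; using DirectShowLib;
private void InitResolution(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int width, int height)
{
    // Find IAMStreamConfig on the capture output pin.
    object o;
    int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter,
                                    typeof(IAMStreamConfig).GUID, out o);
    DsError.ThrowExceptionForHR(hr);
    IAMStreamConfig streamConfig = (IAMStreamConfig)o;

    AMMediaType media;
    hr = streamConfig.GetFormat(out media);
    DsError.ThrowExceptionForHR(hr);

    if (media.formatType != FormatType.VideoInfo)
    {
        DsUtils.FreeAMMediaType(media);
        throw new NotSupportedException("Expected a VIDEOINFOHEADER format");
    }

    // Patch the requested size into the VIDEOINFOHEADER and push it back to the pin.
    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
    vih.BmiHeader.Width = width;
    vih.BmiHeader.Height = height;
    Marshal.StructureToPtr(vih, media.formatPtr, false);

    hr = streamConfig.SetFormat(media);
    DsUtils.FreeAMMediaType(media);
    DsError.ThrowExceptionForHR(hr);
}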
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(string FileName, Control hWin) { int hr; IBaseFilter ibfRenderer = null; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin iPinInFilter = null; IPin iPinOutFilter = null; IPin iPinInDest = null; IBasicAudio basicAudio = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif try { // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Add the video source hr = m_FilterGraph.AddSourceFilter(FileName, "Ds.NET FileFilter", out capFilter); DsError.ThrowExceptionForHR(hr); // Hopefully this will be the video pin IPin iPinOutSource = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); iPinInFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); iPinOutFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); hr = m_FilterGraph.Connect(iPinOutSource, iPinInFilter); DsError.ThrowExceptionForHR(hr); // Get the default video renderer ibfRenderer = (IBaseFilter)new VideoRendererDefault(); // Add it to the graph hr = m_FilterGraph.AddFilter(ibfRenderer, "Ds.NET VideoRendererDefault"); DsError.ThrowExceptionForHR(hr); iPinInDest = DsFindPin.ByDirection(ibfRenderer, PinDirection.Input, 0); // Connect the graph. Many other filters automatically get added here hr = m_FilterGraph.Connect(iPinOutFilter, iPinInDest); DsError.ThrowExceptionForHR(hr); SaveSizeInfo(sampGrabber); // Set the output window IVideoWindow videoWindow = m_FilterGraph as IVideoWindow; hr = videoWindow.put_Owner(hWin.Handle); DsError.ThrowExceptionForHR(hr); hr = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren | WindowStyle.ClipSiblings); DsError.ThrowExceptionForHR(hr); hr = videoWindow.put_Visible(OABool.True); DsError.ThrowExceptionForHR(hr); //TODO : Need a better way to hide the video in the parent Window Rectangle rc = hWin.ClientRectangle; if (mParentWindowDisplay) hr = videoWindow.SetWindowPosition(0, 0, rc.Right, rc.Bottom); else hr = videoWindow.SetWindowPosition(0, 0, 0, 0); DsError.ThrowExceptionForHR(hr); IGraphBuilder graphBuilder = m_FilterGraph as IGraphBuilder; ICaptureGraphBuilder2 captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); // Attach the filter graph to the capture graph hr = captureGraphBuilder.SetFiltergraph(graphBuilder); DsError.ThrowExceptionForHR(hr); hr = captureGraphBuilder.RenderStream(null, MediaType.Audio, capFilter, null, null); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (ibfRenderer != null) { Marshal.ReleaseComObject(ibfRenderer); ibfRenderer = null; } if (iPinInFilter != null) { Marshal.ReleaseComObject(iPinInFilter); iPinInFilter = null; } if (iPinOutFilter != null) { Marshal.ReleaseComObject(iPinOutFilter); iPinOutFilter = null; } if (iPinInDest != null) { Marshal.ReleaseComObject(iPinInDest); iPinInDest = null; } } }
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(string FileName) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter baseGrabFlt = null; IBaseFilter capFilter = null; IBaseFilter nullrenderer = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; m_mediaCtrl = m_FilterGraph as IMediaControl; m_MediaEvent = m_FilterGraph as IMediaEvent; IMediaFilter mediaFilt = m_FilterGraph as IMediaFilter; try { #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif // Add the video source hr = m_FilterGraph.AddSourceFilter(FileName, "Ds.NET FileFilter", out capFilter); DsError.ThrowExceptionForHR(hr); // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); // Add the frame grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the file filter to the sample grabber // Hopefully this will be the video pin, we could check by reading it's mediatype IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); // Get the input pin from the sample grabber IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Add the null renderer to the graph nullrenderer = new NullRenderer() as IBaseFilter; hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer"); DsError.ThrowExceptionForHR(hr); // --------------------------------- // Connect the sample grabber to the null renderer iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0); hr = m_FilterGraph.Connect(iPinOut, iPinIn); DsError.ThrowExceptionForHR(hr); // Turn off the clock. This causes the frames to be sent // thru the graph as fast as possible hr = mediaFilt.SetSyncSource(null); DsError.ThrowExceptionForHR(hr); // Read and cache the image sizes SaveSizeInfo(sampGrabber); } finally { if (capFilter != null) { Marshal.ReleaseComObject(capFilter); capFilter = null; } if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (nullrenderer != null) { Marshal.ReleaseComObject(nullrenderer); nullrenderer = null; } } }
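Because SetSyncSource(null) removes the reference clock, this graph pushes frames through as fast as the filters allow, which is the usual setup for offline frame extraction. The normal companion is to run the graph and block on IMediaEvent until EC_COMPLETE. A hedged usage sketch, assuming the m_mediaCtrl and m_MediaEvent members initialized above; the method name ProcessWholeFile is invented.

// Sketch only; requires using System; using DirectShowLib;
public void ProcessWholeFile()
{
    int hr = m_mediaCtrl.Run();
    DsError.ThrowExceptionForHR(hr);

    // Block until the graph reports completion; the grabber callback keeps
    // receiving frames on the streaming thread while we wait here.
    EventCode evCode;
    hr = m_MediaEvent.WaitForCompletion(-1, out evCode);
    DsError.ThrowExceptionForHR(hr);

    if (evCode != EventCode.Complete)
    {
        throw new Exception("Graph stopped before the end of the file: " + evCode);
    }

    hr = m_mediaCtrl.Stop();
    DsError.ThrowExceptionForHR(hr);
}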
public void Open(bool run = true) { DsDevice device = null; foreach (DsDevice d in DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice)) { if (d.Name == this.Name) { device = d; break; } } if (device == null) { throw new NullReferenceException("DsDevice"); } try { // Create this.captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2; if (this.captureGraphBuilder == null) { throw new NullReferenceException("ICaptureGraphBuilder2"); } this.filterGraph = new FilterGraph() as IFilterGraph2; if (this.filterGraph == null) { throw new NullReferenceException("IFilterGraph2"); } this.mediaControl = this.filterGraph as IMediaControl; if (this.mediaControl == null) { throw new NullReferenceException("IMediaControl"); } // Filter Graph (Video Capture -> Sample Grabber -> Null Renderer) int hr = this.captureGraphBuilder.SetFiltergraph(this.filterGraph); if (hr < 0) { throw new COMException("ICaptureGraphBuilder2::SetFiltergraph", hr); } // Video Capture hr = this.filterGraph.AddSourceFilterForMoniker(device.Mon, null, device.Name, out this.videoCapture); if (hr < 0) { throw new COMException("IFilterGraph2::AddSourceFilterForMoniker", hr); } if (run) { hr = this.captureGraphBuilder.FindInterface(PinCategory.Capture, DirectShowLib.MediaType.Video, this.videoCapture, typeof(IAMStreamConfig).GUID, out object intrface); if (hr < 0) { throw new COMException("ICaptureGraphBuilder2::FindInterface::IAMStreamConfig", hr); } IAMStreamConfig streamConfig = intrface as IAMStreamConfig; if (streamConfig == null) { throw new NullReferenceException("IAMStreamConfig"); } hr = streamConfig.GetFormat(out AMMediaType media); if (hr < 0) { throw new COMException("IAMStreamConfig::GetFormat", hr); } if (this.MediaType == null) { this.MediaType = new VideoInfoHeader(); Marshal.PtrToStructure(media.formatPtr, this.MediaType); DsUtils.FreeAMMediaType(media); media = null; } else { Marshal.StructureToPtr(this.MediaType, media.formatPtr, false); hr = streamConfig.SetFormat(media); DsUtils.FreeAMMediaType(media); media = null; if (hr < 0) { throw new COMException("IAMStreamConfig::SetFormat", hr); } } this.Width = this.MediaType.BmiHeader.Width; this.Height = this.MediaType.BmiHeader.Height; this.FrameRate = 10000000.0 / this.MediaType.AvgTimePerFrame; // Sample Grabber ISampleGrabber sampleGrabber = new SampleGrabber() as ISampleGrabber; media = new AMMediaType(); media.majorType = DirectShowLib.MediaType.Video; media.subType = MediaSubType.RGB24; media.formatType = FormatType.VideoInfo; hr = sampleGrabber.SetMediaType(media); DsUtils.FreeAMMediaType(media); media = null; if (hr < 0) { throw new COMException("ISampleGrabber::SetMediaType", hr); } hr = sampleGrabber.SetCallback(this, 1); if (hr < 0) { throw new COMException("ISampleGrabber::SetCallback", hr); } hr = this.filterGraph.AddFilter(sampleGrabber as IBaseFilter, "SampleGrabber"); if (hr < 0) { throw new COMException("IFilterGraph2::AddFilter::SampleGrabber", hr); } // Null Renderer NullRenderer nullRenderer = new NullRenderer(); hr = this.filterGraph.AddFilter(nullRenderer as IBaseFilter, "NullRenderer"); if (hr < 0) { throw new COMException("IFilterGraph2::AddFilter::NullRenderer", hr); } hr = this.captureGraphBuilder.RenderStream(PinCategory.Capture, DirectShowLib.MediaType.Video, this.videoCapture, sampleGrabber as IBaseFilter, nullRenderer as IBaseFilter); if (hr < 0) { throw new COMException("ICaptureGraphBuilder2::RenderStream", hr); } // ROT (Running Object Table) Entry this.rotEntry = new DsROTEntry(this.filterGraph); // Frames 
this.frames = new LinkedList<Frame>(); for (int b = 0; b < this.Backtrace; b++) { this.frames.AddLast(new Frame()); } // Run Filter Graph hr = this.mediaControl.Run(); if (hr < 0) { throw new COMException("IMediaControl::Run", hr); } } } catch { this.Close(); throw; } }
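Open registers the class itself as the grabber callback (SetCallback(this, 1)), so frames arrive through ISampleGrabberCB.BufferCB, presumably recycling the preallocated Frame ring. How the original fills the list is not shown; the sketch below is one plausible implementation, and the Frame members Buffer and Time are invented for illustration.

// Sketch only; requires using System; using System.Collections.Generic;
// using System.Runtime.InteropServices; using DirectShowLib;
public int SampleCB(double sampleTime, IMediaSample pSample)
{
    // Not used with callback type 1, but required by ISampleGrabberCB.
    if (pSample != null) { Marshal.ReleaseComObject(pSample); }
    return 0;
}

public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
{
    lock (this.frames)
    {
        if (this.frames.Count == 0) { return 0; }

        // Recycle the oldest frame in the ring and move it to the front.
        LinkedListNode<Frame> node = this.frames.Last;
        this.frames.RemoveLast();

        Frame frame = node.Value;
        if (frame.Buffer == null || frame.Buffer.Length != bufferLen)   // invented members
        {
            frame.Buffer = new byte[bufferLen];
        }
        Marshal.Copy(pBuffer, frame.Buffer, 0, bufferLen);
        frame.Time = sampleTime;

        this.frames.AddFirst(node);
    }
    return 0;
}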
/// <summary> build the capture graph for grabber. </summary> private void SetupGraph(DsDevice dev, int iWidth, int iHeight, short iBPP, Control hControl) { int hr; ISampleGrabber sampGrabber = null; IBaseFilter capFilter = null; IPin pCaptureOut = null; IPin pSampleIn = null; IPin pRenderIn = null; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; try { #if DEBUG m_rot = new DsROTEntry(m_FilterGraph); #endif // add the video input device hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, dev.Name, out capFilter); DsError.ThrowExceptionForHR(hr); // Find the still pin m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Still, 0); // Didn't find one. Is there a preview pin? if (m_pinStill == null) { m_pinStill = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0); } // Still haven't found one. Need to put a splitter in so we have // one stream to capture the bitmap from, and one to display. Ok, we // don't *have* to do it that way, but we are going to anyway. if (m_pinStill == null) { IPin pRaw = null; IPin pSmart = null; // There is no still pin m_VidControl = null; // Add a splitter IBaseFilter iSmartTee = (IBaseFilter)new SmartTee(); try { hr = m_FilterGraph.AddFilter(iSmartTee, "SmartTee"); DsError.ThrowExceptionForHR(hr); // Find the find the capture pin from the video device and the // input pin for the splitter, and connnect them pRaw = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); pSmart = DsFindPin.ByDirection(iSmartTee, PinDirection.Input, 0); hr = m_FilterGraph.Connect(pRaw, pSmart); DsError.ThrowExceptionForHR(hr); // Now set the capture and still pins (from the splitter) m_pinStill = DsFindPin.ByName(iSmartTee, "Preview"); pCaptureOut = DsFindPin.ByName(iSmartTee, "Capture"); // If any of the default config items are set, perform the config // on the actual video device (rather than the splitter) if (iHeight + iWidth + iBPP > 0) { SetConfigParms(pRaw, iWidth, iHeight, iBPP); } } finally { if (pRaw != null) { Marshal.ReleaseComObject(pRaw); } if (pRaw != pSmart) { Marshal.ReleaseComObject(pSmart); } if (pRaw != iSmartTee) { Marshal.ReleaseComObject(iSmartTee); } } } else { // Get a control pointer (used in Click()) m_VidControl = capFilter as IAMVideoControl; pCaptureOut = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0); // If any of the default config items are set if (iHeight + iWidth + iBPP > 0) { SetConfigParms(m_pinStill, iWidth, iHeight, iBPP); } } // Get the SampleGrabber interface sampGrabber = new SampleGrabber() as ISampleGrabber; // Configure the sample grabber IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); pSampleIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); // Get the default video renderer IBaseFilter pRenderer = new VideoRendererDefault() as IBaseFilter; hr = m_FilterGraph.AddFilter(pRenderer, "Renderer"); DsError.ThrowExceptionForHR(hr); pRenderIn = DsFindPin.ByDirection(pRenderer, PinDirection.Input, 0); // Add the sample grabber to the graph hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); if (m_VidControl == null) { // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); DsError.ThrowExceptionForHR(hr); } else { // Connect the capture pin to the renderer hr = m_FilterGraph.Connect(pCaptureOut, pRenderIn); 
DsError.ThrowExceptionForHR(hr); // Connect the Still pin to the sample grabber hr = m_FilterGraph.Connect(m_pinStill, pSampleIn); DsError.ThrowExceptionForHR(hr); } // Learn the video properties SaveSizeInfo(sampGrabber); ConfigVideoWindow(hControl); // Start the graph IMediaControl mediaCtrl = m_FilterGraph as IMediaControl; hr = mediaCtrl.Run(); DsError.ThrowExceptionForHR(hr); } finally { if (sampGrabber != null) { Marshal.ReleaseComObject(sampGrabber); sampGrabber = null; } if (pCaptureOut != null) { Marshal.ReleaseComObject(pCaptureOut); pCaptureOut = null; } if (pRenderIn != null) { Marshal.ReleaseComObject(pRenderIn); pRenderIn = null; } if (pSampleIn != null) { Marshal.ReleaseComObject(pSampleIn); pSampleIn = null; } } }
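m_VidControl is kept "for use in Click()", which is not part of this listing. When a genuine Still pin exists, taking a snapshot normally means firing a software trigger on that pin and then waiting for the grabber callback to deliver the frame. The sketch below follows that pattern; the m_PictureReady ManualResetEvent (assumed to be set inside the buffer callback) and the timeout value are assumptions, not the original code.

// Sketch only; requires using System; using System.Threading; using DirectShowLib;
public void Click()
{
    if (m_VidControl != null)
    {
        // Fire a software trigger on the Still pin; the next frame it produces
        // ends up in the sample grabber callback.
        int hr = m_VidControl.SetMode(m_pinStill, VideoControlFlags.Trigger);
        DsError.ThrowExceptionForHR(hr);
    }
    // With no Still pin the grabber already sees the live stream, so there is
    // nothing to trigger; in both cases wait for the callback to signal a frame.
    if (!m_PictureReady.WaitOne(5000, false))      // assumed ManualResetEvent set in BufferCB
    {
        throw new TimeoutException("Timed out waiting for a still frame");
    }
}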
private IFilterGraph2 BuildGraph(string sFileName) { int hr; IBaseFilter ibfRenderer = null; IBaseFilter ibfAVISource = null; IPin IPinIn = null; IPin IPinOut = null; IPin iSampleIn = null; IPin iSampleOut = null; SampleGrabber sg = null; IFilterGraph2 graphBuilder = new FilterGraph() as IFilterGraph2; try { // Get the file source filter ibfAVISource = new AsyncReader() as IBaseFilter; // Add it to the graph hr = graphBuilder.AddFilter(ibfAVISource, "Ds.NET AsyncReader"); Marshal.ThrowExceptionForHR(hr); // Set the file name IFileSourceFilter fsf = ibfAVISource as IFileSourceFilter; hr = fsf.Load(sFileName, null); Marshal.ThrowExceptionForHR(hr); IPinOut = DsFindPin.ByDirection(ibfAVISource, PinDirection.Output, 0); // Get a SampleGrabber sg = new SampleGrabber(); IBaseFilter grabFilt = sg as IBaseFilter; m_isg = sg as ISampleGrabber; // Add the sample grabber to the graph hr = graphBuilder.AddFilter(grabFilt, "Ds.NET SampleGrabber"); Marshal.ThrowExceptionForHR(hr); iSampleIn = DsFindPin.ByDirection(grabFilt, PinDirection.Input, 0); iSampleOut = DsFindPin.ByDirection(grabFilt, PinDirection.Output, 0); // Get the default video renderer ibfRenderer = (IBaseFilter) new VideoRendererDefault(); // Add it to the graph hr = graphBuilder.AddFilter(ibfRenderer, "Ds.NET VideoRendererDefault"); Marshal.ThrowExceptionForHR(hr); IPinIn = DsFindPin.ByDirection(ibfRenderer, PinDirection.Input, 0); // Connect the file to the sample grabber hr = graphBuilder.Connect(IPinOut, iSampleIn); Marshal.ThrowExceptionForHR(hr); // Connect the sample grabber to the renderer hr = graphBuilder.Connect(iSampleOut, IPinIn); Marshal.ThrowExceptionForHR(hr); } catch { Marshal.ReleaseComObject(graphBuilder); throw; } finally { // Null-check before releasing: if an exception was thrown above, some of these may never have been assigned. // The grabber itself (sg) is kept alive through m_isg, so it is not released here. if (ibfRenderer != null) { Marshal.ReleaseComObject(ibfRenderer); } if (ibfAVISource != null) { Marshal.ReleaseComObject(ibfAVISource); } if (IPinIn != null) { Marshal.ReleaseComObject(IPinIn); } if (IPinOut != null) { Marshal.ReleaseComObject(IPinOut); } if (iSampleIn != null) { Marshal.ReleaseComObject(iSampleIn); } if (iSampleOut != null) { Marshal.ReleaseComObject(iSampleOut); } } return(graphBuilder); }
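BuildGraph stores the grabber in m_isg but does not show how frames are read from it. If the grabber is switched into buffering mode, a one-off snapshot can be pulled with GetCurrentBuffer once the graph is running and has delivered at least one sample; the sketch below illustrates that approach and the GrabSnapshot name is invented.

// Sketch only; requires using System; using System.Runtime.InteropServices; using DirectShowLib;
private byte[] GrabSnapshot()
{
    // Keep a copy of every sample inside the grabber (one-time setup after BuildGraph).
    int hr = m_isg.SetBufferSamples(true);
    DsError.ThrowExceptionForHR(hr);

    // First call with a null pointer returns the required buffer size.
    int size = 0;
    hr = m_isg.GetCurrentBuffer(ref size, IntPtr.Zero);
    DsError.ThrowExceptionForHR(hr);

    IntPtr unmanaged = Marshal.AllocCoTaskMem(size);
    try
    {
        hr = m_isg.GetCurrentBuffer(ref size, unmanaged);
        DsError.ThrowExceptionForHR(hr);

        byte[] frame = new byte[size];
        Marshal.Copy(unmanaged, frame, 0, size);
        return frame;               // raw pixels in the grabber's connected format
    }
    finally
    {
        Marshal.FreeCoTaskMem(unmanaged);
    }
}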
protected virtual void AddRenderers(IFilterGraph2 graphBuilder) { int hr = 0; Guid iid = typeof(IBaseFilter).GUID; if (this.AudioRendererDevice != null) { try { this.audioRenderer = null; IBaseFilter tmp; object o; this.AudioRendererDevice.Mon.BindToObject(null, null, ref iid, out o); tmp = o as IBaseFilter; // Add the selected audio renderer device to the graph hr = graphBuilder.AddFilter(tmp, this.AudioRendererDevice.Name); if (hr >= 0) { // Got it ! this.audioRenderer = tmp; } else { // Couldn't add it; remove the filter and fall back to the default renderer below graphBuilder.RemoveFilter(tmp); Marshal.ReleaseComObject(tmp); } } catch { // Ignore and fall back to the default DirectSound renderer below } } if (this.audioRenderer == null) { // Add default audio renderer this.audioRenderer = (IBaseFilter)new DSoundRender(); hr = graphBuilder.AddFilter(this.audioRenderer, "DirectSound Renderer"); ThrowExceptionForHR("Adding the DirectSound Renderer: ", hr); } // To see something if (useWPF) { // Get the SampleGrabber interface ISampleGrabber sampleGrabber = new SampleGrabber() as ISampleGrabber; this.videoRenderer = sampleGrabber as IBaseFilter; // Ask for RGB32 video so the frames can be rendered on the WPF side AMMediaType media = new AMMediaType(); media.majorType = MediaType.Video; media.subType = MediaSubType.RGB32; media.formatType = FormatType.VideoInfo; hr = sampleGrabber.SetMediaType(media); ThrowExceptionForHR("Setting the MediaType on the SampleGrabber: ", hr); DsUtils.FreeAMMediaType(media); // Configure the samplegrabber callback hr = sampleGrabber.SetCallback(this, 1); DsError.ThrowExceptionForHR(hr); // Add the frame grabber to the graph hr = graphBuilder.AddFilter(this.videoRenderer, "SampleGrabber"); ThrowExceptionForHR("Adding the SampleGrabber: ", hr); // Note: the grabber's output pin still has to be connected (e.g. to a null renderer) when the rest of the graph is built } else { try { this.videoRenderer = (IBaseFilter)new EnhancedVideoRenderer(); hr = graphBuilder.AddFilter(this.videoRenderer, "Enhanced Video Renderer"); ThrowExceptionForHR("Adding the EVR: ", hr); useEVR = true; } catch (Exception) { // EVR not available; fall back to the VMR9 below } if (!useEVR) { this.videoRenderer = (IBaseFilter)new VideoMixingRenderer9(); hr = graphBuilder.AddFilter(this.videoRenderer, "Video Mixing Renderer 9"); ThrowExceptionForHR("Adding the VMR9: ", hr); } } // TODO: optionally make the audio renderer's IReferenceClock the graph clock via IMediaFilter.SetSyncSource. }
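In the useWPF branch the SampleGrabber takes the place of a video renderer, so the RGB32 buffers handed to the callback must be drawn manually. A speculative sketch of pushing each buffer into a WriteableBitmap on the UI thread follows; the wpfImage, writeableBitmap, videoWidth and videoHeight members are assumptions about the surrounding class, not names from the original.

// Sketch only; requires using System; using System.Windows; using System.Windows.Controls;
// using System.Windows.Media; using System.Windows.Media.Imaging; using DirectShowLib;
public int BufferCB(double sampleTime, IntPtr pBuffer, int bufferLen)
{
    int width = this.videoWidth;       // assumed fields cached from the connected media type
    int height = this.videoHeight;
    int stride = width * 4;            // RGB32: four bytes per pixel

    // Invoke synchronously so pBuffer is still valid while the copy runs.
    this.wpfImage.Dispatcher.Invoke((Action)(() =>
    {
        if (this.writeableBitmap == null)
        {
            this.writeableBitmap = new WriteableBitmap(width, height, 96, 96, PixelFormats.Bgr32, null);
            this.wpfImage.Source = this.writeableBitmap;
        }
        // Copy the unmanaged frame straight into the bitmap.
        // Note: uncompressed RGB frames are bottom-up, so the picture may need flipping.
        this.writeableBitmap.WritePixels(new Int32Rect(0, 0, width, height), pBuffer, bufferLen, stride);
    }));
    return 0;
}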
private void Form1_Load(object sender, EventArgs e) { m_PictureReady = new ManualResetEvent(false); int hr; this.graphBuilder = (IGraphBuilder)new FilterGraph(); // Get the SampleGrabber interface ISampleGrabber sampGrabber = new SampleGrabber() as ISampleGrabber; IBaseFilter ibfRenderer = null; IBaseFilter capFilter = null; IPin iPinInFilter = null; IPin iPinOutFilter = null; IPin iPinInDest = null; // Add the video source hr = graphBuilder.AddSourceFilter(Filename, "Ds.NET FileFilter", out capFilter); DsError.ThrowExceptionForHR(hr); // Hopefully this will be the video pin IPin iPinOutSource = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0); IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter; ConfigureSampleGrabber(sampGrabber); iPinInFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0); iPinOutFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0); // Add the frame grabber to the graph hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber"); DsError.ThrowExceptionForHR(hr); hr = graphBuilder.Connect(iPinOutSource, iPinInFilter); DsError.ThrowExceptionForHR(hr); // Get the default video renderer ibfRenderer = (IBaseFilter)new VideoRendererDefault(); // Add it to the graph hr = graphBuilder.AddFilter(ibfRenderer, "Ds.NET VideoRendererDefault"); DsError.ThrowExceptionForHR(hr); iPinInDest = DsFindPin.ByDirection(ibfRenderer, PinDirection.Input, 0); // Connect the graph. Many other filters automatically get added here hr = graphBuilder.Connect(iPinOutFilter, iPinInDest); DsError.ThrowExceptionForHR(hr); this.mediaControl = (IMediaControl)this.graphBuilder; this.mediaEventEx = (IMediaEventEx)this.graphBuilder; this.mediaSeeking = (IMediaSeeking)this.graphBuilder; this.mediaPosition = (IMediaPosition)this.graphBuilder; this.videoWindow = this.graphBuilder as IVideoWindow; this.basicVideo = this.graphBuilder as IBasicVideo; this.basicAudio = this.graphBuilder as IBasicAudio; hr = this.videoWindow.put_Owner(panel1.Handle); DsError.ThrowExceptionForHR(hr); hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren); DsError.ThrowExceptionForHR(hr); if (this.basicVideo == null) return; int lHeight, lWidth; hr = this.basicVideo.GetVideoSize(out lWidth, out lHeight); Console.WriteLine("video: {0} x {1}", lWidth, lHeight); m_videoWidth = lWidth; m_videoHeight = lHeight; SaveSizeInfo(sampGrabber); newbitmap = new Bitmap(lWidth, lHeight, PixelFormat.Format24bppRgb); origbitmap = new Bitmap(lWidth, lHeight, PixelFormat.Format24bppRgb); m_ImageChanged = true; pictureBox1.Width = lWidth; pictureBox1.Height = lHeight; pictureBox2.Width = lWidth; pictureBox2.Height = lHeight; pictureBox2.Top = pictureBox1.Top + lHeight + 4; duration = 0.0; this.mediaPosition.get_Duration(out duration); m_ipBuffer = Marshal.AllocCoTaskMem(Math.Abs(m_stride) * m_videoHeight); // this.ClientSize = new Size(lWidth, lHeight); Application.DoEvents(); hr = this.videoWindow.SetWindowPosition(0, 0, panel1.Width, panel1.Height); DsError.ThrowExceptionForHR(hr); this.mediaControl.Run(); timer1.Enabled = true; }
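Form1_Load builds a running graph and allocates m_ipBuffer, but the matching teardown is not shown. A hedged sketch of a close handler that stops the graph and releases what was created follows; it reuses the member names from the code above, while CloseInterfaces itself is an invented name.

// Sketch only; requires using System; using System.Runtime.InteropServices; using DirectShowLib;
private void CloseInterfaces()
{
    timer1.Enabled = false;

    // Stop streaming before tearing anything down.
    if (this.mediaControl != null) { this.mediaControl.Stop(); }

    // Detach the video window from the panel so it is not left orphaned.
    if (this.videoWindow != null)
    {
        this.videoWindow.put_Visible(OABool.False);
        this.videoWindow.put_Owner(IntPtr.Zero);
    }

    // Free the unmanaged frame buffer allocated in Form1_Load.
    if (m_ipBuffer != IntPtr.Zero)
    {
        Marshal.FreeCoTaskMem(m_ipBuffer);
        m_ipBuffer = IntPtr.Zero;
    }

    // Releasing the graph also drops the filters it holds; the other interface
    // fields are casts of the same underlying object, so just null them out.
    if (this.graphBuilder != null)
    {
        Marshal.ReleaseComObject(this.graphBuilder);
        this.graphBuilder = null;
    }
    this.mediaControl = null;
    this.mediaEventEx = null;
    this.mediaSeeking = null;
    this.mediaPosition = null;
    this.videoWindow = null;
    this.basicVideo = null;
    this.basicAudio = null;
}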