/// <summary>
/// Configures the sample grabber: forces uncompressed RGB24 video,
/// disables internal buffering and one-shot mode, and registers this
/// object as the BufferCB callback (callback type 1).
/// </summary>
/// <param name="sampGrabber">The sample grabber filter to configure.</param>
private void ConfigureSampleGrabber(ISampleGrabber sampGrabber)
{
    int hr;

    // Request Video/RGB24 so the callback receives uncompressed 24-bit frames.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;

    try
    {
        // Fix: the original ignored these two HRESULTs while checking the
        // HRESULTs of the neighbouring calls.
        hr = sampGrabber.SetBufferSamples(false);
        DsError.ThrowExceptionForHR(hr);
        hr = sampGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(hr);

        hr = sampGrabber.SetMediaType(media);
        DsError.ThrowExceptionForHR(hr);
    }
    finally
    {
        // Fix: free the media type even when one of the calls above throws
        // (the original leaked it on the SetMediaType failure path).
        DsUtils.FreeAMMediaType(media);
        media = null;
    }

    // Configure the samplegrabber: callback type 1 = BufferCB (buffer copy
    // of each sample is delivered to this object).
    hr = sampGrabber.SetCallback(this, 1);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>Set up the filter graph for grabbing snapshots</summary>
public void EnableGrabbing()
{
    ICaptureGraphBuilder2 builder = null;
    try
    {
        // Create a capture graph builder and attach it to our filter graph.
        builder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
        if (builder == null)
        {
            throw new Exception("failed to create direct show CaptureGraphBuilder2");
        }
        DsError.ThrowExceptionForHR(builder.SetFiltergraph(m_filter_graph));

        // Create the sample grabber and force it to deliver RGB24 video.
        m_samp_grabber = (ISampleGrabber) new SampleGrabber();

        AMMediaType format = new AMMediaType
        {
            majorType = MediaType.Video,
            subType = MediaSubType.RGB24,
            formatType = FormatType.VideoInfo
        };
        try
        {
            DsError.ThrowExceptionForHR(m_samp_grabber.SetMediaType(format));
        }
        finally
        {
            // Always release the format block, success or failure.
            DsUtils.FreeAMMediaType(format);
        }

        // Buffer incoming samples so snapshots can be read on demand.
        DsError.ThrowExceptionForHR(m_samp_grabber.SetBufferSamples(true));

        // Insert the grabber into the graph.
        IBaseFilter grabberFilter = (IBaseFilter)m_samp_grabber;
        DsError.ThrowExceptionForHR(m_filter_graph.AddFilter(grabberFilter, "DS.NET Grabber"));
    }
    finally
    {
        // The builder is only needed during construction; release it here.
        if (builder != null)
        {
            Marshal.ReleaseComObject(builder);
        }
    }
}
/// <summary>
/// Creates the SampleGrabber filter and configures it for one-shot,
/// buffered RGB32 video capture.
/// </summary>
void CreateSampleGrabber()
{
    // Instantiate the DirectShow SampleGrabber COM object by CLSID.
    Type comType = Type.GetTypeFromCLSID(new Guid(SAMPLE_GRABBER));
    _sampleGrabber = (ISampleGrabber)Activator.CreateInstance(comType);

    // Request uncompressed 32-bit RGB video.
    AMMediaType mediaType = new AMMediaType
    {
        majorType = MediaType.Video,
        subType = MediaSubType.RGB32,
        formatType = FormatType.VideoInfo
    };
    try
    {
        // Fix: the original discarded this HRESULT while checking the
        // HRESULTs of SetOneShot/SetBufferSamples below.
        int hrMedia = _sampleGrabber.SetMediaType(mediaType);
        DsError.ThrowExceptionForHR(hrMedia);
    }
    finally
    {
        // Free the format block even when SetMediaType fails.
        DsUtils.FreeAMMediaType(mediaType);
    }

    // One-shot mode: the graph stops after the first sample; buffer it so
    // it can be read back with GetCurrentBuffer.
    int hr = _sampleGrabber.SetOneShot(true);
    DsError.ThrowExceptionForHR(hr);
    hr = _sampleGrabber.SetBufferSamples(true);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Creates a sample grabber restricted to the given media type, adds it to
/// the graph under <paramref name="filterName"/>, and configures it for
/// buffered one-shot capture.
/// </summary>
/// <param name="graph">Graph the grabber is added to.</param>
/// <param name="filterName">Name the filter is registered under.</param>
/// <param name="majorType">Required major media type.</param>
/// <param name="minorType">Required media subtype.</param>
/// <returns>The configured sample grabber.</returns>
private static ISampleGrabber AddSampleGrabber(IGraphBuilder graph, string filterName, Guid majorType, Guid minorType)
{
    // Create the grabber and insert it into the graph.
    ISampleGrabber grabber = (ISampleGrabber) new SampleGrabber();
    DsError.ThrowExceptionForHR(graph.AddFilter((IBaseFilter)grabber, filterName));

    // Restrict the grabber's input to the requested media type.
    AMMediaType mediaType = new AMMediaType
    {
        majorType = majorType,
        subType = minorType
    };
    DsError.ThrowExceptionForHR(grabber.SetMediaType(mediaType));

    // Buffer each sample and stop the graph after the first one arrives.
    DsError.ThrowExceptionForHR(grabber.SetBufferSamples(true));
    DsError.ThrowExceptionForHR(grabber.SetOneShot(true));

    return grabber;
}
/// <summary>
/// Worker thread: builds a playback graph (async file reader -> sample
/// grabber), runs it, forwards each frame via the Grabber callback, and
/// tears the graph down when stopEvent is signalled.
/// </summary>
///
private void WorkerThread( )
{
    // grabber: callback object that receives the decoded frames (SetCallback below)
    Grabber grabber = new Grabber(this);

    // objects: raw COM instances, released in the finally block
    object graphObject = null;
    object sourceObject = null;
    object grabberObject = null;

    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IFileSourceFilter fileSource = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source device's object (async reader = file source filter)
        type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter async reader");
        }

        sourceObject = Activator.CreateInstance(type);
        sourceBase = (IBaseFilter)sourceObject;
        fileSource = (IFileSourceFilter)sourceObject;
        // Point the source at the video file this instance was created for.
        fileSource.Load(fileName, null);

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;

        // add source and grabber filters to graph
        graph.AddFilter(sourceBase, "source");
        graph.AddFilter(grabberBase, "grabber");

        // set media type: request uncompressed RGB24 video at the grabber
        AMMediaType mediaType = new AMMediaType( );
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(mediaType);

        // connect pins (intelligent connect inserts decoders as needed)
        if (graph.Connect(Tools.GetOutPin(sourceBase, 0), Tools.GetInPin(grabberBase, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // get media type (0 == S_OK) to learn the negotiated frame size
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mediaType.Dispose( );
        }

        // let's do rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBase, 0));

            // configure video window: keep it hidden, frames are consumed via callback
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }

        // configure sample grabber: continuous mode, no internal buffering,
        // BufferCB callback (type 1) delivering each frame to 'grabber'
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(grabber, 1);

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // run
        mediaControl.Run( );

        // poll until the owner signals stop
        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mediaControl.StopWhenReady( );
    }
    catch (Exception exception)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // release all objects: drop interface refs first, then release the
        // underlying COM objects exactly once each
        graph = null;
        sourceBase = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        fileSource = null;

        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceObject != null)
        {
            Marshal.ReleaseComObject(sourceObject);
            sourceObject = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }
}
/// <summary>
/// (Re)builds the DirectShow capture graph for the currently selected device:
/// capture source -> sample grabber (RGB24, callback on this object) -> null
/// renderer, requesting 160x120 @ 15fps. Stops any running graph first and
/// clears state when no device is selected.
/// </summary>
private void setupDirectShowFilterGraph() {
    if (mediaControl != null && running) {
        Stop();
    }
    if (device == null) {
        filterGraph = null;
        mediaControl = null;
    } else {
        filterGraph = (IFilterGraph2) new FilterGraph();
        mediaControl = (IMediaControl)filterGraph;
        ICaptureGraphBuilder2 captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph);

        // capture filter: bind the selected device's moniker into the graph
        IBaseFilter captureFilter;
        filterGraph.AddSourceFilterForMoniker(device.Moniker, null, device.Name, out captureFilter);

        // sample grabber: restrict its input to uncompressed RGB24 video
        ISampleGrabber sampleGrabber = (ISampleGrabber) new SampleGrabber();
        IBaseFilter sampleGrabberFilter = (IBaseFilter)sampleGrabber;
        AMMediaType mediaType = new AMMediaType();
        mediaType.majorType = new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71); // MEDIATYPE_Video
        mediaType.subType = new Guid(0xe436eb7d, 0x524f, 0x11ce, 0x9f, 0x53, 0x00, 0x20, 0xaf, 0x0b, 0xa7, 0x70); // MEDIASUBTYPE_RGB24
        mediaType.formatType = new Guid(0x05589f80, 0xc356, 0x11ce, 0xbf, 0x01, 0x00, 0xaa, 0x00, 0x55, 0x59, 0x5a); // FORMAT_VideoInfo
        sampleGrabber.SetMediaType(mediaType);
        mediaType.Free();
        // Continuous capture: no buffering, BufferCB callback (type 1) on this object.
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetCallback(this, 1);
        filterGraph.AddFilter(sampleGrabberFilter, "ZunTzu Sample Grabber");

        // configure the video stream to 160x120@15fps
        object interfaceFound;
        captureGraphBuilder.FindInterface(
            new Guid(0xfb6c4281, 0x0353, 0x11d1, 0x90, 0x5f, 0x00, 0x00, 0xc0, 0xcc, 0x16, 0xba), // PIN_CATEGORY_CAPTURE
            new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71), // MEDIATYPE_Video
            captureFilter,
            typeof(IAMStreamConfig).GUID,
            out interfaceFound);
        IAMStreamConfig videoStreamConfig = (IAMStreamConfig)interfaceFound;
        videoStreamConfig.GetFormat(out mediaType);
        VideoInfoHeader infoHeader = new VideoInfoHeader();
        Marshal.PtrToStructure(mediaType.formatPtr, infoHeader);
        // 10,000,000 100ns units per second / 15 => 15 fps.
        infoHeader.AvgTimePerFrame = 10000000 / 15;
        infoHeader.BmiHeader.Width = 160;
        infoHeader.BmiHeader.Height = 120;
        // Write the edited header back into the same format block, then apply it.
        Marshal.StructureToPtr(infoHeader, mediaType.formatPtr, false);
        videoStreamConfig.SetFormat(mediaType);
        mediaType.Free();
        // NOTE(review): captureGraphBuilder, captureFilter, sampleGrabber and
        // videoStreamConfig are never explicitly released here — presumably
        // left to the RCW/GC; verify this is intentional.

        // renderer: discard the samples — frames are consumed via the callback only
        IBaseFilter nullRenderer = (IBaseFilter) new NullRenderer();
        filterGraph.AddFilter(nullRenderer, "Null Renderer");
        captureGraphBuilder.RenderStream(
            new Guid(0xfb6c4281, 0x0353, 0x11d1, 0x90, 0x5f, 0x00, 0x00, 0xc0, 0xcc, 0x16, 0xba), // PIN_CATEGORY_CAPTURE
            new Guid(0x73646976, 0x0000, 0x0010, 0x80, 0x00, 0x00, 0xaa, 0x00, 0x38, 0x9b, 0x71), // MEDIATYPE_Video
            captureFilter,
            sampleGrabberFilter,
            nullRenderer);

        // retrieve frame size: read back what was actually negotiated on the
        // grabber's input pin (the driver may not honour 160x120@15 exactly)
        sampleGrabber.GetConnectedMediaType(mediaType);
        Marshal.PtrToStructure(mediaType.formatPtr, infoHeader);
        frameRate = 10000000.0f / infoHeader.AvgTimePerFrame;
        frameSize = new Size(infoHeader.BmiHeader.Width, infoHeader.BmiHeader.Height);
        mediaType.Free();
    }
}
/// <summary>
/// Builds the audio capture graph: capture device -> sample grabber (PCM),
/// reads the device's native wave format to construct the DSAudioSampler,
/// registers the sampler as the grabber callback and connects the stream.
/// Errors are reported via a message box.
/// </summary>
public void CreateGraph()
{
    try
    {
        int result = 0;

        // Create the filter graph manager.
        graphBuilder = new FilterGraph() as IFilterGraph2;

        // Create the capture graph builder and attach it to the graph manager.
        captureGraphBuilder = new CaptureGraphBuilder2() as ICaptureGraphBuilder2;
        result = captureGraphBuilder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(result);

        // Create the source filter bound to the capture device's moniker.
        captureFilter = null;
        result = graphBuilder.AddSourceFilterForMoniker(
            _capDevice.Mon, null, _capDevice.Name, out captureFilter);
        DsError.ThrowExceptionForHR(result);

        // Create the sample grabber and its filter interface.
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        IBaseFilter grabFilter = sampleGrabber as IBaseFilter;

        // Request PCM audio from the grabber.
        AMMediaType amMediaType = new AMMediaType();
        amMediaType.majorType = MediaType.Audio;
        amMediaType.subType = MediaSubType.PCM;
        amMediaType.formatPtr = IntPtr.Zero;
        result = sampleGrabber.SetMediaType(amMediaType);
        DsError.ThrowExceptionForHR(result);
        DsUtils.FreeAMMediaType(amMediaType);

        // Continuous buffered capture.
        // Fix: the original called SetOneShot without capturing its HRESULT
        // and then re-checked the stale result from SetMediaType.
        result = sampleGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(result);
        result = sampleGrabber.SetBufferSamples(true);
        DsError.ThrowExceptionForHR(result);

        // Query the capture pin for the device's actual audio format.
        object o;
        result = captureGraphBuilder.FindInterface(
            DsGuid.FromGuid(PinCategory.Capture),
            DsGuid.FromGuid(MediaType.Audio),
            captureFilter,
            typeof(IAMStreamConfig).GUID,
            out o);
        DsError.ThrowExceptionForHR(result);
        IAMStreamConfig config = o as IAMStreamConfig;
        AMMediaType media;
        result = config.GetFormat(out media);
        DsError.ThrowExceptionForHR(result);

        // Build the sampler from the device's wave format.
        WaveFormatEx wf = new WaveFormatEx();
        Marshal.PtrToStructure(media.formatPtr, wf);
        CaptureOption opt = new CaptureOption(wf);
        _sampler = new DSAudioSampler(opt);
        DsUtils.FreeAMMediaType(media);
        Marshal.ReleaseComObject(config);

        // Register the sampler as the BufferCB callback (type 1).
        result = sampleGrabber.SetCallback(_sampler, 1);
        DsError.ThrowExceptionForHR(result);

        // Add the grabber filter to the graph.
        result = graphBuilder.AddFilter(grabFilter, "Audio Grab Filter");
        DsError.ThrowExceptionForHR(result);

        // Connect the capture filter to the sample grabber filter.
        result = captureGraphBuilder.RenderStream(
            DsGuid.FromGuid(PinCategory.Capture),
            DsGuid.FromGuid(MediaType.Audio),
            captureFilter, null, grabFilter);
        DsError.ThrowExceptionForHR(result);
    }
    catch (Exception ex)
    {
        System.Windows.MessageBox.Show(ex.Message);
    }
}
/// <summary>
/// Capture worker: builds the device graph with two sample grabbers (one for
/// the video stream, one for the still-image/snapshot pin), discovers crossbar
/// and capability information, optionally runs the graph, and then services a
/// command loop (crossbar input changes, snapshot triggers, property pages)
/// until stopEvent is signalled. Raises PlayingFinished on exit.
/// </summary>
/// <param name="runGraph">When false, only builds/queries the graph without running it.</param>
private void WorkerThread(bool runGraph)
{
    ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser;
    // flag == true when the device's still-image pin supports external triggering.
    bool flag = false;
    // Frame callbacks: one grabber per stream.
    Grabber grabber = new Grabber(this, snapshotMode: false);
    Grabber grabber2 = new Grabber(this, snapshotMode: true);
    // Raw COM objects, released in finally: builder, graph, video grabber, snapshot grabber.
    object obj = null;
    object obj2 = null;
    object obj3 = null;
    object obj4 = null;
    object retInterface = null;
    ICaptureGraphBuilder2 captureGraphBuilder = null;
    IFilterGraph2 filterGraph = null;
    IBaseFilter baseFilter = null;
    IBaseFilter baseFilter2 = null;
    IBaseFilter baseFilter3 = null;
    ISampleGrabber sampleGrabber = null;
    ISampleGrabber sampleGrabber2 = null;
    IMediaControl mediaControl = null;
    IAMVideoControl iAMVideoControl = null;
    IMediaEventEx mediaEventEx = null;
    IPin pin = null;
    IAMCrossbar iAMCrossbar = null;
    try
    {
        // Create the capture graph builder and the filter graph, then link them.
        Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating capture graph builder");
        }
        obj = Activator.CreateInstance(typeFromCLSID);
        captureGraphBuilder = (ICaptureGraphBuilder2)obj;
        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        obj2 = Activator.CreateInstance(typeFromCLSID);
        filterGraph = (IFilterGraph2)obj2;
        captureGraphBuilder.SetFiltergraph((IGraphBuilder)filterGraph);

        // Bind the capture device from its moniker string.
        sourceObject = FilterInfo.CreateFilter(deviceMoniker);
        if (sourceObject == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }
        baseFilter = (IBaseFilter)sourceObject;
        // IAMVideoControl is optional — not all devices expose it.
        try
        {
            iAMVideoControl = (IAMVideoControl)sourceObject;
        }
        catch
        {
        }

        // Create two sample grabbers: video stream and snapshot stream.
        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        obj3 = Activator.CreateInstance(typeFromCLSID);
        sampleGrabber = (ISampleGrabber)obj3;
        baseFilter2 = (IBaseFilter)obj3;
        obj4 = Activator.CreateInstance(typeFromCLSID);
        sampleGrabber2 = (ISampleGrabber)obj4;
        baseFilter3 = (IBaseFilter)obj4;
        filterGraph.AddFilter(baseFilter, "source");
        filterGraph.AddFilter(baseFilter2, "grabber_video");
        filterGraph.AddFilter(baseFilter3, "grabber_snapshot");

        // Both grabbers request uncompressed RGB24 video.
        AMMediaType aMMediaType = new AMMediaType();
        aMMediaType.MajorType = MediaType.Video;
        aMMediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(aMMediaType);
        sampleGrabber2.SetMediaType(aMMediaType);

        // Look upstream of the source for a crossbar (analog tuner cards).
        captureGraphBuilder.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, baseFilter, typeof(IAMCrossbar).GUID, out retInterface);
        if (retInterface != null)
        {
            iAMCrossbar = (IAMCrossbar)retInterface;
        }
        isCrossbarAvailable = (iAMCrossbar != null);
        crossbarVideoInputs = ColletCrossbarVideoInputs(iAMCrossbar);

        // Check whether the still-image pin supports external (software) triggering.
        if (iAMVideoControl != null)
        {
            captureGraphBuilder.FindPin(sourceObject, PinDirection.Output, PinCategory.StillImage, MediaType.Video, unconnected: false, 0, out pin);
            if (pin != null)
            {
                iAMVideoControl.GetCaps(pin, out VideoControlFlags flags);
                flag = ((flags & VideoControlFlags.ExternalTriggerEnable) != 0);
            }
        }

        // Video grabber: continuous callback, no buffering.
        sampleGrabber.SetBufferSamples(bufferThem: false);
        sampleGrabber.SetOneShot(oneShot: false);
        sampleGrabber.SetCallback(grabber, 1);
        // Snapshot grabber: buffered so the last still can be re-read.
        sampleGrabber2.SetBufferSamples(bufferThem: true);
        sampleGrabber2.SetOneShot(oneShot: false);
        sampleGrabber2.SetCallback(grabber2, 1);

        // Apply the requested resolutions and discover the pins' capabilities.
        GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.Capture, videoResolution, ref videoCapabilities);
        if (flag)
        {
            GetPinCapabilitiesAndConfigureSizeAndRate(captureGraphBuilder, baseFilter, PinCategory.StillImage, snapshotResolution, ref snapshotCapabilities);
        }
        else
        {
            snapshotCapabilities = new VideoCapabilities[0];
        }

        // Cache the discovered capabilities per device moniker.
        lock (cacheVideoCapabilities)
        {
            if (videoCapabilities != null && !cacheVideoCapabilities.ContainsKey(deviceMoniker))
            {
                cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities);
            }
        }
        lock (cacheSnapshotCapabilities)
        {
            if (snapshotCapabilities != null && !cacheSnapshotCapabilities.ContainsKey(deviceMoniker))
            {
                cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities);
            }
        }

        if (runGraph)
        {
            // Connect capture pin -> video grabber and read the negotiated frame size.
            captureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, baseFilter, null, baseFilter2);
            if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0)
            {
                VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
                grabber.Width = videoInfoHeader.BmiHeader.Width;
                grabber.Height = videoInfoHeader.BmiHeader.Height;
                aMMediaType.Dispose();
            }
            // Same for the still-image pin, when snapshots are supported and wanted.
            if (flag && provideSnapshots)
            {
                captureGraphBuilder.RenderStream(PinCategory.StillImage, MediaType.Video, baseFilter, null, baseFilter3);
                if (sampleGrabber2.GetConnectedMediaType(aMMediaType) == 0)
                {
                    VideoInfoHeader videoInfoHeader2 = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
                    grabber2.Width = videoInfoHeader2.BmiHeader.Width;
                    grabber2.Height = videoInfoHeader2.BmiHeader.Height;
                    aMMediaType.Dispose();
                }
            }

            // Start the graph.
            mediaControl = (IMediaControl)obj2;
            mediaEventEx = (IMediaEventEx)obj2;
            mediaControl.Run();
            if (flag && provideSnapshots)
            {
                startTime = DateTime.Now;
                iAMVideoControl.SetMode(pin, VideoControlFlags.ExternalTriggerEnable);
            }

            // Command/event loop, polled every 100 ms until stop is requested.
            do
            {
                // Drain graph events; bail out if the device disappears.
                if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0)
                {
                    mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2);
                    if (lEventCode == DsEvCode.DeviceLost)
                    {
                        reason = ReasonToFinishPlaying.DeviceLost;
                        break;
                    }
                }
                // Apply a pending crossbar input change.
                if (needToSetVideoInput)
                {
                    needToSetVideoInput = false;
                    if (isCrossbarAvailable.Value)
                    {
                        SetCurrentCrossbarInput(iAMCrossbar, crossbarVideoInput);
                        crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                    }
                }
                // Fire a software-simulated snapshot trigger.
                if (needToSimulateTrigger)
                {
                    needToSimulateTrigger = false;
                    if (flag && provideSnapshots)
                    {
                        iAMVideoControl.SetMode(pin, VideoControlFlags.Trigger);
                    }
                }
                // Show the device property page on request (may change the crossbar input).
                if (needToDisplayPropertyPage)
                {
                    needToDisplayPropertyPage = false;
                    DisplayPropertyPage(parentWindowForPropertyPage, sourceObject);
                    if (iAMCrossbar != null)
                    {
                        crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                    }
                }
                // Show the crossbar property page on request.
                if (needToDisplayCrossBarPropertyPage)
                {
                    needToDisplayCrossBarPropertyPage = false;
                    if (iAMCrossbar != null)
                    {
                        DisplayPropertyPage(parentWindowForPropertyPage, iAMCrossbar);
                        crossbarVideoInput = GetCurrentCrossbarInput(iAMCrossbar);
                    }
                }
            }
            while (!stopEvent.WaitOne(100, exitContext: false));
            mediaControl.Stop();
        }
    }
    catch (Exception ex)
    {
        if (this.VideoSourceError != null)
        {
            this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message));
        }
    }
    finally
    {
        // Drop interface references, then release each COM object once.
        captureGraphBuilder = null;
        filterGraph = null;
        baseFilter = null;
        mediaControl = null;
        iAMVideoControl = null;
        mediaEventEx = null;
        pin = null;
        iAMCrossbar = null;
        baseFilter2 = null;
        baseFilter3 = null;
        sampleGrabber = null;
        sampleGrabber2 = null;
        if (obj2 != null)
        {
            Marshal.ReleaseComObject(obj2);
            obj2 = null;
        }
        if (sourceObject != null)
        {
            Marshal.ReleaseComObject(sourceObject);
            sourceObject = null;
        }
        if (obj3 != null)
        {
            Marshal.ReleaseComObject(obj3);
            obj3 = null;
        }
        if (obj4 != null)
        {
            Marshal.ReleaseComObject(obj4);
            obj4 = null;
        }
        if (obj != null)
        {
            Marshal.ReleaseComObject(obj);
            obj = null;
        }
        if (retInterface != null)
        {
            Marshal.ReleaseComObject(retInterface);
            retInterface = null;
        }
    }
    if (this.PlayingFinished != null)
    {
        this.PlayingFinished(this, reason);
    }
}
/// <summary>
/// Starts grabbing images from the capture device. Builds the capture graph
/// on a background Task (source -> grabber -> render), registers the
/// CapGrabber callback and runs the graph until the stop signal is set.
/// </summary>
public virtual void Start()
{
    // Restart cleanly if a capture task is already running.
    if (_captureTask != null)
    {
        Stop();
    }
    _captureTask = new Task(() =>
    {
        // Create new grabber and hook its notifications.
        _capGrabber = new CapGrabber();
        _capGrabber.PropertyChanged += capGrabber_PropertyChanged;
        _capGrabber.NewFrameArrived += capGrabber_NewFrameArrived;
        _stopSignal = new ManualResetEvent(false);
        // Build the filter graph and the webcam source filter.
        _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IFilterGraph2;
        _sourceObject = FilterInfo.CreateFilter(_monikerString);
        var outputPin = _sourceObject.GetPin(PinCategory.Capture, 0);
        // Choose the desired capture resolution before connecting.
        SelectWebcamResolution(outputPin);
        _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        _grabberObject = _grabber as IBaseFilter;
        if (_graph == null)
        {
            return;
        }
        ;
        _graph.AddFilter(_sourceObject, "source");
        _graph.AddFilter(_grabberObject, "grabber");
        using (var mediaType = new AMMediaType())
        {
            // Request uncompressed RGB32 video at the grabber.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            if (_grabber != null)
            {
                _grabber.SetMediaType(mediaType);
                var inputPin = _grabberObject.GetPin(PinDirection.Input, 0);
                if (_graph.Connect(outputPin, inputPin) >= 0)
                {
                    // 0 == S_OK: read the negotiated frame size back.
                    if (_grabber.GetConnectedMediaType(mediaType) == 0)
                    {
                        var header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                        _capGrabber.Width = header.BmiHeader.Width;
                        _capGrabber.Height = header.BmiHeader.Height;
                    }
                }
                _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
                // Continuous callback mode: BufferCB (type 1) on _capGrabber.
                _grabber.SetBufferSamples(false);
                _grabber.SetOneShot(false);
                _grabber.SetCallback(_capGrabber, 1);
            }
            // Get the video window and keep it hidden (frames come via callback).
            var wnd = (IVideoWindow)_graph;
            wnd.put_AutoShow(false);
            // Create the control and run
            _control = (IMediaControl)_graph;
            _control.Run();
            // Wait for the stop signal, then tear everything down.
            _stopSignal.WaitOne();
            Cleanup();
        }
    });
    _captureTask.Start();
}
/// <summary>
/// Tears down any existing graph (Dispose) and rebuilds the capture graph
/// for the current VideoInput filter, wiring this object in as the sample
/// grabber callback, then starts the graph. Errors are logged to the console
/// and a single re-attempt is made if the stream fails to render.
/// </summary>
private void ApplyVideoInput()
{
    int iRet;
    Dispose();
    /*Frame = new byte[(width * height) * PixelSize];
     * CapturedFrame = new byte[(width * height) * PixelSize];
     * PreviewFrame = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];*/
    if (VideoInput == null)
    {
        return;
    }
    //Original Code
    // Build the graph: manager + capture builder + media control.
    GraphBuilder = (IGraphBuilder) new FilterGraph();
    CaptureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    MediaControl = (IMediaControl)GraphBuilder;
    iRet = CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
    if (iRet != 0)
    {
        Console.WriteLine("TheKing--> Error Found SetFiltergraph");
    }
    // Create the sample grabber and add it to the graph.
    SampleGrabber = new SampleGrabber() as ISampleGrabber;
    iRet = GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
    if (iRet != 0)
    {
        Console.WriteLine("TheKing--> Error Found AddFilter 1");
    }
    SetResolution(width, height);
    // Add the camera source filter.
    iRet = GraphBuilder.AddFilter(VideoInput, "Camera");
    if (iRet != 0)
    {
        Console.WriteLine("TheKing--> Error Found AddFilter 2");
    }
    // Buffered, continuous capture with BufferCB callback (type 1) on this object.
    iRet = SampleGrabber.SetBufferSamples(true);
    if (iRet != 0)
    {
        Console.WriteLine("TheKing--> Error Found SetBufferSamples");
    }
    iRet = SampleGrabber.SetOneShot(false);
    if (iRet != 0)
    {
        Console.WriteLine("TheKing--> Error Found SetOneShot");
    }
    iRet = SampleGrabber.SetCallback(this, 1);
    if (iRet != 0)
    {
        Console.WriteLine("TheKing--> Error Found SetCallback");
    }
    // Connect camera -> grabber; on failure retry once.
    // NOTE(review): the retry recurses only when counter == 1 and counter is
    // not modified here — presumably maintained elsewhere; verify this cannot
    // recurse unboundedly.
    iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, SampleGrabber as IBaseFilter);
    if (iRet < 0)
    {
        Console.WriteLine("TheKing--> Error Found in CaptureGraphBuilder.RenderStream, iRet = " + iRet + ", Initialization TryNumber = " + counter);
        if (counter == 1)
        {
            ApplyVideoInput();
        }
    }
    //GraphBuilder.Connect()
    //iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, null);
    //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 1");
    //iRet = CaptureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
    //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 2, iRet = " + iRet);
    // NOTE(review): Thread.Abort is obsolete/unsupported on modern .NET —
    // consider a cooperative cancellation flag instead.
    if (UpdateThread != null)
    {
        UpdateThread.Abort();
    }
    //UpdateThread = new Thread(UpdateBuffer);
    //UpdateThread.Start();
    MediaControl.Run();
    // NOTE(review): releasing VideoInput right after Run() looks suspicious —
    // the graph holds its own reference, but confirm no later code reuses
    // this RCW.
    Marshal.ReleaseComObject(VideoInput);
}
/// <summary>
/// Capture worker: builds the graph (device source -> RGB32 sample grabber),
/// registers capGrabber as the frame callback, runs the graph until
/// stopSignal is set, then clears all references.
/// </summary>
void RunWorker()
{
    try
    {
        // Build the filter graph, source filter and sample grabber.
        graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
        sourceObject = FilterInfo.CreateFilter(deviceMoniker);
        grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        grabberObject = grabber as IBaseFilter;
        graph.AddFilter(sourceObject, "source");
        graph.AddFilter(grabberObject, "grabber");
        using (AMMediaType mediaType = new AMMediaType())
        {
            // Request uncompressed RGB32 video at the grabber.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            grabber.SetMediaType(mediaType);
            // Connect source output -> grabber input; on success read back the
            // negotiated frame size (0 == S_OK).
            if (graph.Connect(sourceObject.GetPin(PinDirection.Output, 0), grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (grabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                    capGrabber.Width = header.BmiHeader.Width;
                    capGrabber.Height = header.BmiHeader.Height;
                }
            }
            graph.Render(grabberObject.GetPin(PinDirection.Output, 0));
            // Continuous callback mode: BufferCB (type 1) on capGrabber.
            grabber.SetBufferSamples(false);
            grabber.SetOneShot(false);
            grabber.SetCallback(capGrabber, 1);
            // Keep the video window hidden — frames are consumed via callback.
            IVideoWindow wnd = (IVideoWindow)graph;
            wnd.put_AutoShow(false);
            wnd = null;
            // Run until the stop signal is raised.
            control = (IMediaControl)graph;
            control.Run();
            while (!stopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }
            control.StopWhenReady();
        }
    }
    catch (Exception ex)
    {
        System.Diagnostics.Debug.WriteLine(ex);
    }
    finally
    {
        // Drop all references so the RCWs can be collected.
        graph = null;
        sourceObject = null;
        grabberObject = null;
        grabber = null;
        capGrabber = null;
        control = null;
    }
}
/// <summary>
/// Builds the capture graph for the configured video input device:
/// camera -> sample grabber (ARGB32, callback on this object), rendered
/// from the device's preview pin.
/// </summary>
private void InitializeCapture()
{
    // Filter graph manager + capture graph builder, linked together.
    graphBuilder = (IGraphBuilder)new FilterGraph();
    mediaControl = (IMediaControl)graphBuilder;
    captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
    DsError.ThrowExceptionForHR(hr);

    IBaseFilter videoInput = GetVideoInputObject();
    if (null != videoInput)
    {
        SetConfigurations(videoInput);

        // Add the grabber and the camera to the graph.
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Render");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder.AddFilter(videoInput, "Camera");
        DsError.ThrowExceptionForHR(hr);

        // Request 32-bit ARGB video at the grabber.
        AMMediaType type = new AMMediaType()
        {
            majorType = MediaType.Video,
            subType = MediaSubType.ARGB32,
            formatType = FormatType.VideoInfo
        };
        hr = sampleGrabber.SetMediaType(type);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(type);

        // Continuous callback mode: BufferCB (type 1) on this object.
        // Fix: these HRESULTs were previously ignored.
        hr = sampleGrabber.SetBufferSamples(false);
        DsError.ThrowExceptionForHR(hr);
        hr = sampleGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(hr);
        // Fix: removed a pointless sampleGrabber.GetConnectedMediaType(new AMMediaType())
        // call — the grabber is not connected yet at this point, the result was
        // discarded, and the AMMediaType instance leaked.
        hr = sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
        DsError.ThrowExceptionForHR(hr);

        // Connect camera (preview pin) -> sample grabber.
        hr = captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, videoInput, null, sampleGrabber as IBaseFilter);
        DsError.ThrowExceptionForHR(hr);

        // The graph now holds its own reference to the camera filter.
        Marshal.ReleaseComObject(videoInput);
    }
}
/// <summary>
/// Worker thread that captures the images: builds the graph (webcam source
/// -> RGB32 sample grabber), reads the negotiated frame size (with retries,
/// since the format block may not be ready immediately after connect),
/// registers the callback and starts the graph. On failure the partially
/// built graph is released.
/// </summary>
private void Init()
{
    try
    {
        log.Trace("Start worker thread");
        // Create the main graph
        _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;
        // Create the webcam source
        _sourceObject = FilterInfo.CreateFilter(_monikerString);
        // Create the grabber
        _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        _grabberObject = _grabber as IBaseFilter;
        // Add the source and grabber to the main graph
        _graph.AddFilter(_sourceObject, "source");
        _graph.AddFilter(_grabberObject, "grabber");
        using (AMMediaType mediaType = new AMMediaType())
        {
            // Request uncompressed RGB32 video at the grabber.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            _grabber.SetMediaType(mediaType);
            if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0), _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                // 0 == S_OK: the negotiated media type is available.
                if (_grabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // During startup, this code can be too fast, so try at least 3 times
                    int retryCount = 0;
                    bool succeeded = false;
                    while ((retryCount < 3) && !succeeded)
                    {
                        // Tried again
                        retryCount++;
                        try
                        {
                            // Retrieve the grabber information
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            _capGrabber.Width = header.BmiHeader.Width;
                            _capGrabber.Height = header.BmiHeader.Height;
                            // Succeeded
                            succeeded = true;
                        }
                        catch
                        {
                            // Trace
                            log.InfoFormat("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);
                            // Sleep briefly before re-reading the format block.
                            Thread.Sleep(50);
                        }
                    }
                }
            }
            _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
            // Continuous callback mode: BufferCB (type 1) on _capGrabber.
            _grabber.SetBufferSamples(false);
            _grabber.SetOneShot(false);
            _grabber.SetCallback(_capGrabber, 1);
            log.Trace("_grabber set up");
            // Get the video window and keep it hidden (frames come via callback).
            IVideoWindow wnd = (IVideoWindow)_graph;
            wnd.put_AutoShow(false);
            wnd = null;
            // Create the control and run
            _control = (IMediaControl)_graph;
            _control.Run();
            log.Trace("control runs");
            // Wait for the stop signal
            //while (!_stopSignal.WaitOne(0, true))
            //{
            //    Thread.Sleep(10);
            //}
        }
    }
    catch (Exception ex)
    {
        // Trace
        log.Debug(ex);
        Release();
    }
}
/// <summary>
/// Reconfigures the graph to tap the MPEG-2 transport stream: disconnects
/// the demux/renderers, inserts a buffered sample grabber directly after the
/// capture pin (using the pin's own preferred media type), terminates it
/// with a null renderer, and registers the supplied callback.
/// </summary>
/// <param name="grabber">Callback object that receives the TS samples.</param>
/// <param name="methodToCall">Callback type: 0 = SampleCB, 1 = BufferCB.</param>
public void SetUpForTs(ISampleGrabberCB grabber, int methodToCall)
{
    // Detach the downstream chain so the capture pin can be re-routed.
    FilterGraphTools.DisconnectPins(mpeg2Demux);
    FilterGraphTools.DisconnectPins(audioRenderer);
    FilterGraphTools.DisconnectPins(videoRenderer);

    sampleGrabber = (ISampleGrabber)new SampleGrabber();

    // Provisional media type: MPEG-2 transport stream. It is replaced below
    // by the capture pin's own enumerated type before connecting.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Stream;
    media.subType = MediaSubType.Mpeg2Transport;
    media.formatType = FormatType.MpegStreams;

    // Continuous, buffered capture.
    sampleGrabber.SetOneShot(false);
    sampleGrabber.SetBufferSamples(true);
    int hr = sampleGrabber.SetMediaType(media);
    DsError.ThrowExceptionForHR(hr);
    // Fix: free the provisional media type (it was leaked).
    DsUtils.FreeAMMediaType(media);

    graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "Sample Grabber");

    nullRenderer = (IBaseFilter)new NullRenderer();
    graphBuilder.AddFilter(nullRenderer, "NULL Renderer");

    IPin pinIn = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Input");
    IPin pinOut = DsFindPin.ByDirection(capture, PinDirection.Output, 0);

    // Use the capture pin's first preferred media type for the connection.
    IEnumMediaTypes eMedia;
    pinOut.EnumMediaTypes(out eMedia);
    AMMediaType[] mediaTypes = new AMMediaType[1];
    eMedia.Next(mediaTypes.Length, mediaTypes, IntPtr.Zero);
    // Fix: release the enumerator (it was leaked).
    Marshal.ReleaseComObject(eMedia);

    hr = sampleGrabber.SetMediaType(mediaTypes[0]);
    DsError.ThrowExceptionForHR(hr);

    // Fix: removed an unused QueryPinInfo call whose PinInfo (and the filter
    // reference inside it) was never freed.
    pinOut.Disconnect();
    hr = graphBuilder.ConnectDirect(pinOut, pinIn, mediaTypes[0]);
    //hr = graphBuilder.Connect(pinOut, pinIn);
    DsError.ThrowExceptionForHR(hr);
    // Fix: free the enumerated media type (ConnectDirect copies it).
    DsUtils.FreeAMMediaType(mediaTypes[0]);

    // Release the pins of the first connection.
    Marshal.ReleaseComObject(pinIn);
    // Fix: release the capture output pin (it was leaked).
    Marshal.ReleaseComObject(pinOut);

    // Terminate the grabber with the null renderer.
    pinIn = DsFindPin.ByName(nullRenderer, "In");
    pinOut = DsFindPin.ByName((IBaseFilter)sampleGrabber, "Output");
    hr = graphBuilder.Connect(pinOut, pinIn);
    DsError.ThrowExceptionForHR(hr);

    sampleGrabber.SetCallback(grabber, methodToCall);

    // Release the pins of the second connection.
    Marshal.ReleaseComObject(pinIn);
    pinIn = null;
    Marshal.ReleaseComObject(pinOut);
    pinOut = null;
}
/// <summary>
/// Open a new video feed (either web-cam or video file).
/// </summary>
/// <param name="filter">Specifies the web-cam filter to use, or <i>null</i> when opening a video file.</param>
/// <param name="pb">Specifies the output window, or <i>null</i> when running headless and only receiving snapshots.</param>
/// <param name="strFile">Specifies the video file to use, or <i>null</i> when opening a web-cam feed.</param>
/// <param name="vidCap">Optionally specifies the video capabilities to use, or <i>null</i> to ignore and use the default video capabilities.</param>
/// <returns>The duration (if any) is returned, or 0.</returns>
/// <exception cref="ArgumentException">Thrown when both <paramref name="filter"/> and <paramref name="strFile"/> are non-null.</exception>
/// <remarks>To get the video capabilities see the GetVideoCapatiblities method.</remarks>
public long Open(Filter filter, PictureBox pb, string strFile, VideoCapability vidCap = null)
{
    int hr;
    // Exactly one source may be supplied: a capture filter (web-cam) or a file.
    if (filter != null && strFile != null)
    {
        throw new ArgumentException("Both the filter and file are non NULL - only one of these can be used at a time; The filter is used with the web-cam and the file is used with a video file.");
    }
    m_selectedFilter = filter;
    m_graphBuilder = (IFilterGraph2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
    // When using a web-cam, create the moniker for the filter and add the filter to the graph.
    if (strFile == null)
    {
        IMoniker moniker = m_selectedFilter.CreateMoniker();
        m_graphBuilder.AddSourceFilterForMoniker(moniker, null, m_selectedFilter.Name, out m_camFilter);
        Marshal.ReleaseComObject(moniker);
        m_camControl = m_camFilter as IAMCameraControl;
        // Create the capture builder used to build the web-cam filter graph.
        m_captureGraphBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2, true));
        hr = m_captureGraphBuilder.SetFiltergraph(m_graphBuilder as IGraphBuilder);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Add the web-cam filter to the graph.
        hr = m_graphBuilder.AddFilter(m_camFilter, m_selectedFilter.Name);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Set the desired video capabilities.
        if (vidCap != null)
        {
            setVideoCapabilities(m_captureGraphBuilder, m_camFilter, vidCap);
        }
    }
    else
    {
        // Build the graph with the video file.
        hr = m_graphBuilder.RenderFile(strFile, null);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        m_mediaSeek = m_graphBuilder as IMediaSeeking;
        // Frame stepping is only useful when we actually render to a window.
        if (pb != null)
        {
            m_videoFrameStep = m_graphBuilder as IVideoFrameStep;
        }
    }
    // Create the sample grabber used to get snapshots.
    m_sampleGrabber = (ISampleGrabber)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
    m_baseGrabFilter = m_sampleGrabber as IBaseFilter;
    m_mediaControl = m_graphBuilder as IMediaControl;
    // When using a target window, get the video window used with the target output window
    if (pb != null)
    {
        m_mediaEventEx = m_graphBuilder as IMediaEventEx;
        m_videoWindow = m_graphBuilder as IVideoWindow;
    }
    // Otherwise create the null renderer for no video output is needed (only snapshots).
    else
    {
        m_nullRenderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.NullRenderer, true));
    }
    // Add the sample grabber to the filter graph.
    hr = m_graphBuilder.AddFilter(m_baseGrabFilter, "Ds.Lib Grabber");
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Turn off the sample grabber buffers.
    hr = m_sampleGrabber.SetBufferSamples(false);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Turn off the sample grabber one-shot.
    hr = m_sampleGrabber.SetOneShot(false);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Turn ON the sample grabber callback where video data is to be received.
    // (whichMethodToCallback == 1 selects the BufferCB callback.)
    hr = m_sampleGrabber.SetCallback(this, 1);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Set the media format used by the sample grabber.
    // NOTE(review): this AMMediaType is never freed with DsUtils.FreeAMMediaType;
    // SetMediaType copies the type, so the local copy leaks — confirm and fix.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    hr = m_sampleGrabber.SetMediaType(media);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Connect the WebCam Filters and Frame Grabber.
    if (m_selectedFilter != null)
    {
        Guid cat;
        Guid med;
        cat = PinCategory.Preview;
        med = MediaType.Video;
        hr = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, null);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        cat = PinCategory.Capture;
        med = MediaType.Video;
        hr = m_captureGraphBuilder.RenderStream(ref cat, ref med, m_camFilter, null, m_baseGrabFilter);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }
    // Connect the Frame Grabber and (optionally the Null Renderer)
    else
    {
        // Get the video decoder and its pins.
        m_videoFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Decoder", false);
        IPin pOutput;
        hr = Utility.GetPin(m_videoFilter, PinDirection.Output, out pOutput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        IPin pInput;
        hr = pOutput.ConnectedTo(out pInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // NOTE(review): pinInfo is queried but never used, and pinInfo.filter is
        // never released (COM ref leak) — confirm this call can be removed.
        PinInfo pinInfo;
        hr = pInput.QueryPinInfo(out pinInfo);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Get the sample grabber pins.
        IPin pGrabInput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Input, out pGrabInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        IPin pGrabOutput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Disconnect the source filter output and the input it is connected to.
        hr = pOutput.Disconnect();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = pInput.Disconnect();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Connect the source output to the Grabber input.
        hr = m_graphBuilder.Connect(pOutput, pGrabInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // When rendering video output, connect the Grabber output to the original
        // downstream input that the source was connected to.
        if (m_nullRenderer == null)
        {
            hr = m_graphBuilder.Connect(pGrabOutput, pInput);
            if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        }
        Marshal.ReleaseComObject(pOutput);
        Marshal.ReleaseComObject(pInput);
        Marshal.ReleaseComObject(pGrabInput);
        Marshal.ReleaseComObject(pGrabOutput);
    }
    // Remove sound filters.
    IBaseFilter soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Audio Decoder", false);
    if (soundFilter != null)
    {
        hr = m_graphBuilder.RemoveFilter(soundFilter);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(soundFilter);
    }
    soundFilter = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Sound", false);
    if (soundFilter != null)
    {
        hr = m_graphBuilder.RemoveFilter(soundFilter);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(soundFilter);
    }
    // When using a headless (no video rendering) setup, connect the null renderer to the Sample Grabber.
    if (m_nullRenderer != null)
    {
        // Add the null renderer.
        hr = m_graphBuilder.AddFilter(m_nullRenderer, "Null Renderer");
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Get the sample grabber output pin.
        IPin pGrabOutput;
        hr = Utility.GetPin(m_baseGrabFilter, PinDirection.Output, out pGrabOutput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Get the null renderer input pin.
        IPin pInput;
        hr = Utility.GetPin(m_nullRenderer, PinDirection.Input, out pInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Disconnect the sample grabber pin.
        hr = pGrabOutput.Disconnect();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Connect the Grabber output to the null renderer.
        hr = m_graphBuilder.Connect(pGrabOutput, pInput);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(pInput);
        Marshal.ReleaseComObject(pGrabOutput);
        // Remove the Video Renderer for it is no longer needed.
        IBaseFilter ivideorender = Utility.GetFilter(m_graphBuilder as IGraphBuilder, "Video Renderer");
        if (ivideorender != null)
        {
            m_graphBuilder.RemoveFilter(ivideorender);
            Marshal.ReleaseComObject(ivideorender);
        }
    }
    // Get the sample grabber media settings and video header.
    media = new AMMediaType();
    hr = m_sampleGrabber.GetConnectedMediaType(media);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    if ((media.formatType != FormatType.VideoInfo && media.formatType != FormatType.WaveEx && media.formatType != FormatType.MpegVideo) || media.formatPtr == IntPtr.Zero)
    {
        throw new Exception("Media grabber format is unknown.");
    }
    // Get the video header with frame sizing information.
    m_videoInfoHeader = Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)) as VideoInfoHeader;
    Marshal.FreeCoTaskMem(media.formatPtr);
    media.formatPtr = IntPtr.Zero;
    // If we are rendering video output, setup the video window (which requires a message pump).
    if (m_videoWindow != null)
    {
        // setup the video window
        hr = m_videoWindow.put_Owner(pb.Handle);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = m_videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // resize the window
        hr = m_videoWindow.SetWindowPosition(0, 0, pb.Width, pb.Height);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = m_videoWindow.put_Visible(DsHlp.OATRUE);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Subscribe to the picturebox size changed event.
        pb.SizeChanged += Pb_SizeChanged;
    }
    // start the capturing
    hr = m_mediaControl.Run();
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // When using a video file, immediately stop at the start.
    if (strFile != null)
    {
        hr = m_mediaControl.Pause();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }
    // When using a media file, we need to save the video file's duration.
    if (m_mediaSeek != null)
    {
        hr = m_mediaSeek.GetDuration(out m_lDuration);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }
    m_bConnected = true;
    return (m_lDuration);
}
// Thread entry point.
// Builds a capture graph for the device moniker held in `source`, requests
// RGB24 frames via a SampleGrabber, reads the connected frame size into the
// Grabber callback, runs the graph, and loops until stopEvent is signalled.
// All COM objects are released in the finally block.
public void WorkerThread()
{
    // grabber — callback object that receives the frames
    Grabber grabber = new Grabber(this);
    // objects (raw RCWs, released in finally)
    object graphObj = null;
    object sourceObj = null;
    object grabberObj = null;
    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;
    try
    {
        // Get type for filter graph
        Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (srvType == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        // create filter graph
        graphObj = Activator.CreateInstance(srvType);
        graph = (IGraphBuilder)graphObj;
        // ---- resolve the device moniker string to a base filter
        // NOTE(review): UCOMIBindCtx/UCOMIMoniker are the obsolete .NET 1.x
        // interop interfaces — consider System.Runtime.InteropServices.ComTypes.
        UCOMIBindCtx bindCtx = null;
        UCOMIMoniker moniker = null;
        int n = 0;
        // create bind context
        if (Win32.CreateBindCtx(0, out bindCtx) == 0)
        {
            // convert moniker`s string to a moniker
            if (Win32.MkParseDisplayName(bindCtx, source, ref n, out moniker) == 0)
            {
                // get device base filter
                Guid filterId = typeof(IBaseFilter).GUID;
                moniker.BindToObject(null, null, ref filterId, out sourceObj);
                Marshal.ReleaseComObject(moniker);
                moniker = null;
            }
            Marshal.ReleaseComObject(bindCtx);
            bindCtx = null;
        }
        // ----
        if (sourceObj == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }
        sourceBase = (IBaseFilter)sourceObj;
        // Get type for sample grabber
        srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (srvType == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        // create sample grabber (one COM object, two interfaces)
        grabberObj = Activator.CreateInstance(srvType);
        sg = (ISampleGrabber)grabberObj;
        grabberBase = (IBaseFilter)grabberObj;
        // add source and grabber filters to graph
        graph.AddFilter(sourceBase, "source");
        graph.AddFilter(grabberBase, "grabber");
        // set media type — ask for RGB24 video samples
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);
        // connect pins
        if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }
        // get media type — read the negotiated frame dimensions
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mt.Dispose();
        }
        // render the rest of the graph downstream of the grabber
        graph.Render(DSTools.GetOutPin(grabberBase, 0));
        // continuous grabbing through the callback (1 = BufferCB)
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);
        // window — keep any renderer window hidden
        IVideoWindow win = (IVideoWindow)graphObj;
        win.put_AutoShow(false);
        win = null;
        // get media control
        mc = (IMediaControl)graphObj;
        // run until asked to stop
        mc.Run();
        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    // catch any exceptions
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("----: " + e.Message);
    }
    // finalization block
    finally
    {
        // drop interface references, then release the underlying COM objects
        mc = null;
        graph = null;
        sourceBase = null;
        grabberBase = null;
        sg = null;
        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (sourceObj != null)
        {
            Marshal.ReleaseComObject(sourceObj);
            sourceObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
// Thread entry point.
// Plays the media file in `source` through a Windows Media source filter with
// an RGB24 SampleGrabber tap. The outer while loop restarts playback when the
// stream completes (EventCode.Complete) until stopEvent is signalled or a
// build/playback attempt fails.
public void WorkerThread()
{
    bool failed = false;
    // grabber — callback object that receives the frames
    Grabber grabber = new Grabber(this);
    // objects (raw RCWs, released in finally)
    object graphObj = null;
    object sourceObj = null;
    object grabberObj = null;
    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sg = null;
    IFileSourceFilter fileSource = null;
    IMediaControl mc = null;
    IMediaEventEx mediaEvent = null;
    int code, param1, param2;
    // rebuild-and-replay loop: one iteration per full playback of the file
    while ((!failed) && (!stopEvent.WaitOne(0, true)))
    {
        try
        {
            // Get type for filter graph
            Type srvType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
            if (srvType == null)
            {
                throw new ApplicationException("Failed creating filter graph");
            }
            // create filter graph
            graphObj = Activator.CreateInstance(srvType);
            graph = (IGraphBuilder)graphObj;
            // Get type for windows media source filter
            srvType = Type.GetTypeFromCLSID(Clsid.WindowsMediaSource);
            if (srvType == null)
            {
                throw new ApplicationException("Failed creating WM source");
            }
            // create windows media source filter
            sourceObj = Activator.CreateInstance(srvType);
            sourceBase = (IBaseFilter)sourceObj;
            // Get type for sample grabber
            srvType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
            if (srvType == null)
            {
                throw new ApplicationException("Failed creating sample grabber");
            }
            // create sample grabber (one COM object, two interfaces)
            grabberObj = Activator.CreateInstance(srvType);
            sg = (ISampleGrabber)grabberObj;
            grabberBase = (IBaseFilter)grabberObj;
            // add source and grabber filters to graph
            graph.AddFilter(sourceBase, "source");
            graph.AddFilter(grabberBase, "grabber");
            // set media type — ask for RGB24 video samples
            AMMediaType mt = new AMMediaType();
            mt.majorType = MediaType.Video;
            mt.subType = MediaSubType.RGB24;
            sg.SetMediaType(mt);
            // load file into the source filter
            fileSource = (IFileSourceFilter)sourceObj;
            fileSource.Load(this.source, null);
            // connect pins
            if (graph.Connect(DSTools.GetOutPin(sourceBase, 0), DSTools.GetInPin(grabberBase, 0)) < 0)
            {
                throw new ApplicationException("Failed connecting filters");
            }
            // get media type — read the negotiated frame dimensions
            if (sg.GetConnectedMediaType(mt) == 0)
            {
                VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
                grabber.Width = vih.BmiHeader.Width;
                grabber.Height = vih.BmiHeader.Height;
                mt.Dispose();
            }
            // render the rest of the graph downstream of the grabber
            graph.Render(DSTools.GetOutPin(grabberBase, 0));
            // continuous grabbing through the callback (1 = BufferCB)
            sg.SetBufferSamples(false);
            sg.SetOneShot(false);
            sg.SetCallback(grabber, 1);
            // window — keep any renderer window hidden
            IVideoWindow win = (IVideoWindow)graphObj;
            win.put_AutoShow(false);
            win = null;
            // get events interface
            mediaEvent = (IMediaEventEx)graphObj;
            // get media control
            mc = (IMediaControl)graphObj;
            // run and poll for either the stop signal or end-of-stream
            mc.Run();
            while (!stopEvent.WaitOne(0, true))
            {
                Thread.Sleep(100);
                // get an event (non-blocking: timeout 0)
                if (mediaEvent.GetEvent(out code, out param1, out param2, 0) == 0)
                {
                    // release params
                    mediaEvent.FreeEventParams(code, param1, param2);
                    // end of stream — leave inner loop; outer loop rebuilds and replays
                    if (code == (int)EventCode.Complete)
                    {
                        break;
                    }
                }
            }
            mc.StopWhenReady();
        }
        // catch any exceptions
        catch (Exception e)
        {
            System.Diagnostics.Debug.WriteLine("----: " + e.Message);
            failed = true;
        }
        // finalization block
        finally
        {
            // drop interface references, then release the underlying COM objects
            mediaEvent = null;
            mc = null;
            fileSource = null;
            graph = null;
            sourceBase = null;
            grabberBase = null;
            sg = null;
            if (graphObj != null)
            {
                Marshal.ReleaseComObject(graphObj);
                graphObj = null;
            }
            if (sourceObj != null)
            {
                Marshal.ReleaseComObject(sourceObj);
                sourceObj = null;
            }
            if (grabberObj != null)
            {
                Marshal.ReleaseComObject(grabberObj);
                grabberObj = null;
            }
        }
    }
}
/// <summary>
/// Build the capture graph for grabber: capture device -> sample grabber
/// (RGB24, callback disabled), plus a preview stream rendered into
/// _viewControl, then starts the graph.
/// </summary>
/// <returns>Always <c>true</c>; failures surface as thrown HRESULT exceptions.</returns>
private bool SetupGraph()
{
    const int WS_CHILD = 0x40000000;
    const int WS_CLIPCHILDREN = 0x02000000;
    const int WS_CLIPSIBLINGS = 0x04000000;
    int hr;
    hr = _capGraphBuilder2.SetFiltergraph(_graphBuilder);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    hr = _graphBuilder.AddFilter(_capFilter, "Ds.NET Video Capture Device");
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // will thow up user input for quality
    //DsUtils.ShowCapPinDialog(_capGraphBuilder2, _capFilter, IntPtr.Zero);
    // Ask the grabber for Video/RGB24 samples.
    AMMediaType media = new AMMediaType();
    media.majorType = MediaType.Video;
    media.subType = MediaSubType.RGB24;
    media.formatType = FormatType.VideoInfo;
    hr = _sampGrabber.SetMediaType(media);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    hr = _graphBuilder.AddFilter(_baseGrabFilter, "Ds.NET Grabber");
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    Guid cat;
    Guid med;
    cat = PinCategory.Capture;
    med = MediaType.Video;
    // NOTE(review): this RenderStream HRESULT is never checked before hr is reused.
    hr = _capGraphBuilder2.RenderStream(ref cat, ref med, _capFilter, null, _baseGrabFilter); // _baseGrabFilter
    // Read back the negotiated format to learn the frame dimensions.
    media = new AMMediaType();
    hr = _sampGrabber.GetConnectedMediaType(media);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
    {
        throw new NotSupportedException("Unknown Grabber Media Format");
    }
    _videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
    Marshal.FreeCoTaskMem(media.formatPtr);
    media.formatPtr = IntPtr.Zero;
    // Continuous mode; no buffering and no callback registered yet.
    hr = _sampGrabber.SetBufferSamples(false);
    if (hr == 0) { hr = _sampGrabber.SetOneShot(false); }
    if (hr == 0) { hr = _sampGrabber.SetCallback(null, 0); }
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Render preview (video -> renderer)
    hr = _capGraphBuilder2.RenderStream(PinCategory.Preview, ref med, _capFilter, null, null);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Get the IVideoWindow interface
    _videoWindow = (IVideoWindow)_graphBuilder;
    // Set the video window to be a child of the main window
    hr = _videoWindow.put_Owner(this._viewControl.Handle);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Set video window style
    hr = _videoWindow.put_WindowStyle(WS_CHILD | WS_CLIPCHILDREN | WS_CLIPSIBLINGS);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    // Position video window in client rect of owner window
    _viewControl.Resize += new EventHandler(onPreviewWindowResize);
    onPreviewWindowResize(this, null);
    //Make the video window visible, now that it is properly positioned
    hr = _videoWindow.put_Visible(DsHlp.OATRUE);
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    hr = _mediaCtrl.Run();
    if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    return (true);
}
/// <summary> build the capture graph for grabber. </summary> bool SetupGraph() { int hr; try { hr = capGraph.SetFiltergraph(graphBuilder); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } hr = graphBuilder.AddFilter(capFilter, " Video Capture Device"); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } DsUtils.ShowCapPinDialog(capGraph, capFilter, this.Handle); AMMediaType media = new AMMediaType(); media.majorType = MediaType.Video; media.subType = MediaSubType.RGB24; media.formatType = FormatType.VideoInfo; // ??? hr = sampGrabber.SetMediaType(media); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } hr = graphBuilder.AddFilter(baseGrabFlt, " Grabber"); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } Guid cat = PinCategory.Preview; Guid med = MediaType.Video; hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // baseGrabFlt if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } cat = PinCategory.Capture; med = MediaType.Video; hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } media = new AMMediaType(); hr = sampGrabber.GetConnectedMediaType(media); if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero)) { throw new NotSupportedException("Unknown Grabber Media Format"); } videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader)); Marshal.FreeCoTaskMem(media.formatPtr); media.formatPtr = IntPtr.Zero; hr = sampGrabber.SetBufferSamples(false); if (hr == 0) { hr = sampGrabber.SetOneShot(false); } if (hr == 0) { hr = sampGrabber.SetCallback(null, 0); } if (hr < 0) { Marshal.ThrowExceptionForHR(hr); } return(true); } catch (Exception ee) { MessageBox.Show(this, "Could not setup graph\r\n" + ee.Message, "Error", MessageBoxButtons.OK, MessageBoxIcon.Stop); return(false); } }
/// <summary>
/// Inserts the audio sample grabber between the "ffdshow Audio Decoder" and the
/// sound device, so the PCM samples that flow between them can be analyzed.
///
/// The graph is rewired from
///   ...-->[ffdshow Audio Decoder]-->[DirectSound Device]
/// to
///   ...-->[ffdshow Audio Decoder]-->[Sample grabber]-->[DirectSound Device]
/// because the grabber's processing assumes PCM input/output, and the
/// decoder-to-sound-device link is the only place PCM is guaranteed.
/// Any failure is logged and swallowed (best-effort feature).
/// </summary>
protected void InitAudioSampleGrabber_v2()
{
    // Get the graph builder
    IGraphBuilder graphBuilder = (mediaControl as IGraphBuilder);
    if (graphBuilder == null)
        return;

    try
    {
        // Build the sample grabber
        sampleGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(Filters.SampleGrabber, true))
            as ISampleGrabber;
        if (sampleGrabber == null)
            return;

        // Add it to the filter graph
        int hr = graphBuilder.AddFilter(sampleGrabber as IBaseFilter, "ProTONE_SampleGrabber_v2");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter ffdAudioDecoder = null;
        IPin ffdAudioDecoderOutput = null;
        IPin soundDeviceInput = null;
        IPin sampleGrabberInput = null;
        IPin sampleGrabberOutput = null;
        IntPtr pSoundDeviceInput = IntPtr.Zero;

        try
        {
            // 1. Locate the ffdshow Audio Decoder in the graph
            hr = graphBuilder.FindFilterByName("ffdshow Audio Decoder", out ffdAudioDecoder);
            DsError.ThrowExceptionForHR(hr);

            // 2. Find its output pin and the pin that it's connected to
            hr = ffdAudioDecoder.FindPin("Out", out ffdAudioDecoderOutput);
            DsError.ThrowExceptionForHR(hr);
            hr = ffdAudioDecoderOutput.ConnectedTo(out pSoundDeviceInput);
            DsError.ThrowExceptionForHR(hr);
            soundDeviceInput = new DSPin(pSoundDeviceInput).Value;

            // 3. Locate the input and output pins of sample grabber
            hr = (sampleGrabber as IBaseFilter).FindPin("In", out sampleGrabberInput);
            DsError.ThrowExceptionForHR(hr);
            hr = (sampleGrabber as IBaseFilter).FindPin("Out", out sampleGrabberOutput);
            DsError.ThrowExceptionForHR(hr);

            // 4. Disconnect the ffdshow Audio Decoder and its correspondent (sound device input pin)
            hr = ffdAudioDecoderOutput.Disconnect();
            DsError.ThrowExceptionForHR(hr);
            hr = soundDeviceInput.Disconnect();
            DsError.ThrowExceptionForHR(hr);

            // 5. Connect the ffdshow Audio Decoder to sample grabber input
            hr = graphBuilder.Connect(ffdAudioDecoderOutput, sampleGrabberInput);
            DsError.ThrowExceptionForHR(hr);

            // 6. Connect the sample grabber output to sound device input
            hr = graphBuilder.Connect(sampleGrabberOutput, soundDeviceInput);
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            // Release the pin/filter interfaces (previously leaked).
            if (ffdAudioDecoderOutput != null) Marshal.ReleaseComObject(ffdAudioDecoderOutput);
            if (soundDeviceInput != null) Marshal.ReleaseComObject(soundDeviceInput);
            if (sampleGrabberInput != null) Marshal.ReleaseComObject(sampleGrabberInput);
            if (sampleGrabberOutput != null) Marshal.ReleaseComObject(sampleGrabberOutput);
            if (ffdAudioDecoder != null) Marshal.ReleaseComObject(ffdAudioDecoder);
        }

        // Ask the grabber for PCM audio; the concrete format is discovered later.
        AMMediaType mtAudio = new AMMediaType();
        mtAudio.majorType = MediaType.Audio;
        mtAudio.subType = MediaSubType.PCM;
        mtAudio.formatPtr = IntPtr.Zero;
        _actualAudioFormat = null;

        // These HRESULTs were previously ignored; check them as the original
        // (v1) implementation did.
        hr = sampleGrabber.SetMediaType(mtAudio);
        DsError.ThrowExceptionForHR(hr);
        // SetMediaType copies the type; free our local copy (was leaked before).
        DsUtils.FreeAMMediaType(mtAudio);
        hr = sampleGrabber.SetBufferSamples(true);
        DsError.ThrowExceptionForHR(hr);
        hr = sampleGrabber.SetOneShot(false);
        DsError.ThrowExceptionForHR(hr);
        hr = sampleGrabber.SetCallback(this, 1);
        DsError.ThrowExceptionForHR(hr);

        // Start the analyzer thread that consumes the grabbed samples.
        sampleAnalyzerMustStop.Reset();
        sampleAnalyzerThread = new Thread(new ThreadStart(SampleAnalyzerLoop));
        sampleAnalyzerThread.Priority = ThreadPriority.Highest;
        sampleAnalyzerThread.Start();
    }
    catch (Exception ex)
    {
        Logger.LogException(ex);
    }

    rotEntry = new DsROTEntry(graphBuilder as IFilterGraph);
}
/// <summary>
/// Builds a playback graph for <paramref name="fname"/>: renders the file,
/// replaces the default Video Renderer with a Smart Tee whose capture leg
/// feeds the RGB24 sample grabber, optionally renders the tee's preview leg,
/// and reads the negotiated frame format into videoInfoHeader.
/// </summary>
/// <param name="fname">Path of the media file to play.</param>
/// <exception cref="Exception">Wraps any underlying failure with its message.</exception>
void SetupPlaybackGraph(string fname)
{
    int hr;
    try
    {
        hr = graphBuilder.RenderFile(fname, null);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Ask the grabber for Video/RGB24 samples.
        AMMediaType media = new AMMediaType();
        media.majorType = MediaType.Video;
        media.subType = MediaSubType.RGB24;
        media.formatType = FormatType.VideoInfo; // ???
        hr = sampGrabber.SetMediaType(media);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = graphBuilder.AddFilter(smartTee, "smartTee");
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Find the auto-inserted Video Renderer so it can be replaced by the tee.
        IBaseFilter renderer;
        hr = graphBuilder.FindFilterByName("Video Renderer", out renderer);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        IPin inPin;
        IPin srcPin;
        hr = DsUtils.GetPin(renderer, PinDirection.Input, out inPin, 0);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // Remember the upstream pin that fed the renderer, then remove it.
        hr = inPin.ConnectedTo(out srcPin);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = srcPin.Disconnect();
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = graphBuilder.RemoveFilter(renderer);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(renderer);
        Marshal.ReleaseComObject(inPin);
        // Reconnect that upstream pin into the Smart Tee input.
        hr = DsUtils.GetPin(smartTee, PinDirection.Input, out inPin, 0);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        hr = graphBuilder.Connect(srcPin, inPin);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(srcPin);
        Marshal.ReleaseComObject(inPin);
        srcPin = inPin = null;
        // Tee output 1 (capture leg) -> grabber input.
        hr = DsUtils.GetPin(smartTee, PinDirection.Output, out srcPin, 1);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // grabber Input
        hr = DsUtils.GetPin(baseGrabFlt, PinDirection.Input, out inPin, 0);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        // smartTee -> grabber
        hr = graphBuilder.Connect(srcPin, inPin);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        Marshal.ReleaseComObject(srcPin);
        Marshal.ReleaseComObject(inPin);
        srcPin = inPin = null;
        if (preview)
        {
            // Tee output 0 (preview leg) rendered to a video window.
            hr = DsUtils.GetPin(smartTee, PinDirection.Output, out srcPin, 0);
            if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
            hr = graphBuilder.Render(srcPin);
            if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
            Marshal.ReleaseComObject(srcPin);
            srcPin = null;
        }
        // Read back the negotiated format to learn the frame dimensions.
        media = new AMMediaType();
        hr = sampGrabber.GetConnectedMediaType(media);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }
        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;
        //Modified according to the platform SDK, to capture the buffer
        hr = sampGrabber.SetBufferSamples(false);
        if (hr == 0) { hr = sampGrabber.SetOneShot(false); }
        if (hr == 0) { hr = sampGrabber.SetCallback(sampleGrabber, 1); }
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }
    catch (Exception ee)
    {
        // NOTE(review): rethrowing as bare Exception loses the original type and
        // stack trace; consider wrapping with an inner exception instead.
        throw new Exception("Could not setup graph\r\n" + ee.Message);
    }
}
/// <summary> Set the options on the sample grabber </summary> private void ConfigureSampleGrabber(ISampleGrabber sampGrabber, int width, int height) { int hr; AMMediaType media = new AMMediaType(); //VideoInfoHeader v; // copy out the videoinfoheader //v = new VideoInfoHeader(); //Marshal.PtrToStructure(media.formatPtr, v); //// Set the size //v.BmiHeader.Width = width; //v.BmiHeader.Height = height; // Copy the media structure back //Marshal.StructureToPtr(v, media.formatPtr, false); // Set the media type to Video/RBG24 media.majorType = MediaType.Video; media.subType = MediaSubType.RGB24; media.formatType = FormatType.VideoInfo; hr = sampGrabber.SetMediaType(media); DsError.ThrowExceptionForHR(hr); DsUtils.FreeAMMediaType(media); media = null; hr = sampGrabber.SetBufferSamples(false); hr = sampGrabber.SetOneShot(false); // Configure the samplegrabber callback hr = sampGrabber.SetCallback(this, 1); DsError.ThrowExceptionForHR(hr); }
/// <summary>
/// Create a new filter graph and add filters (devices, compressors, misc),
/// but leave the filters unconnected. Call RenderGraph()
/// to connect the filters.
/// </summary>
void CreateGraph()
{
    // Skip if already created
    if ((int)_actualGraphState < (int)GraphState.Created)
    {
        // Make a new filter graph
        _graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Uuid.Clsid.FilterGraph, true));
        // Get the Capture Graph Builder
        var clsid = Uuid.Clsid.CaptureGraphBuilder2;
        var riid = typeof(ICaptureGraphBuilder2).GUID;
        _captureGraphBuilder = (ICaptureGraphBuilder2)Workaround.CreateDsInstance(ref clsid, ref riid);
        // Link the CaptureGraphBuilder to the filter graph
        var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        var comType = Type.GetTypeFromCLSID(Uuid.Clsid.SampleGrabber);
        if (comType == null)
        {
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        }
        var comObj = Activator.CreateInstance(comType);
        _sampGrabber = (ISampleGrabber)comObj;
        _baseGrabFlt = (IBaseFilter)_sampGrabber;
        var media = new AMMediaType();
        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device");
            if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
            // Ask the grabber for RGB32 video samples.
            media.majorType = Uuid.MediaType.Video;
            media.subType = Uuid.MediaSubType.Rgb32; //RGB24;
            media.formatType = Uuid.FormatType.VideoInfo;
            media.temporalCompression = true; //New
            hr = _sampGrabber.SetMediaType(media);
            if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
            hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber");
            if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
        }
        // Retrieve the stream control interface for the video device.
        // FindInterface will also add any required filters (WDM devices in
        // particular may need additional upstream filters to function).
        // NOTE(review): when VideoDevice is null, _videoDeviceFilter may be null
        // here and FindInterface is still called with it — verify this path.
        // Try looking for an interleaved media type
        var cat = Uuid.PinCategory.Capture;
        var med = Uuid.MediaType.Interleaved;
        var iid = typeof(IAMStreamConfig).GUID;
        hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out object o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = Uuid.MediaType.Video;
            hr = _captureGraphBuilder.FindInterface(ref cat, ref med, _videoDeviceFilter, ref iid, out o);
            if (hr != 0) { o = null; }
        }
        //VideoStreamConfig = o as IAMStreamConfig;
        // Retreive the media control interface (for starting/stopping graph)
        _mediaControl = (IMediaControl)_graphBuilder;
        // Reload any video crossbars
        //if (videoSources != null) videoSources.Dispose();
        videoSources = null;
        // NOTE(review): GetConnectedMediaType is never called and SetMediaType does
        // not populate media.formatPtr, so formatPtr is most likely IntPtr.Zero at
        // this point — verify that this PtrToStructure call can ever succeed.
        _videoInfoHeader = Marshal.PtrToStructure<VideoInfoHeader>(media.formatPtr);
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;
        // Buffer samples, continuous mode, no callback registered yet.
        hr = _sampGrabber.SetBufferSamples(true);
        if (hr == 0) { hr = _sampGrabber.SetOneShot(false); }
        if (hr == 0) { hr = _sampGrabber.SetCallback(null, 0); }
        if (hr < 0) { Marshal.ThrowExceptionForHR(hr); }
    }
    // Update the state now that we are done
    _actualGraphState = GraphState.Created;
}
// Thread entry point
// Builds a source -> sample-grabber graph for the configured device, applies the
// requested frame size/rate, runs the graph until stopEvent is signalled, then
// releases the COM objects it created.
public void WorkerThread()
{
    int hr;
    Guid cat;
    Guid med;

    // grabber — receives frame callbacks (SetCallback below)
    Grabber grabber = new Grabber(this);

    // objects — raw COM objects, released in the finally block
    object graphObj = null;
    object grabberObj = null;

    // interfaces — typed views onto the COM objects above
    IGraphBuilder graphBuilder = null;
    ICaptureGraphBuilder2 captureGraphBuilder = null;
    IBaseFilter videoDeviceFilter = null;
    IBaseFilter grabberFilter = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;

    try
    {
        // Make a new filter graph
        graphObj = Activator.CreateInstance(
            Type.GetTypeFromCLSID(Clsid.FilterGraph, true));
        graphBuilder = (IGraphBuilder)graphObj;

        // Get the Capture Graph Builder
        // (created via TempFix helper — presumably a COM-interop workaround)
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2)
            TempFix.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Get the video device and add it to the filter graph
        if (source != null)
        {
            videoDeviceFilter = (IBaseFilter)
                Marshal.BindToMoniker(source);
            hr = graphBuilder.AddFilter(videoDeviceFilter,
                "Video Capture Device");
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
        }

        // create sample grabber, object and filter
        grabberObj = Activator.CreateInstance(
            Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
        grabberFilter = (IBaseFilter)grabberObj;
        sg = (ISampleGrabber)grabberObj;

        // add sample grabber filter to filter graph
        hr = graphBuilder.AddFilter(grabberFilter, "grabber");
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Try looking for an video device interleaved media type
        IBaseFilter testFilter = videoDeviceFilter; // grabberFilter (not supported)
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, testFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, testFilter, ref iid, out o);
            if (hr != 0)
                o = null;
        }

        // Set the video stream configuration to data member
        videoStreamConfig = o as IAMStreamConfig;
        o = null;

        // Experimental testing: Try to set the Frame Size & Rate
        // Results: When enabled, the grabber video breaks up into
        // several duplicate frames (6 frames)
        bool bdebug = true;
        if (bdebug)
        {
            // Push the requested width/height into the stream's BITMAPINFOHEADER.
            BitmapInfoHeader bmiHeader;
            bmiHeader = (BitmapInfoHeader)
                getStreamConfigSetting(videoStreamConfig, "BmiHeader");
            bmiHeader.Width = framesize.Width;
            bmiHeader.Height = framesize.Height;
            setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

            // AvgTimePerFrame is in 100ns units, hence 10,000,000 / fps.
            long avgTimePerFrame = (long)(10000000 / framerate);
            setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
        }

        // connect pins (Turns on the video device)
        if (graphBuilder.Connect(DSTools.GetOutPin(videoDeviceFilter, 0),
            DSTools.GetInPin(grabberFilter, 0)) < 0)
            throw new ApplicationException(
                "Failed connecting filters");

        // Set the sample grabber media type settings
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);

        // get media type — read back the negotiated frame dimensions
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            System.Diagnostics.Debug.WriteLine("width = " + vih.BmiHeader.Width + ", height = " + vih.BmiHeader.Height);
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mt.Dispose();
        }

        // render the grabber's output (completes the graph downstream)
        graphBuilder.Render(DSTools.GetOutPin(grabberFilter, 0));

        // Set various sample grabber properties:
        // callback mode (SetCallback with method index 1 = BufferCB), continuous.
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);

        // Do not show active (source) window
        IVideoWindow win = (IVideoWindow)graphObj;
        win.put_AutoShow(false);
        win = null;

        // get media control
        mc = (IMediaControl)graphObj;

        // run the graph and poll for the stop signal
        mc.Run();

        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    // catch any exceptions — best-effort: the worker thread must not crash the host
    catch (Exception e)
    {
        System.Diagnostics.Debug.WriteLine("----: " + e.Message);
    }
    // finalization block
    finally
    {
        // release all objects: drop the typed views first, then release the
        // underlying COM objects exactly once each.
        mc = null;
        graphBuilder = null;
        captureGraphBuilder = null;
        videoDeviceFilter = null;
        grabberFilter = null;
        sg = null;

        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
/// <summary>
/// Create a new filter graph and add filters (devices, compressors, misc),
/// but leave the filters unconnected. Call RenderGraph()
/// to connect the filters.
/// </summary>
void CreateGraph()
{
    //Skip if already created
    if ((int)_actualGraphState < (int)GraphState.Created)
    {
        // Make a new filter graph
        _graphBuilder = (IGraphBuilder) new FilterGraph();

        // Get the Capture Graph Builder
        _captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Link the CaptureGraphBuilder to the filter graph
        var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Create the sample grabber and keep both views: the ISampleGrabber
        // configuration interface and the IBaseFilter used to insert it
        // into the graph.
        var comObj = new SampleGrabber();
        _sampGrabber = (ISampleGrabber)comObj;
        _baseGrabFlt = (IBaseFilter)_sampGrabber;

        var media = new AMMediaType();

        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Ask the grabber for 32-bit RGB video frames.
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB32; //RGB24;
            media.formatType = FormatType.VideoInfo;
            media.temporalCompression = true; //New
            hr = _sampGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).
        // Try looking for an interleaved media type
        var cat = PinCategory.Capture;
        var med = MediaType.Interleaved;
        var iid = typeof(IAMStreamConfig).GUID;
        hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out var o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out o);
            if (hr != 0)
            {
                // ReSharper disable once RedundantAssignment
                o = null;
            }
        }
        //VideoStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph)
        _mediaControl = (IMediaControl)_graphBuilder;

        // Reload any video crossbars
        //if (videoSources != null) videoSources.Dispose();
        videoSources = null;

        // NOTE(review): GetConnectedMediaType is never called here, and
        // SetMediaType does not fill the caller's AMMediaType, so
        // media.formatPtr appears to still be IntPtr.Zero at this point —
        // _videoInfoHeader would then be null and FreeCoTaskMem a no-op.
        // Confirm against the SetupGraph variants, which read the header
        // only after GetConnectedMediaType succeeds.
        _videoInfoHeader = Marshal.PtrToStructure<VideoInfoHeader>(media.formatPtr);
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;

        // Buffer samples (no one-shot, no callback): frames are fetched by polling.
        hr = _sampGrabber.SetBufferSamples(true);
        if (hr == 0)
        {
            hr = _sampGrabber.SetOneShot(false);
        }
        if (hr == 0)
        {
            hr = _sampGrabber.SetCallback(null, 0);
        }
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
    }

    // Update the state now that we are done
    _actualGraphState = GraphState.Created;
}
/// <summary>
/// Connects to the property changed events of the camera settings.
/// </summary>
//private void Initialize()
//{
//    //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
//    //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
//    //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;
//    //stopwatch = new Stopwatch();
//}

/// <summary>
/// Build the capture graph for grabber.
/// </summary>
/// <param name="dev">The index of the new capture device.</param>
/// <param name="frameRate">The framerate to use.</param>
/// <param name="width">The width to use.</param>
/// <param name="height">The height to use.</param>
/// <returns>True, if successful, otherwise false.</returns>
private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
{
    int hr;

    fps = frameRate; // Not measured, only to expose FPS externally
    cameraControl = null;
    capFilter = null;

    // Get the graphbuilder object
    graphBuilder = (IFilterGraph2)new FilterGraph();
    mediaControl = graphBuilder as IMediaControl;

    try
    {
        // Create the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        // Create the SampleGrabber interface
        sampGrabber = (ISampleGrabber)new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
        //                          DsError.GetErrorText(hr));

#if DEBUG
        // Register the graph in the Running Object Table so GraphEdit can attach.
        this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

        // Add the video capture device by friendly name.
        this.capFilter = CreateFilter(FilterCategory.VideoInputDevice, dev.Name);
        if (this.capFilter != null)
        {
            hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
            DsError.ThrowExceptionForHR(hr);
        }

        //// Add the video device
        //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //if (hr != 0)
        //    ErrorLogger.WriteLine(
        //        "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
        //        DsError.GetErrorText(hr));

        var baseGrabFlt = (IBaseFilter)sampGrabber;

        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
        //                          DsError.GetErrorText(hr));

        // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM
        /*
         if (!defaultMode)
         {
            m_icc = capFilter as IAMCameraControl;
            CameraControlFlags CamFlags = new CameraControlFlags();
            int pMin, pMax, pStep, pDefault;

            hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
            m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
         }
        */

        //IBaseFilter smartTee = new SmartTee() as IBaseFilter;
        //// Add the smart tee filter to the graph
        //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
        //Marshal.ThrowExceptionForHR(hr);
        // Connect the video source output to the smart tee
        //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);

        // Connect source -> grabber; hr is deliberately checked further below,
        // after the videoProcAmp lookup, so the remaining setup still runs.
        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);

        cameraControl = capFilter as IAMCameraControl;

        // Set videoProcAmp
        object obj;
        var iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770");
        DirectShowDevices.Instance.Cameras[deviceNumber].DirectshowDevice.Mon.BindToObject(
            null, null, ref iid_IBaseFilter, out obj);

        videoProcAmp = obj as IAMVideoProcAmp;

        // If any of the default config items are set
        if (frameRate + height + width > 0)
            SetConfigParms(capGraph, capFilter, frameRate, width, height);

        // Check for successful rendering, if this failed the class cannot be used,
        // so dispose the resources and return false.
        if (hr < 0)
        {
            Cleanup();
            return false;
        }
        else
        {
            // Otherwise update the SampleGrabber.
            SaveSizeInfo(sampGrabber);
            hr = sampGrabber.SetBufferSamples(false);

            if (hr == 0)
            {
                hr = sampGrabber.SetOneShot(false);
                hr = sampGrabber.SetCallback(this, 1);
            }

            //if (hr < 0)
            //    ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
        }
    }
    catch (Exception)
    {
        //ErrorLogger.ProcessException(ex, false);

        Cleanup();
        return false;
    }

    return true;
}
// Worker thread body: builds a capture graph with separate video and snapshot
// sample grabbers, optionally runs it, services crossbar/trigger/property-page
// requests in a polling loop until stopEvent fires, then releases all COM
// objects and reports why playback finished.
private void WorkerThread(bool runGraph)
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    bool isSapshotSupported = false;

    // Frame sinks: one for the live video stream, one for still-image snapshots.
    Grabber videoGrabber = new Grabber(this, false);
    Grabber snapshotGrabber = new Grabber(this, true);

    // Raw COM objects — released exactly once each in the finally block.
    object captureGraphObject = null;
    object graphObject = null;
    object videoGrabberObject = null;
    object snapshotGrabberObject = null;
    object crossbarObject = null;

    // Typed interfaces onto the COM objects above.
    ICaptureGraphBuilder2 captureGraph = null;
    IFilterGraph2 graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter videoGrabberBase = null;
    IBaseFilter snapshotGrabberBase = null;
    ISampleGrabber videoSampleGrabber = null;
    ISampleGrabber snapshotSampleGrabber = null;
    IMediaControl mediaControl = null;
    IAMVideoControl videoControl = null;
    IMediaEventEx mediaEvent = null;
    IPin pinStillImage = null;
    IAMCrossbar crossbar = null;

    try
    {
        // Create the capture graph builder.
        Type type = Type.GetTypeFromCLSID(Clsid.CaptureGraphBuilder2);
        if (type == null)
        {
            throw new ApplicationException("Failed creating capture graph builder");
        }
        captureGraphObject = Activator.CreateInstance(type);
        captureGraph = (ICaptureGraphBuilder2)captureGraphObject;

        // Create the filter graph and link the builder to it.
        type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        graphObject = Activator.CreateInstance(type);
        graph = (IFilterGraph2)graphObject;
        captureGraph.SetFiltergraph((IGraphBuilder)graph);

        // Create the capture device from its moniker string.
        sourceObject = FilterInfo.CreateFilter(deviceMoniker);
        if (sourceObject == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }
        sourceBase = (IBaseFilter)sourceObject;

        // IAMVideoControl is optional (needed only for snapshot triggering).
        try
        {
            videoControl = (IAMVideoControl)sourceObject;
        }
        catch
        {
        }

        // Create both sample grabbers from the same CLSID.
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        videoGrabberObject = Activator.CreateInstance(type);
        videoSampleGrabber = (ISampleGrabber)videoGrabberObject;
        videoGrabberBase = (IBaseFilter)videoGrabberObject;
        snapshotGrabberObject = Activator.CreateInstance(type);
        snapshotSampleGrabber = (ISampleGrabber)snapshotGrabberObject;
        snapshotGrabberBase = (IBaseFilter)snapshotGrabberObject;

        // Populate the graph.
        graph.AddFilter(sourceBase, "source");
        graph.AddFilter(videoGrabberBase, "grabber_video");
        graph.AddFilter(snapshotGrabberBase, "grabber_snapshot");

        // Request RGB24 video samples from both grabbers.
        AMMediaType mediaType = new AMMediaType();
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        videoSampleGrabber.SetMediaType(mediaType);
        snapshotSampleGrabber.SetMediaType(mediaType);

        // Look upstream of the source for a crossbar (analog tuner cards).
        captureGraph.FindInterface(FindDirection.UpstreamOnly, Guid.Empty, sourceBase,
            typeof(IAMCrossbar).GUID, out crossbarObject);
        if (crossbarObject != null)
        {
            crossbar = (IAMCrossbar)crossbarObject;
        }
        isCrossbarAvailable = (crossbar != null);
        crossbarVideoInputs = ColletCrossbarVideoInputs(crossbar);

        // Determine whether the device can deliver externally-triggered stills.
        if (videoControl != null)
        {
            captureGraph.FindPin(sourceObject, PinDirection.Output,
                PinCategory.StillImage, MediaType.Video, false, 0, out pinStillImage);
            if (pinStillImage != null)
            {
                VideoControlFlags caps;
                videoControl.GetCaps(pinStillImage, out caps);
                isSapshotSupported = ((caps & VideoControlFlags.ExternalTriggerEnable) != 0);
            }
        }

        // Video grabber: push frames via callback (method index 1 = BufferCB).
        videoSampleGrabber.SetBufferSamples(false);
        videoSampleGrabber.SetOneShot(false);
        videoSampleGrabber.SetCallback(videoGrabber, 1);

        // Snapshot grabber: additionally buffer the last sample.
        snapshotSampleGrabber.SetBufferSamples(true);
        snapshotSampleGrabber.SetOneShot(false);
        snapshotSampleGrabber.SetCallback(snapshotGrabber, 1);

        // Apply resolution/rate and collect pin capabilities for both streams.
        GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase,
            PinCategory.Capture, videoResolution, ref videoCapabilities);
        if (isSapshotSupported)
        {
            GetPinCapabilitiesAndConfigureSizeAndRate(captureGraph, sourceBase,
                PinCategory.StillImage, snapshotResolution, ref snapshotCapabilities);
        }
        else
        {
            snapshotCapabilities = new VideoCapabilities[0];
        }

        // Cache capabilities per device moniker (shared across instances).
        lock (cacheVideoCapabilities)
        {
            if ((videoCapabilities != null) && (!cacheVideoCapabilities.ContainsKey(deviceMoniker)))
            {
                cacheVideoCapabilities.Add(deviceMoniker, videoCapabilities);
            }
        }
        lock (cacheSnapshotCapabilities)
        {
            if ((snapshotCapabilities != null) && (!cacheSnapshotCapabilities.ContainsKey(deviceMoniker)))
            {
                cacheSnapshotCapabilities.Add(deviceMoniker, snapshotCapabilities);
            }
        }

        if (runGraph)
        {
            // Connect source -> video grabber and read back the negotiated size.
            captureGraph.RenderStream(PinCategory.Capture, MediaType.Video, sourceBase, null, videoGrabberBase);

            if (videoSampleGrabber.GetConnectedMediaType(mediaType) == 0)
            {
                VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                videoGrabber.Width = vih.BmiHeader.Width;
                videoGrabber.Height = vih.BmiHeader.Height;
                mediaType.Dispose();
            }

            // Same for the still-image pin, when supported and requested.
            if ((isSapshotSupported) && (provideSnapshots))
            {
                captureGraph.RenderStream(PinCategory.StillImage, MediaType.Video, sourceBase, null, snapshotGrabberBase);

                if (snapshotSampleGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                    snapshotGrabber.Width = vih.BmiHeader.Width;
                    snapshotGrabber.Height = vih.BmiHeader.Height;
                    mediaType.Dispose();
                }
            }

            mediaControl = (IMediaControl)graphObject;
            mediaEvent = (IMediaEventEx)graphObject;

            IntPtr p1, p2;
            DsEvCode code;

            // Start streaming.
            mediaControl.Run();

            if ((isSapshotSupported) && (provideSnapshots))
            {
                startTime = DateTime.Now;
                videoControl.SetMode(pinStillImage, VideoControlFlags.ExternalTriggerEnable);
            }

            // Service loop: drain graph events and honor requests set by other
            // threads (input switch, snapshot trigger, property pages) until
            // stopEvent is signalled or the device disappears.
            do
            {
                if (mediaEvent != null)
                {
                    if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                    {
                        mediaEvent.FreeEventParams(code, p1, p2);

                        if (code == DsEvCode.DeviceLost)
                        {
                            reasonToStop = ReasonToFinishPlaying.DeviceLost;
                            break;
                        }
                    }
                }

                if (needToSetVideoInput)
                {
                    needToSetVideoInput = false;
                    if (isCrossbarAvailable.Value)
                    {
                        SetCurrentCrossbarInput(crossbar, crossbarVideoInput);
                        crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                    }
                }

                if (needToSimulateTrigger)
                {
                    needToSimulateTrigger = false;
                    if ((isSapshotSupported) && (provideSnapshots))
                    {
                        videoControl.SetMode(pinStillImage, VideoControlFlags.Trigger);
                    }
                }

                if (needToDisplayPropertyPage)
                {
                    needToDisplayPropertyPage = false;
                    DisplayPropertyPage(parentWindowForPropertyPage, sourceObject);

                    // The user may have changed the input from the page.
                    if (crossbar != null)
                    {
                        crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                    }
                }

                if (needToDisplayCrossBarPropertyPage)
                {
                    needToDisplayCrossBarPropertyPage = false;
                    if (crossbar != null)
                    {
                        DisplayPropertyPage(parentWindowForPropertyPage, crossbar);
                        crossbarVideoInput = GetCurrentCrossbarInput(crossbar);
                    }
                }
            }
            while (!stopEvent.WaitOne(100, false));

            mediaControl.Stop();
        }
    }
    catch (Exception exception)
    {
        // Report the failure to subscribers rather than crashing the thread.
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // Drop the typed views first, then release each COM object once.
        captureGraph = null;
        graph = null;
        sourceBase = null;
        mediaControl = null;
        videoControl = null;
        mediaEvent = null;
        pinStillImage = null;
        crossbar = null;
        videoGrabberBase = null;
        snapshotGrabberBase = null;
        videoSampleGrabber = null;
        snapshotSampleGrabber = null;

        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceObject != null)
        {
            Marshal.ReleaseComObject(sourceObject);
            sourceObject = null;
        }
        if (videoGrabberObject != null)
        {
            Marshal.ReleaseComObject(videoGrabberObject);
            videoGrabberObject = null;
        }
        if (snapshotGrabberObject != null)
        {
            Marshal.ReleaseComObject(snapshotGrabberObject);
            snapshotGrabberObject = null;
        }
        if (captureGraphObject != null)
        {
            Marshal.ReleaseComObject(captureGraphObject);
            captureGraphObject = null;
        }
        if (crossbarObject != null)
        {
            Marshal.ReleaseComObject(crossbarObject);
            crossbarObject = null;
        }
    }

    if (PlayingFinished != null)
    {
        PlayingFinished(this, reasonToStop);
    }
}
/// <summary>
/// Worker thread: builds a source -> sample-grabber graph for the configured
/// moniker, runs it until the stop event is signalled, then releases every
/// COM object it created.
/// </summary>
public void WorkerThread()
{
    // Callback sink that receives the grabbed frames.
    Grabber frameSink = new Grabber(this);

    // Raw COM objects — released exactly once each in the finally block.
    object graphObject = null;
    object deviceObject = null;
    object grabberObject = null;

    // Typed interfaces onto the COM objects above.
    IGraphBuilder graphBuilder = null;
    IBaseFilter deviceFilter = null;
    IBaseFilter grabberFilter = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;

    try
    {
        // Create the filter graph.
        Type graphType = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (graphType == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        graphObject = Activator.CreateInstance(graphType);
        graphBuilder = (IGraphBuilder)graphObject;

        // Resolve the device moniker string into the capture device object.
        UCOMIBindCtx bindCtx = null;
        UCOMIMoniker moniker = null;
        int charsParsed = 0;
        if (Win32.CreateBindCtx(0, out bindCtx) == 0)
        {
            if (Win32.MkParseDisplayName(bindCtx, this.source, ref charsParsed, out moniker) == 0)
            {
                Guid baseFilterIid = typeof(IBaseFilter).GUID;
                moniker.BindToObject(null, null, ref baseFilterIid, out deviceObject);
                Marshal.ReleaseComObject(moniker);
                moniker = null;
            }
            Marshal.ReleaseComObject(bindCtx);
            bindCtx = null;
        }
        if (deviceObject == null)
        {
            throw new ApplicationException("Failed creating device object for moniker");
        }
        deviceFilter = (IBaseFilter)deviceObject;

        // Create the sample grabber (both its filter and config interfaces).
        Type grabberType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (grabberType == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        grabberObject = Activator.CreateInstance(grabberType);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberFilter = (IBaseFilter)grabberObject;

        // Assemble the graph: source -> grabber.
        graphBuilder.AddFilter(deviceFilter, "source");
        graphBuilder.AddFilter(grabberFilter, "grabber");

        // Ask the grabber for RGB24 video samples.
        AMMediaType mediaType = new AMMediaType
        {
            majorType = MediaType.Video,
            subType = MediaSubType.RGB24
        };
        sampleGrabber.SetMediaType(mediaType);

        if (graphBuilder.Connect(DSTools.GetOutPin(deviceFilter, 0), DSTools.GetInPin(grabberFilter, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // Read back the negotiated frame dimensions.
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader videoHeader =
                (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
            frameSink.Width = videoHeader.BmiHeader.Width;
            frameSink.Height = videoHeader.BmiHeader.Height;
            mediaType.Dispose();
        }

        // Complete the graph downstream of the grabber.
        graphBuilder.Render(DSTools.GetOutPin(grabberFilter, 0));

        // Deliver frames via callback (method index 1 = BufferCB), continuously.
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(frameSink, 1);

        // Keep the renderer's video window hidden.
        ((IVideoWindow)graphObject).put_AutoShow(false);

        // Run the graph and poll for the stop signal.
        mediaControl = (IMediaControl)graphObject;
        mediaControl.Run();
        while (!this.stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mediaControl.StopWhenReady();
    }
    catch (Exception)
    {
        // Best-effort: the worker thread must never crash the host.
    }
    finally
    {
        // Drop the typed views first, then release each COM object once.
        mediaControl = null;
        graphBuilder = null;
        deviceFilter = null;
        grabberFilter = null;
        sampleGrabber = null;

        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (deviceObject != null)
        {
            Marshal.ReleaseComObject(deviceObject);
            deviceObject = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }
}
/// <summary>
/// Build the capture graph: add the capture device and sample grabber,
/// render both the preview and capture streams, read back the negotiated
/// video format, and put the grabber into polled (buffered-samples-off,
/// no-callback) mode.
/// </summary>
/// <returns>True if the graph was built successfully, otherwise false
/// (a message box is shown with the failure reason).</returns>
bool SetupGraph()
{
    int hr;

    try
    {
        // Link the capture graph builder to the filter graph.
        hr = capGraph.SetFiltergraph(graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Add the capture device.
        hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Ask the sample grabber for RGB24 video frames.
        AMMediaType media = new AMMediaType();
        media.majorType = MediaType.Video;
        media.subType = MediaSubType.RGB24;
        media.formatType = FormatType.VideoInfo; // ???
        hr = sampGrabber.SetMediaType(media);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Insert the grabber into the graph.
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Render the preview stream.
        Guid cat = PinCategory.Preview;
        Guid med = MediaType.Video;
        hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, null); // preview
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Render the capture stream through the grabber.
        cat = PinCategory.Capture;
        med = MediaType.Video;
        hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // capture
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Read back the format actually negotiated on the grabber's input pin.
        media = new AMMediaType();
        hr = sampGrabber.GetConnectedMediaType(media);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        // Copy the VIDEOINFOHEADER out of the unmanaged block, then free it —
        // GetConnectedMediaType allocates formatPtr with CoTaskMemAlloc.
        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;

        // Polled mode: no sample buffering, no one-shot, no callback.
        hr = sampGrabber.SetBufferSamples(false);
        if (hr == 0)
        {
            hr = sampGrabber.SetOneShot(false);
        }
        if (hr == 0)
        {
            hr = sampGrabber.SetCallback(null, 0);
        }
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        return true;
    }
    catch (Exception ee)
    {
        MessageBox.Show("Could not setup graph\r\n" + ee.Message, "DirectShow.NET",
            MessageBoxButtons.OK, MessageBoxIcon.Stop);
        return false;
    }
    // Removed dead locals `mux`/`sink`: they were never assigned, so the old
    // finally block that "released" them was a no-op.
}
/// <summary>
/// Connects to the property changed events of the camera settings.
/// </summary>
//private void Initialize()
//{
//    //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
//    //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
//    //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;
//    //stopwatch = new Stopwatch();
//}

/// <summary>
/// Build the capture graph for grabber.
/// </summary>
/// <param name="dev">The index of the new capture device.</param>
/// <param name="frameRate">The framerate to use.</param>
/// <param name="width">The width to use.</param>
/// <param name="height">The height to use.</param>
/// <returns>True, if successful, otherwise false.</returns>
private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
{
    int hr;

    fps = frameRate; // Not measured, only to expose FPS externally
    cameraControl = null;
    capFilter = null;

    // Get the graphbuilder object
    graphBuilder = (IFilterGraph2) new FilterGraph();
    mediaControl = graphBuilder as IMediaControl;

    try
    {
        // Create the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Create the SampleGrabber interface
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
        //                          DsError.GetErrorText(hr));

#if DEBUG
        // Register the graph in the Running Object Table so GraphEdit can attach.
        this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

        // Add the video capture device by friendly name.
        this.capFilter = CreateFilter(FilterCategory.VideoInputDevice, dev.Name);
        if (this.capFilter != null)
        {
            hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
            DsError.ThrowExceptionForHR(hr);
        }

        //// Add the video device
        //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //if (hr != 0)
        //    ErrorLogger.WriteLine(
        //        "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
        //        DsError.GetErrorText(hr));

        var baseGrabFlt = (IBaseFilter)sampGrabber;

        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
        //                          DsError.GetErrorText(hr));

        // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM
        /*
         * if (!defaultMode)
         * {
         * m_icc = capFilter as IAMCameraControl;
         * CameraControlFlags CamFlags = new CameraControlFlags();
         * int pMin, pMax, pStep, pDefault;
         *
         * hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
         * m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
         * }
         */

        //IBaseFilter smartTee = new SmartTee() as IBaseFilter;
        //// Add the smart tee filter to the graph
        //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
        //Marshal.ThrowExceptionForHR(hr);
        // Connect the video source output to the smart tee
        //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);

        // Connect source -> grabber; hr is deliberately checked further below,
        // after the videoProcAmp lookup, so the remaining setup still runs.
        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);

        cameraControl = capFilter as IAMCameraControl;

        // Set videoProcAmp
        object obj;
        var iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770");
        DirectShowDevices.Instance.Cameras[deviceNumber].DirectshowDevice.Mon.BindToObject(
            null, null, ref iid_IBaseFilter, out obj);

        videoProcAmp = obj as IAMVideoProcAmp;

        // If any of the default config items are set
        if (frameRate + height + width > 0)
        {
            SetConfigParms(capGraph, capFilter, frameRate, width, height);
        }

        // Check for successful rendering, if this failed the class cannot be used,
        // so dispose the resources and return false.
        if (hr < 0)
        {
            Cleanup();
            return false;
        }
        else
        {
            // Otherwise update the SampleGrabber.
            SaveSizeInfo(sampGrabber);
            hr = sampGrabber.SetBufferSamples(false);

            if (hr == 0)
            {
                hr = sampGrabber.SetOneShot(false);
                hr = sampGrabber.SetCallback(this, 1);
            }

            //if (hr < 0)
            //    ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
        }
    }
    catch (Exception)
    {
        //ErrorLogger.ProcessException(ex, false);

        Cleanup();
        return false;
    }

    return true;
}
/// <summary>
/// Worker thread that captures the images
/// </summary>
// Builds a source -> grabber graph for the stored moniker, retries reading the
// negotiated frame size (the pipeline can be slower than this thread at
// startup), runs the graph until the stop signal fires, then releases
// everything via Release().
private void RunWorker()
{
    try
    {
        // Create the main graph
        m_igrphbldGraph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

        // Create the webcam source
        m_sourceObject = FilterInfo.CreateFilter(m_sMonikerString);

        // Create the grabber
        m_isplGrabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        m_grabberObject = m_isplGrabber as IBaseFilter;

        // Add the source and grabber to the main graph
        m_igrphbldGraph.AddFilter(m_sourceObject, "source");
        m_igrphbldGraph.AddFilter(m_grabberObject, "grabber");

        using (AMMediaType mediaType = new AMMediaType())
        {
            // Ask the grabber for 32-bit RGB video frames.
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            m_isplGrabber.SetMediaType(mediaType);

            // Connect source output to grabber input; on success read back the
            // negotiated media type to learn the frame dimensions.
            if (m_igrphbldGraph.Connect(m_sourceObject.GetPin(PinDirection.Output, 0),
                    m_grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (m_isplGrabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // During startup, this code can be too fast, so try at least 3 times
                    int retryCount = 0;
                    bool succeeded = false;
                    while ((retryCount < 3) && !succeeded)
                    {
                        // Tried again
                        retryCount++;

                        try
                        {
                            // Retrieve the grabber information
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            m_grbrCapGrabber.Width = header.BmiHeader.Width;
                            m_grbrCapGrabber.Height = header.BmiHeader.Height;

                            // Succeeded
                            succeeded = true;
                        }
                        catch (Exception retryException)
                        {
                            // Trace — then back off briefly before retrying.
                            Trace.TraceInformation("Failed to retrieve the grabber information, tried {0} time(s)", retryCount);

                            // Sleep
                            Thread.Sleep(50);
                        }
                    }
                }
            }

            // Complete the graph downstream of the grabber.
            m_igrphbldGraph.Render(m_grabberObject.GetPin(PinDirection.Output, 0));

            // Deliver frames via callback (method index 1 = BufferCB), continuously.
            m_isplGrabber.SetBufferSamples(false);
            m_isplGrabber.SetOneShot(false);
            m_isplGrabber.SetCallback(m_grbrCapGrabber, 1);

            // Get the video window — keep the renderer's window hidden.
            IVideoWindow wnd = (IVideoWindow)m_igrphbldGraph;
            wnd.put_AutoShow(false);
            wnd = null;

            // Create the control and run
            m_imedctrlControl = (IMediaControl)m_igrphbldGraph;
            m_imedctrlControl.Run();

            // Wait for the stop signal
            while (!m_rstevStopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }

            // Stop when ready
            // _control.StopWhenReady();
            m_imedctrlControl.Stop();

            // Wait a bit... It apparently takes some time to stop IMediaControl
            Thread.Sleep(1000);
        }
    }
    catch (Exception ex)
    {
        // Trace — best-effort: the worker thread must not crash the host.
        Trace.WriteLine(ex);
    }
    finally
    {
        // Clean up — releases the COM objects created above.
        this.Release();
    }
}
// Thread entry point.
// Builds a DirectShow capture graph (video device -> sample grabber -> optional
// renderer), optionally reconfigures the stream resolution, then runs the graph
// and pumps until stopEvent is signalled. Frames are delivered to the Grabber
// callback, which forwards them to this video source.
public void WorkerThread()
{
    int hr;
    Guid cat;
    Guid med;

    // grabber: callback object that receives the decoded video samples
    Grabber grabber = new Grabber(this);

    // objects (raw COM instances, released in the finally block)
    object graphObj = null;
    object grabberObj = null;

    // interfaces
    IGraphBuilder graphBuilder = null;
    DShowNET.ICaptureGraphBuilder2 captureGraphBuilder = null;
    IBaseFilter videoDeviceFilter = null;
    IBaseFilter grabberFilter = null;
    ISampleGrabber sg = null;
    IMediaControl mc = null;

    try
    {
        // Make a new filter graph
        graphObj = Activator.CreateInstance(Type.GetTypeFromCLSID(DShowNET.Clsid.FilterGraph, true));
        graphBuilder = (IGraphBuilder)graphObj;

        // Get the Capture Graph Builder
        Guid clsid = DShowNET.Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(DShowNET.ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (DShowNET.ICaptureGraphBuilder2)DShowNET.DsBugWO.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph((DShowNET.IGraphBuilder)graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Register the graph in the Running Object Table (lets GraphEdit attach)
        int rotCookie = 0;
        DShowNET.DsROT.AddGraphToRot(graphBuilder, out rotCookie);

        // Get the video device and add it to the filter graph
        if (deviceMoniker != null)
        {
            videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(deviceMoniker);
            hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // create sample grabber, object and filter
        grabberObj = Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.SampleGrabber, true));
        grabberFilter = (IBaseFilter)grabberObj;
        sg = (ISampleGrabber)grabberObj;

        // add sample grabber filter to filter graph
        hr = graphBuilder.AddFilter(grabberFilter, "grabber");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Try looking for an video device interleaved media type
        IBaseFilter testFilter = videoDeviceFilter; // grabberFilter (not supported)
        object o;
        cat = DShowNET.PinCategory.Capture;
        med = DShowNET.MediaType.Interleaved;
        Guid iid = typeof(DShowNET.IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, (DShowNET.IBaseFilter)testFilter, ref iid, out o);
            if (hr != 0)
            {
                o = null;
            }
        }

        // Set the video stream configuration to data member
        videoStreamConfig = o as DShowNET.IAMStreamConfig;
        o = null;

        //modifies the stream size and frame rate
        if (modifyStream)
        {
            //set size of frame
            BitmapInfoHeader bmiHeader;
            bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
            bmiHeader.Width = streamSize.Width;
            bmiHeader.Height = streamSize.Height;
            setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

            //set frame rate (not supported on the cameras we have)
            /*
             * long avgTimePerFrame = (long)(10000000 / framerate);
             * setStreamConfigSetting(videoStreamConfig, "AvgTimePerFrame", avgTimePerFrame);
             */
        }

        // connect pins (Turns on the video device)
        if (graphBuilder.Connect((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)videoDeviceFilter, 0),
                                 (IPin)AForge.Video.DirectShow.Internals.Tools.GetInPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0)) < 0)
        {
            throw new ApplicationException("Failed connecting filters");
        }

        // Set the sample grabber media type settings
        // NOTE(review): mt is never released with DsUtils.FreeAMMediaType —
        // confirm whether the format block leaks here.
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        sg.SetMediaType(mt);

        // get media type and set sample grabber parameters
        if (sg.GetConnectedMediaType(mt) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader));
            // Non-zero biCompression is treated as a YUYV stream by the grabber
            if (vih.BmiHeader.Compression != 0)
            {
                YUYV = true;
                grabber.setYUYV(YUYV);
            }
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            //mt.Dispose();
        }

        // Set various sample grabber properties:
        // no buffering, continuous capture, BufferCB callback (type 1)
        sg.SetBufferSamples(false);
        sg.SetOneShot(false);
        sg.SetCallback(grabber, 1);

        if (!preventFreezing)
        {
            // render
            graphBuilder.Render((IPin)AForge.Video.DirectShow.Internals.Tools.GetOutPin((AForge.Video.DirectShow.Internals.IBaseFilter)grabberFilter, 0));

            // Do not show active (source) window
            IVideoWindow win = (IVideoWindow)graphObj;
            win.put_AutoShow(0);
            win = null;
        }

        // get media control
        mc = (IMediaControl)graphBuilder;

        // run the graph and poll for the stop request
        mc.Run();
        while (!stopEvent.WaitOne(0, true))
        {
            Thread.Sleep(100);
        }
        mc.StopWhenReady();
    }
    // catch any exceptions
    catch (Exception e)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(e.Message));
        }
    }
    // finalization block
    finally
    {
        // release all objects (interface references first, then the COM objects)
        mc = null;
        graphBuilder = null;
        captureGraphBuilder = null;
        videoDeviceFilter = null;
        grabberFilter = null;
        sg = null;
        if (graphObj != null)
        {
            Marshal.ReleaseComObject(graphObj);
            graphObj = null;
        }
        if (grabberObj != null)
        {
            Marshal.ReleaseComObject(grabberObj);
            grabberObj = null;
        }
    }
}
/// <summary>
/// Worker thread that builds the DirectShow capture graph
/// (webcam source -> sample grabber -> renderer), runs it, and pumps
/// until the stop signal is set.
/// </summary>
private void RunWorker()
{
    try
    {
        // Create the main graph
        _graph = Activator.CreateInstance(Type.GetTypeFromCLSID(FilterGraph)) as IGraphBuilder;

        // Create the webcam source
        _sourceObject = FilterInfo.CreateFilter(_monikerString);

        // Create the grabber (same COM object exposed as grabber and base filter)
        _grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(SampleGrabber)) as ISampleGrabber;
        _grabberObject = _grabber as IBaseFilter;

        // Add the source and grabber to the main graph
        _graph.AddFilter(_sourceObject, "source");
        _graph.AddFilter(_grabberObject, "grabber");

        using (AMMediaType mediaType = new AMMediaType())
        {
            mediaType.MajorType = MediaTypes.Video;
            mediaType.SubType = MediaSubTypes.RGB32;
            _grabber.SetMediaType(mediaType);

            if (_graph.Connect(_sourceObject.GetPin(PinDirection.Output, 0),
                               _grabberObject.GetPin(PinDirection.Input, 0)) >= 0)
            {
                if (_grabber.GetConnectedMediaType(mediaType) == 0)
                {
                    // During startup, this code can be too fast, so try at least 3 times
                    int retryCount = 0;
                    bool succeeded = false;
                    while ((retryCount < 3) && !succeeded)
                    {
                        // Tried again
                        retryCount++;
                        try
                        {
                            // Retrieve the negotiated frame dimensions from the grabber
                            VideoInfoHeader header = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
                            _capGrabber.Width = header.BmiHeader.Width;
                            _capGrabber.Height = header.BmiHeader.Height;

                            // Succeeded
                            succeeded = true;
                        }
                        catch (Exception retryException)
                        {
                            // FIX: include the caught exception in the trace message
                            // (it was previously caught into an unused variable,
                            // discarding the failure detail).
                            Trace.TraceInformation(
                                "Failed to retrieve the grabber information, tried {0} time(s): {1}",
                                retryCount, retryException);

                            // Sleep briefly before retrying
                            Thread.Sleep(50);
                        }
                    }
                }
            }

            // Render the grabber output and configure callback-based grabbing
            // (no buffering, continuous mode, BufferCB via callback type 1)
            _graph.Render(_grabberObject.GetPin(PinDirection.Output, 0));
            _grabber.SetBufferSamples(false);
            _grabber.SetOneShot(false);
            _grabber.SetCallback(_capGrabber, 1);

            // Get the video window and keep it hidden
            IVideoWindow wnd = (IVideoWindow)_graph;
            wnd.put_AutoShow(false);
            wnd = null;

            // Create the control and run
            _control = (IMediaControl)_graph;
            _control.Run();

            // Wait for the stop signal
            while (!_stopSignal.WaitOne(0, true))
            {
                Thread.Sleep(10);
            }

            // Stop when ready
            _control.StopWhenReady();
        }
    }
    catch (Exception ex)
    {
        // Trace
        Trace.WriteLine(ex);
    }
    finally
    {
        // Clean up
        Release();
    }
}
// --------------------- Private Methods -----------------------

/// <summary>
/// Create a new filter graph and add filters (devices, compressors,
/// misc), but leave the filters unconnected. Call renderGraph()
/// to connect the filters.
/// </summary>
protected void createGraph()
{
    Guid cat;
    Guid med;
    int hr;
    Type comType = null;
    object comObj = null;

    // Ensure required properties are set
    if (videoDevice == null && audioDevice == null)
        throw new ArgumentException("The video and/or audio device have not been set. Please set one or both to valid capture devices.\n");

    // Skip if we are already created
    if ((int)graphState < (int)GraphState.Created)
    {
        // Garbage collect, ensure that previous filters are released
        GC.Collect();

        // Make a new filter graph
        graphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

        // Get the Capture Graph Builder
        Guid clsid = Clsid.CaptureGraphBuilder2;
        Guid riid = typeof(ICaptureGraphBuilder2).GUID;
        captureGraphBuilder = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);

        // Link the CaptureGraphBuilder to the filter graph
        hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Create the sample grabber COM object and keep both interface views
        comType = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (comType == null)
            throw new NotImplementedException(@"DirectShow SampleGrabber not installed/registered!");
        comObj = Activator.CreateInstance(comType);
        sampGrabber = (ISampleGrabber)comObj;
        comObj = null;
        baseGrabFlt = (IBaseFilter)sampGrabber;

        // Add the graph to the Running Object Table so it can be
        // viewed with GraphEdit
#if DEBUG
        DsROT.AddGraphToRot(graphBuilder, out rotCookie);
#endif

        AMMediaType media = new AMMediaType();

        // Get the video device and add it to the filter graph
        if (VideoDevice != null)
        {
            videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(VideoDevice.MonikerString);
            hr = graphBuilder.AddFilter(videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
            // Console.WriteLine("MediaEnineCheck ==> Inside StartVideoCapture.cs before MediaSudType");

            // Ask the grabber for uncompressed RGB24 video
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB24; //Rajib
            media.formatType = FormatType.VideoInfo; // ???
            hr = sampGrabber.SetMediaType(media);
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
            hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
        }

        // Get the audio device and add it to the filter graph
        if (AudioDevice != null)
        {
            audioDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(AudioDevice.MonikerString);
            hr = graphBuilder.AddFilter(audioDeviceFilter, "Audio Capture Device");
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
        }

        // Get the video compressor and add it to the filter graph
        if (VideoCompressor != null)
        {
            videoCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(VideoCompressor.MonikerString);
            hr = graphBuilder.AddFilter(videoCompressorFilter, "Video Compressor");
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
        }

        // Get the audio compressor and add it to the filter graph
        if (AudioCompressor != null)
        {
            audioCompressorFilter = (IBaseFilter)Marshal.BindToMoniker(AudioCompressor.MonikerString);
            hr = graphBuilder.AddFilter(audioCompressorFilter, "Audio Compressor");
            if (hr < 0)
                Marshal.ThrowExceptionForHR(hr);
        }

        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional
        // upstream filters to function).

        // Try looking for an interleaved media type
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        Guid iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, videoDeviceFilter, ref iid, out o);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, videoDeviceFilter, ref iid, out o);
            if (hr != 0)
                o = null;
        }
        videoStreamConfig = o as IAMStreamConfig;

        // Retrieve the stream control interface for the audio device
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Audio;
        iid = typeof(IAMStreamConfig).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, audioDeviceFilter, ref iid, out o);
        if (hr != 0)
            o = null;
        audioStreamConfig = o as IAMStreamConfig;

        // Retreive the media control interface (for starting/stopping graph)
        mediaControl = (IMediaControl)graphBuilder;

        // Reload any video crossbars
        if (videoSources != null) videoSources.Dispose();
        videoSources = null;

        // Reload any audio crossbars
        if (audioSources != null) audioSources.Dispose();
        audioSources = null;

        // Reload any property pages exposed by filters
        if (propertyPages != null) propertyPages.Dispose();
        propertyPages = null;

        // Reload capabilities of video device
        videoCaps = null;

        // Reload capabilities of video device
        audioCaps = null;

        // Retrieve TV Tuner if available
        o = null;
        cat = PinCategory.Capture;
        med = MediaType.Interleaved;
        iid = typeof(IAMTVTuner).GUID;
        hr = captureGraphBuilder.FindInterface(
            ref cat, ref med, videoDeviceFilter, ref iid, out o);
        if (hr != 0)
        {
            med = MediaType.Video;
            hr = captureGraphBuilder.FindInterface(
                ref cat, ref med, videoDeviceFilter, ref iid, out o);
            if (hr != 0)
                o = null;
        }
        IAMTVTuner t = o as IAMTVTuner;
        if (t != null)
            tuner = new Tuner(t);

        // NOTE(review): media.formatPtr was never populated here — only
        // SetMediaType was called, not GetConnectedMediaType — so this
        // PtrToStructure reads from a zero pointer. Confirm whether a
        // GetConnectedMediaType call is missing upstream.
        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;

        // Configure the grabber: no buffering, continuous capture, callback type 1
        hr = sampGrabber.SetBufferSamples(false);
        if (hr == 0)
            hr = sampGrabber.SetOneShot(false);
        if (hr == 0)
            hr = sampGrabber.SetCallback(new SampleGrabberCallback(), 1);
        if (hr < 0)
            Marshal.ThrowExceptionForHR(hr);

        // Update the state now that we are done
        graphState = GraphState.Created;
    }
}
/// <summary>
/// Creates a new Video Player. Automatically creates the required Texture2D on the specificied GraphicsDevice.
/// </summary>
/// <param name="feel">Host application object used for UI-thread dispatch and toasts</param>
/// <param name="FileName">The video file to open</param>
/// <param name="graphicsDevice">XNA Graphics Device</param>
/// <param name="callback">Invoked on the UI thread once the player is ready</param>
public XNAPlayer(Feel feel, string FileName, GraphicsDevice graphicsDevice, Action callback)
{
    // All graph construction happens off-thread; the ctor returns immediately.
    Utils.RunAsynchronously(() =>
    {
        try
        {
            // Set video state
            currentState = VideoState.Stopped;

            // Store Filename
            filename = FileName;

            // Open DirectShow Interfaces
            InitInterfaces();

            // Create a SampleGrabber Filter and add it to the FilterGraph
            SampleGrabber sg = new SampleGrabber();
            ISampleGrabber sampleGrabber = (ISampleGrabber)sg;
            DsError.ThrowExceptionForHR(gb.AddFilter((IBaseFilter)sg, "Grabber"));

            // Setup Media type info for the SampleGrabber
            // NOTE(review): mt is never freed with DsUtils.FreeAMMediaType — confirm.
            AMMediaType mt = new AMMediaType();
            mt.majorType = MEDIATYPE_Video;     // Video
            mt.subType = MEDIASUBTYPE_RGB24;    // RGB24
            mt.formatType = FORMAT_VideoInfo;   // VideoInfo
            DsError.ThrowExceptionForHR(sampleGrabber.SetMediaType(mt));

            // Construct the rest of the FilterGraph
            DsError.ThrowExceptionForHR(gb.RenderFile(filename, null));

            // Set SampleGrabber Properties: buffer samples, continuous mode,
            // deliver frames through this object's BufferCB (callback type 1)
            DsError.ThrowExceptionForHR(sampleGrabber.SetBufferSamples(true));
            DsError.ThrowExceptionForHR(sampleGrabber.SetOneShot(false));
            DsError.ThrowExceptionForHR(sampleGrabber.SetCallback((ISampleGrabberCB)this, 1));

            // Hide Default Video Window
            IVideoWindow pVideoWindow = (IVideoWindow)gb;
            DsError.ThrowExceptionForHR(pVideoWindow.put_MessageDrain(IntPtr.Zero));
            DsError.ThrowExceptionForHR(pVideoWindow.put_WindowState(WindowState.Hide));
            DsError.ThrowExceptionForHR(pVideoWindow.put_AutoShow(OABool.False));

            // Create AMMediaType to capture video information
            AMMediaType MediaType = new AMMediaType();
            DsError.ThrowExceptionForHR(sampleGrabber.GetConnectedMediaType(MediaType));
            VideoInfoHeader pVideoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(MediaType.formatPtr, pVideoHeader);

            // Store video information
            videoHeight = pVideoHeader.BmiHeader.Height;
            videoWidth = pVideoHeader.BmiHeader.Width;
            avgTimePerFrame = pVideoHeader.AvgTimePerFrame;
            bitRate = pVideoHeader.BitRate;
            DsError.ThrowExceptionForHR(ms.GetDuration(out videoDuration));

            // Create byte arrays to hold video data
            videoFrameBytes = new byte[(videoHeight * videoWidth) * 4]; // RGBA format (4 bytes per pixel)
            bgrData = new byte[(videoHeight * videoWidth) * 4]; // BGR24 format (3 bytes per pixel + 1 for safety)

            // Create Output Frame Texture2D with the height and width of the video
            outputFrame = new Texture2D(graphicsDevice, videoWidth, videoHeight, 1, TextureUsage.None, SurfaceFormat.Color);

            feel.RunOnUIThread(callback);
        }
        catch
        {
            // Best-effort: any failure surfaces only as a toast.
            // NOTE(review): the exception detail is discarded here — consider logging it.
            feel.ShowToast("Unable to Load or Play the video file");
        }
    }, () => { });
}
/// <summary>
/// Builds and starts a preview capture graph for the first available video
/// input device: capture filter -> sample grabber -> null renderer.
/// </summary>
/// <param name="ctlHandle">Window handle passed through to GetInterfaces for graph setup</param>
private void CaptureVideo(IntPtr ctlHandle)
{
    int hr = 0;
    IBaseFilter sourceFilter = null;
    try
    {
        // Get DirectShow interfaces
        GetInterfaces(ctlHandle);

        // Attach the filter graph to the capture graph
        hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
        //captureGraphBuilder.RenderStream(PinCategory.Preview,MediaType.Video,
        DsError.ThrowExceptionForHR(hr);

        // Use the system device enumerator and class enumerator to find
        // a video capture/preview device, such as a desktop USB video camera.
        sourceFilter = FindCaptureDevice();
        if (sourceFilter == null)
        {
            log("Couldn't find a video input device.");
            return;
        }

        // Add Capture filter to our graph.
        hr = this.graphBuilder.AddFilter(sourceFilter, "Video Capture");
        DsError.ThrowExceptionForHR(hr);

        // Create the sample grabber and request uncompressed RGB24 video.
        // NOTE(review): mt is never freed with DsUtils.FreeAMMediaType,
        // and the SetMediaType result is not checked — confirm intended.
        this.samplegrabber = (ISampleGrabber)new SampleGrabber();
        AMMediaType mt = new AMMediaType();
        mt.majorType = MediaType.Video;
        mt.subType = MediaSubType.RGB24;
        mt.formatType = FormatType.VideoInfo;
        samplegrabber.SetMediaType(mt);
        //samplegrabber.
        hr = this.graphBuilder.AddFilter((IBaseFilter)samplegrabber, "samplegrabber");
        DsError.ThrowExceptionForHR(hr);

        // Terminate the chain with a null renderer so no video window is needed.
        IBaseFilter nullRenderer = (IBaseFilter)new NullRenderer();
        hr = graphBuilder.AddFilter(nullRenderer, "Null Renderer");

        // Render the preview pin on the video capture filter
        // Use this instead of this.graphBuilder.RenderFile
        hr = this.captureGraphBuilder.RenderStream(PinCategory.Preview, MediaType.Video, sourceFilter, (IBaseFilter)samplegrabber, nullRenderer);
        //DsError.ThrowExceptionForHR(hr);
        if (hr != 0)
            log(DsError.GetErrorText(hr));

        // Now that the filter has been added to the graph and we have
        // rendered its stream, we can release this reference to the filter.
        Marshal.ReleaseComObject(sourceFilter);

        // Set video window style and position
        //SetupVideoWindow(ctlHandle);

        // Add our graph to the running object table, which will allow
        // the GraphEdit application to "spy" on our graph
        rot = new DsROTEntry(this.graphBuilder);

        // Start previewing video data
        hr = this.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);

        // Remember current state
        this.currentState = PlayState.Running;

        // Buffer samples continuously so callers can poll the grabber.
        samplegrabber.SetBufferSamples(true);
        samplegrabber.SetOneShot(false);
    }
    catch
    {
        // NOTE(review): the exception detail is discarded — consider logging it.
        MessageBox.Show("CaptureVideo(ctlHandle) suffered a fatal error.");
    }
}
/// <summary>
/// Builds the playback graph for <c>filename</c> using the LAV filters
/// (splitter -> video/audio decoders -> sample grabber -> renderers),
/// creates the render textures, and hides/reparents the video window.
/// </summary>
/// <returns>0 on success, -1 when the file does not exist.</returns>
/// <exception cref="Exception">Wraps any failure during graph construction.</exception>
public override int Initialize()
{
    if (!File.Exists(filename))
    {
        return(-1);
    }
    try
    {
        int hr = 0;

        // Create the graph and all required filters
        graphBuilder2 = (IFilterGraph2) new FilterGraph();
        lavSplitter = new LAVSplitter() as IBaseFilter;
        lavVideoDecoder = new LAVVideoDecoder() as IBaseFilter;
        lavAudioDecoder = new LAVAudioDecoder() as IBaseFilter;
        var lavSplitterSource = lavSplitter as IFileSourceFilter;
        soundDevice = new DirectSoundDevice() as IBaseFilter;
        videoRenderer = new VideoRenderer() as IBaseFilter;

        // Point the splitter at the source file, then add everything to the graph
        lavSplitterSource.Load(filename, null);
        hr = graphBuilder2.AddFilter(lavSplitter, "LAV Splitter");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder2.AddFilter(lavVideoDecoder, "LAV Video Decoder");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder2.AddFilter(lavAudioDecoder, "LAV Audio Decoder");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder2.AddFilter(soundDevice, "Default Direct Sound Device");
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder2.AddFilter(videoRenderer, "Video Renderer");
        DsError.ThrowExceptionForHR(hr);

        // Look up the pins we need to wire manually
        var videoPin = GetPin(lavSplitter, "Video");
        var audioPin = GetPin(lavSplitter, "Audio");
        var videoDecoderInputPin = GetPin(lavVideoDecoder, "Input");
        var videoDecoderOutputPin = GetPin(lavVideoDecoder, "Output");
        var audioDecoderInputPin = GetPin(lavAudioDecoder, "Input");
        var audioDecoderOutputPin = GetPin(lavAudioDecoder, "Output");
        var soundInputPin = GetPin(soundDevice, "Audio Input pin (rendered)");
        var videoRendererInputPin = GetPin(videoRenderer, "Input");

        // splitter -> decoders -> sound device
        hr = graphBuilder2.Connect(videoPin, videoDecoderInputPin);
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder2.Connect(audioPin, audioDecoderInputPin);
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder2.Connect(audioDecoderOutputPin, soundInputPin);
        DsError.ThrowExceptionForHR(hr);

        // Insert the sample grabber (RGB32) between decoder and renderer
        sampleGrabber = new SampleGrabber() as ISampleGrabber;
        var amMediaType = new AMMediaType
        {
            majorType = MediaType.Video,
            subType = MediaSubType.RGB32,
            formatType = FormatType.VideoInfo
        };
        hr = sampleGrabber.SetMediaType(amMediaType);
        DsError.ThrowExceptionForHR(hr);
        DsUtils.FreeAMMediaType(amMediaType);
        hr = graphBuilder2.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber");
        DsError.ThrowExceptionForHR(hr);
        var sampleGrabberInputPin = GetPin((IBaseFilter)sampleGrabber, "Input");
        var sampleGrabberOutputPin = GetPin((IBaseFilter)sampleGrabber, "Output");
        hr = graphBuilder2.Connect(videoDecoderOutputPin, sampleGrabberInputPin);
        DsError.ThrowExceptionForHR(hr);
        hr = graphBuilder2.Connect(sampleGrabberOutputPin, videoRendererInputPin);
        DsError.ThrowExceptionForHR(hr);

        base.Initialize();

        // Deliver frames through this object's callback (type 1 = BufferCB),
        // buffer samples, continuous mode. Results intentionally unchecked,
        // matching the original behavior.
        sampleGrabber.SetCallback(this, 1);
        sampleGrabber.SetBufferSamples(true);
        sampleGrabber.SetOneShot(false);

        // Read the negotiated video dimensions off the splitter's video pin.
        // NOTE(review): mediaType is not freed with DsUtils.FreeAMMediaType — confirm.
        var mediaType = new AMMediaType();
        videoPin.ConnectionMediaType(mediaType);
        var bitmapInfoHeader = (BitmapInfoHeader)mediaType;
        this.width = bitmapInfoHeader.Width;
        this.height = bitmapInfoHeader.Height;
        this.maxu = 1;
        this.maxv = 1;

        // Pre-create the texture pool used for frame upload
        textures = new TextureBase[5];
        for (var i = 0; i < textures.Length; i++)
        {
            textures[i] = TextureFactoryManager.Factory.Create(device, width, height, 1, false);
        }

        // Hide the default video window and move it off-screen
        videoWindow = (IVideoWindow)graphBuilder2;
        hr = videoWindow.put_Visible((int)OABool.False);
        DsError.ThrowExceptionForHR(hr);
        hr = videoWindow.put_WindowState((int)WindowState.Hide);
        DsError.ThrowExceptionForHR(hr);
        hr = videoWindow.SetWindowPosition(-1000, -1000, 10, 10);
        DsError.ThrowExceptionForHR(hr);
        // FIX: the put_AutoShow result was previously discarded while the stale
        // hr from SetWindowPosition was re-checked; assign and check it properly.
        hr = videoWindow.put_AutoShow((int)OABool.False);
        DsError.ThrowExceptionForHR(hr);
        // FIX: was "hr = hr = videoWindow.put_Owner(...)" — redundant double assignment.
        hr = videoWindow.put_Owner(MovieUtility.Window);
        DsError.ThrowExceptionForHR(hr);
    }
    catch (Exception e)
    {
        throw new Exception("Fatal Error in Movie Loading", e);
    }
    return(0);
}
/// <summary>
/// build the capture graph for grabber.
/// Wires capture device -> sample grabber (RGB32), forces the camera
/// resolution to cameraWidth x cameraHeight via IAMStreamConfig, renders
/// the preview stream, and caches the connected VideoInfoHeader.
/// </summary>
/// <exception cref="GoblinException">
/// When the resolution cannot be applied or any graph step fails.
/// </exception>
private void SetupGraph()
{
    int hr;
    Guid cat;
    Guid med;
    try
    {
        // Attach the capture graph builder to the filter graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        hr = graphBuilder.AddFilter(capFilter, "Ds.NET Video Capture Device");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Request uncompressed RGB32 from the grabber
        AMMediaType media = new AMMediaType();
        media.majorType = MediaType.Video;
        media.subType = MediaSubType.RGB32;
        media.formatType = FormatType.VideoInfo; // ???
        hr = sampGrabber.SetMediaType(media);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Find the stream-config interface on the capture pin
        object o;
        cat = PinCategory.Capture;
        med = MediaType.Video;
        Guid iid = typeof(IAMStreamConfig).GUID;
        hr = capGraph.FindInterface(
            ref cat, ref med, capFilter, ref iid, out o);
        videoStreamConfig = o as IAMStreamConfig;

        // Grabber mode: no buffering, continuous capture, no callback
        hr = sampGrabber.SetBufferSamples(false);
        if (hr == 0)
        {
            hr = sampGrabber.SetOneShot(false);
        }
        if (hr == 0)
        {
            hr = sampGrabber.SetCallback(null, 0);
        }
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Apply the requested resolution, then read it back to verify
        // the device actually accepted it.
        BitmapInfoHeader bmiHeader;
        bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
        bmiHeader.Width = cameraWidth;
        bmiHeader.Height = cameraHeight;
        setStreamConfigSetting(videoStreamConfig, "BmiHeader", bmiHeader);

        bmiHeader = (BitmapInfoHeader)getStreamConfigSetting(videoStreamConfig, "BmiHeader");
        if (bmiHeader.Width != cameraWidth)
        {
            throw new GoblinException("Could not change the resolution to " + cameraWidth + "x" +
                cameraHeight + ". The resolution has to be " + bmiHeader.Width + "x" + bmiHeader.Height);
        }

        // Render the preview stream through the grabber
        cat = PinCategory.Preview;
        med = MediaType.Video;
        hr = capGraph.RenderStream(ref cat, ref med, capFilter, null, baseGrabFlt); // baseGrabFlt
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Read the negotiated media type and cache its VideoInfoHeader
        media = new AMMediaType();
        hr = sampGrabber.GetConnectedMediaType(media);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        if ((media.formatType != FormatType.VideoInfo) || (media.formatPtr == IntPtr.Zero))
        {
            throw new NotSupportedException("Unknown Grabber Media Format");
        }

        videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;
    }
    catch (Exception ee)
    {
        throw new GoblinException("Could not setup graph\r\n" + ee.Message);
    }
}
/// <summary>
/// <para>Decodes only the audio track of the given video file and builds a
/// WAV file image (RIFF/WAVE byte array) from the decoded PCM data.</para>
/// </summary>
/// <param name="fileName">Path of the source video file.</param>
/// <param name="wavFileImage">Receives the complete WAV file image.</param>
public static void t変換(string fileName, out byte[] wavFileImage)
{
    int hr = 0;
    IGraphBuilder graphBuilder = null;
    try
    {
        graphBuilder = (IGraphBuilder) new FilterGraph();

        #region [ オーディオ用サンプルグラバの作成と追加。]
        //-----------------
        ISampleGrabber sampleGrabber = null;
        try
        {
            sampleGrabber = (ISampleGrabber) new SampleGrabber();

            // Configure the sample grabber's media type: uncompressed PCM audio.
            var mediaType = new AMMediaType()
            {
                majorType = MediaType.Audio,
                subType = MediaSubType.PCM,
                formatType = FormatType.WaveEx,
            };
            try
            {
                hr = sampleGrabber.SetMediaType(mediaType);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (mediaType != null)
                {
                    DsUtils.FreeAMMediaType(mediaType);
                }
            }

            // Enable sample buffering on the grabber.
            hr = sampleGrabber.SetBufferSamples(true);
            DsError.ThrowExceptionForHR(hr);

            // Attach the callback to the sample grabber.
            // NOTE(review): this hr is not checked — confirm intended.
            sampleGrabberProc = new CSampleGrabberCallBack();
            hr = sampleGrabber.SetCallback(sampleGrabberProc, 1); // 1: invoke the callback's BufferCB() method.

            // Add the sample grabber to the graph.
            hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber for Audio/PCM");
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            C共通.tCOMオブジェクトを解放する(ref sampleGrabber);
        }
        //-----------------
        #endregion

        var e = new DirectShowLib.DsROTEntry(graphBuilder);

        // Auto-build the rest of the graph from fileName.
        hr = graphBuilder.RenderFile(fileName, null); // IMediaControl.RenderFile() is not recommended
        DsError.ThrowExceptionForHR(hr);

        // Remove the video renderer.
        // Must run before replacing the audio renderer with a Null renderer
        // (the helper below plays the graph once while probing the format, and
        // the Active window would otherwise pop up during that playback).
        // chnmr0: to simply avoid showing the window, put_AutoShow via IVideoWindow is used.
        IVideoWindow vw = graphBuilder as IVideoWindow;
        vw.put_AutoShow(OABool.False);

        // Replace the audio renderer with a NullRenderer and capture the wave format.
        WaveFormat wfx;
        byte[] wfx拡張領域;
        CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する(graphBuilder, out wfx, out wfx拡張領域);

        // Set the reference clock to NULL so the graph runs at maximum speed.
        IMediaFilter mediaFilter = graphBuilder as IMediaFilter;
        mediaFilter.SetSyncSource(null);
        mediaFilter = null;

        // Write the decoded data into a memory stream.
        sampleGrabberProc.MemoryStream = new MemoryStream(); // the format-probing helper above already played once, so start from a fresh stream.
        var ms = sampleGrabberProc.MemoryStream;
        var bw = new BinaryWriter(ms);

        // Emit the RIFF/WAVE header; the two size fields are written as 0 now
        // and patched after playback when the final lengths are known.
        bw.Write(new byte[] { 0x52, 0x49, 0x46, 0x46 }); // 'RIFF'
        bw.Write((UInt32)0);                             // file size - 8 [bytes]; unknown yet, overwritten later.
        bw.Write(new byte[] { 0x57, 0x41, 0x56, 0x45 }); // 'WAVE'
        bw.Write(new byte[] { 0x66, 0x6D, 0x74, 0x20 }); // 'fmt '
        bw.Write((UInt32)(16 + ((wfx拡張領域.Length > 0) ? (2 /*sizeof(WAVEFORMATEX.cbSize)*/ + wfx拡張領域.Length) : 0))); // fmt chunk size [bytes]
        bw.Write((UInt16)wfx.Encoding);              // format ID (1 for linear PCM)
        bw.Write((UInt16)wfx.Channels);              // channel count
        bw.Write((UInt32)wfx.SampleRate);            // sampling rate
        bw.Write((UInt32)wfx.AverageBytesPerSecond); // data rate
        bw.Write((UInt16)wfx.BlockAlign);            // block size
        bw.Write((UInt16)wfx.BitsPerSample);         // bits per sample
        if (wfx拡張領域.Length > 0)
        {
            bw.Write((UInt16)wfx拡張領域.Length); // extension size [bytes]
            bw.Write(wfx拡張領域);                // extension data
        }
        bw.Write(new byte[] { 0x64, 0x61, 0x74, 0x61 }); // 'data'
        int nDATAチャンクサイズ位置 = (int)ms.Position;
        bw.Write((UInt32)0); // data chunk size [bytes]; unknown yet, overwritten later.

        #region [ 再生を開始し、終了を待つ。- 再生中、sampleGrabberProc.MemoryStream に PCM データが蓄積されていく。]
        //-----------------
        // Start playback and wait for completion; while playing, PCM data
        // accumulates in sampleGrabberProc.MemoryStream.
        IMediaControl mediaControl = graphBuilder as IMediaControl;
        mediaControl.Run(); // start playback

        IMediaEvent mediaEvent = graphBuilder as IMediaEvent;
        EventCode eventCode;
        hr = mediaEvent.WaitForCompletion(-1, out eventCode);
        DsError.ThrowExceptionForHR(hr);
        if (eventCode != EventCode.Complete)
        {
            throw new Exception("再生待ちに失敗しました。");
        }
        mediaControl.Stop();
        mediaEvent = null;
        mediaControl = null;
        //-----------------
        #endregion

        // Patch the two size fields now that the total length is known.
        bw.Seek(4, SeekOrigin.Begin);
        bw.Write((UInt32)ms.Length - 8); // file size - 8 [bytes]
        bw.Seek(nDATAチャンクサイズ位置, SeekOrigin.Begin);
        bw.Write((UInt32)ms.Length - (nDATAチャンクサイズ位置 + 4)); // data chunk size [bytes]

        // Produce the output image.
        wavFileImage = ms.ToArray();

        // Cleanup.
        bw.Close();
        sampleGrabberProc.Dispose(); // ms.Close()
    }
    finally
    {
        C共通.tCOMオブジェクトを解放する(ref graphBuilder);
    }
}
/// <summary>
/// Worker thread.
/// Builds a playback graph for fileName (source -> sample grabber RGB24),
/// probes the source's output pins for one the grabber accepts, runs the
/// graph while pumping media events, and reports how playback finished.
/// </summary>
private void WorkerThread( )
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

    // grabber: callback object that receives the decoded video samples
    Grabber grabber = new Grabber(this);

    // objects (raw COM instances, released in the finally block)
    object graphObject = null;
    object grabberObject = null;

    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEvent = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source device's object
        graph.AddSourceFilter(fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;

        // add grabber filters to graph
        graph.AddFilter(grabberBase, "grabber");

        // set media type: request uncompressed RGB24 video
        AMMediaType mediaType = new AMMediaType( );
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(mediaType);

        // connect pins
        int pinToTry = 0;
        IPin inPin = Tools.GetInPin(grabberBase, 0);
        IPin outPin = null;

        // find output pin acceptable by sample grabber:
        // walk the source's output pins until one connects successfully
        while (true)
        {
            outPin = Tools.GetOutPin(sourceBase, pinToTry);
            if (outPin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }
            if (graph.Connect(outPin, inPin) < 0)
            {
                // this pin did not connect — release it and try the next one
                Marshal.ReleaseComObject(outPin);
                outPin = null;
                pinToTry++;
            }
            else
            {
                break;
            }
        }
        Marshal.ReleaseComObject(outPin);
        Marshal.ReleaseComObject(inPin);

        // get media type: read the negotiated frame dimensions
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mediaType.Dispose( );
        }

        // let's do rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBase, 0));

            // configure video window
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }

        // configure sample grabber:
        // no buffering, continuous capture, BufferCB callback (type 1)
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(grabber, 1);

        // disable clock, if someone requested it (graph then runs at max speed)
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            mediaFilter.SetSyncSource(null);
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;
        IntPtr p1, p2;
        DsEvCode code;

        // run, then pump graph events until completion or a stop request
        mediaControl.Run( );

        do
        {
            if (mediaEvent != null)
            {
                if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                {
                    mediaEvent.FreeEventParams(code, p1, p2);
                    if (code == DsEvCode.Complete)
                    {
                        reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                        break;
                    }
                }
            }
        }
        while (!stopEvent.WaitOne(100, false));
        mediaControl.Stop( );
    }
    catch (Exception exception)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // release all objects (interface references first, then the COM objects)
        graph = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEvent = null;

        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceBase != null)
        {
            Marshal.ReleaseComObject(sourceBase);
            sourceBase = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }

    if (PlayingFinished != null)
    {
        PlayingFinished(this, reasonToStop);
    }
}
/// <summary>
/// (Re)builds the DirectShow capture graph for the current <c>VideoInput</c> device:
/// tears down any previous graph via <c>Dispose()</c>, creates the filter graph and
/// capture-graph builder, inserts a sample grabber that delivers frames to this
/// instance's <c>ISampleGrabberCB</c> callback, renders the stream, and starts it.
/// No-op when <c>VideoInput</c> is null.
/// </summary>
private void ApplyVideoInput()
{
    int iRet;
    Dispose();

    /*Frame = new byte[(width * height) * PixelSize];
    CapturedFrame = new byte[(width * height) * PixelSize];
    PreviewFrame = new byte[(width / PreviewDivider * height / PreviewDivider) * PixelSize];*/

    if (VideoInput == null)
    {
        return;
    }

    // Build the graph objects and link the capture-graph builder to the filter graph.
    GraphBuilder = (IGraphBuilder)new FilterGraph();
    CaptureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    MediaControl = (IMediaControl)GraphBuilder;

    // COM HRESULTs signal failure with a NEGATIVE value; positive values (e.g.
    // S_FALSE = 1, VFW_S_* codes) are success codes. The original code tested
    // `iRet != 0` here, which logged spurious errors on success codes; test `< 0`
    // instead, matching the RenderStream check below.
    iRet = CaptureGraphBuilder.SetFiltergraph(GraphBuilder);
    if (iRet < 0) Console.WriteLine("TheKing--> Error Found SetFiltergraph");

    SampleGrabber = new SampleGrabber() as ISampleGrabber;
    iRet = GraphBuilder.AddFilter((IBaseFilter)SampleGrabber, "Render");
    if (iRet < 0) Console.WriteLine("TheKing--> Error Found AddFilter 1");

    SetResolution(width, height);

    iRet = GraphBuilder.AddFilter(VideoInput, "Camera");
    if (iRet < 0) Console.WriteLine("TheKing--> Error Found AddFilter 2");

    // Continuous capture: buffer each sample and invoke the BufferCB callback
    // (callback type 1) on this instance for every frame.
    iRet = SampleGrabber.SetBufferSamples(true);
    if (iRet < 0) Console.WriteLine("TheKing--> Error Found SetBufferSamples");

    iRet = SampleGrabber.SetOneShot(false);
    if (iRet < 0) Console.WriteLine("TheKing--> Error Found SetOneShot");

    iRet = SampleGrabber.SetCallback(this, 1);
    if (iRet < 0) Console.WriteLine("TheKing--> Error Found SetCallback");

    iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, SampleGrabber as IBaseFilter);
    if (iRet < 0)
    {
        Console.WriteLine("TheKing--> Error Found in CaptureGraphBuilder.RenderStream, iRet = " + iRet+", Initialization TryNumber = " + counter);
        // Single retry on render failure. NOTE(review): `counter` is not modified
        // anywhere in this method — presumably it is advanced by Dispose() or a
        // caller; confirm, otherwise this recursion cannot terminate while
        // counter == 1 and RenderStream keeps failing.
        if (counter == 1) ApplyVideoInput();
    }

    //GraphBuilder.Connect()
    //iRet = CaptureGraphBuilder.RenderStream(null, null, VideoInput, null, null);
    //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 1");
    //iRet = CaptureGraphBuilder.RenderStream(PinCategory.Capture, MediaType.Video, VideoInput, null, SampleGrabber as IBaseFilter);
    //if (iRet != 0) Console.WriteLine("TheKing--> Error Found RenderStream 2, iRet = " + iRet);

    if (UpdateThread != null)
    {
        // NOTE(review): Thread.Abort is deprecated (throws PlatformNotSupportedException
        // on .NET Core / .NET 5+); a cooperative cancellation flag would be safer.
        UpdateThread.Abort();
    }
    //UpdateThread = new Thread(UpdateBuffer);
    //UpdateThread.Start();

    MediaControl.Run();

    // Release our local RCW reference to the device filter; the filter graph holds
    // its own reference from AddFilter, so the graph keeps running — TODO confirm
    // no other member still uses VideoInput after this point.
    Marshal.ReleaseComObject(VideoInput);
}