/// <summary>
/// Initialize collection with property pages from existing graph.
/// This constructor only forwards the graph/filter/source references to
/// addFromGraph(), which does the actual enumeration of property pages.
/// </summary>
/// <param name="graphBuilder">Capture graph builder hosting the filters.</param>
/// <param name="videoDeviceFilter">Video capture device filter (may be null).</param>
/// <param name="audioDeviceFilter">Audio capture device filter (may be null).</param>
/// <param name="videoCompressorFilter">Video compressor filter (may be null).</param>
/// <param name="audioCompressorFilter">Audio compressor filter (may be null).</param>
/// <param name="videoSources">Available video source inputs.</param>
/// <param name="audioSources">Available audio source inputs.</param>
internal PropertyPageCollection(
    ICaptureGraphBuilder2 graphBuilder,
    IBaseFilter videoDeviceFilter, IBaseFilter audioDeviceFilter,
    IBaseFilter videoCompressorFilter, IBaseFilter audioCompressorFilter,
    SourceCollection videoSources, SourceCollection audioSources)
{
    addFromGraph(graphBuilder,
        videoDeviceFilter, audioDeviceFilter,
        videoCompressorFilter, audioCompressorFilter,
        videoSources, audioSources);
}
/// <summary>
/// Shows the property-page dialog for the TV tuner found on the capture pin
/// of the given filter. Returns true if the dialog was shown, false if no
/// tuner interface was found or an error occurred.
/// </summary>
/// <param name="bld">Capture graph builder used to locate the tuner interface.</param>
/// <param name="flt">Filter whose capture pin is searched for IAMTVTuner.</param>
/// <param name="hwnd">Owner window handle for the property frame.</param>
public static bool ShowTunerPinDialog(ICaptureGraphBuilder2 bld, IBaseFilter flt, IntPtr hwnd)
{
    object ppint = null;
    ISpecifyPropertyPages pages = null;
    bool flag;
    DsCAUUID pPages = new DsCAUUID();
    try
    {
        Guid capture = PinCategory.Capture;
        Guid interleaved = MediaType.Interleaved;
        Guid gUID = typeof(IAMTVTuner).GUID;
        // Prefer an interleaved (DV-style) capture pin; fall back to plain video.
        if (bld.FindInterface(ref capture, ref interleaved, flt, ref gUID, out ppint) != 0)
        {
            interleaved = MediaType.Video;
            if (bld.FindInterface(ref capture, ref interleaved, flt, ref gUID, out ppint) != 0)
            {
                return false;
            }
        }
        pages = ppint as ISpecifyPropertyPages;
        if (pages == null)
        {
            return false;
        }
        // NOTE(review): both HRESULTs below are assigned to `num` but never checked.
        int num = pages.GetPages(out pPages);
        // Modal property frame; blocks until the user closes the dialog.
        num = OleCreatePropertyFrame(hwnd, 30, 30, null, 1, ref ppint, pPages.cElems, pPages.pElems, 0, 0, IntPtr.Zero);
        flag = true;
    }
    catch (Exception exception)
    {
        // NOTE(review): message says "ShowCapPinDialog" although this method is
        // ShowTunerPinDialog — possibly copied from a sibling method; confirm.
        Trace.WriteLine("!Ds.NET: ShowCapPinDialog " + exception.Message);
        flag = false;
    }
    finally
    {
        // Free the page-GUID array allocated by GetPages, then release the RCW.
        if (pPages.pElems != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(pPages.pElems);
        }
        pages = null;
        if (ppint != null)
        {
            Marshal.ReleaseComObject(ppint);
        }
        ppint = null;
    }
    return flag;
}
/// <summary>
/// Displays the property-page dialog for the TV tuner exposed on the capture
/// pin of <paramref name="flt"/>. Returns true when the dialog was shown,
/// false when no tuner interface exists or an error occurred.
/// </summary>
/// <param name="bld">Capture graph builder used to locate the tuner interface.</param>
/// <param name="flt">Filter whose capture pin is searched for IAMTVTuner.</param>
/// <param name="hwnd">Owner window handle for the property frame.</param>
public static bool ShowTunerPinDialog( ICaptureGraphBuilder2 bld, IBaseFilter flt, IntPtr hwnd )
{
    object tunerObj = null;
    ISpecifyPropertyPages propPages = null;
    DsCAUUID pageGuids = new DsCAUUID();
    try
    {
        DsGuid pinCat = PinCategory.Capture;
        DsGuid mediaKind = MediaType.Interleaved;
        DsGuid tunerIid = typeof(IAMTVTuner).GUID;

        // Prefer an interleaved (DV-style) capture pin, then fall back to plain video.
        int hr = bld.FindInterface( pinCat, mediaKind, flt, tunerIid, out tunerObj );
        if( hr != 0 )
        {
            mediaKind = MediaType.Video;
            hr = bld.FindInterface( pinCat, mediaKind, flt, tunerIid, out tunerObj );
            if( hr != 0 )
                return false;
        }

        propPages = tunerObj as ISpecifyPropertyPages;
        if( propPages == null )
            return false;

        hr = propPages.GetPages( out pageGuids );
        // Modal dialog; returns when the user dismisses it.
        hr = OleCreatePropertyFrame( hwnd, 30, 30, null, 1, ref tunerObj,
                                     pageGuids.cElems, pageGuids.pElems, 0, 0, IntPtr.Zero );
        return true;
    }
    catch( Exception ee )
    {
        Trace.WriteLine( "!Ds.NET: ShowCapPinDialog " + ee.Message );
        return false;
    }
    finally
    {
        // Free the GUID array allocated by GetPages, then drop the COM reference.
        if( pageGuids.pElems != IntPtr.Zero )
            Marshal.FreeCoTaskMem( pageGuids.pElems );
        propPages = null;
        if( tunerObj != null )
            Marshal.ReleaseComObject( tunerObj );
        tunerObj = null;
    }
}
/// <summary>
/// Create a new filter graph and add filters (devices, compressors, misc),
/// but leave the filters unconnected. Call RenderGraph()
/// to connect the filters.
/// </summary>
void CreateGraph()
{
    // Skip if already created
    if (_actualGraphState < GraphState.Created)
    {
        // Make a new filter graph
        _graphBuilder = (IGraphBuilder) new FilterGraph();

        // Get the Capture Graph Builder
        _captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Link the CaptureGraphBuilder to the filter graph
        var hr = _captureGraphBuilder.SetFiltergraph(_graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Sample grabber used later to pull frames from the stream.
        _sampGrabber = (ISampleGrabber) new SampleGrabber();
        _baseGrabFlt = (IBaseFilter)_sampGrabber;

        var media = new AMMediaType();

        // Get the video device and add it to the filter graph
        if (_videoDevice != null)
        {
            _videoDeviceFilter = (IBaseFilter)Marshal.BindToMoniker(_videoDevice.MonikerString);
            hr = _graphBuilder.AddFilter(_videoDeviceFilter, "Video Capture Device");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }

            // Ask the grabber for RGB32 video frames.
            media.majorType = MediaType.Video;
            media.subType = MediaSubType.RGB32;
            media.formatType = FormatType.VideoInfo;
            media.temporalCompression = true;
            hr = _sampGrabber.SetMediaType(media);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            hr = _graphBuilder.AddFilter(_baseGrabFlt, "Grabber");
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
        }

        // Retrieve the stream control interface for the video device
        // FindInterface will also add any required filters
        // (WDM devices in particular may need additional upstream filters to function).
        // Try looking for an interleaved media type
        var cat = PinCategory.Capture;
        var med = MediaType.Interleaved;
        var iid = typeof(IAMStreamConfig).GUID;
        hr = _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out _);
        if (hr != 0)
        {
            // If not found, try looking for a video media type
            // NOTE(review): the HRESULT of this fallback call is discarded — confirm intentional.
            med = MediaType.Video;
            _captureGraphBuilder.FindInterface(cat, med, _videoDeviceFilter, iid, out _);
        }

        // Retreive the media control interface (for starting/stopping graph)
        _mediaControl = (IMediaControl)_graphBuilder;

        // NOTE(review): media.formatPtr is read here, but nothing above allocates it
        // (SetMediaType does not fill in a format block on this local AMMediaType) —
        // presumably this intends to read the grabber's connected media type; verify
        // formatPtr is non-null at runtime before PtrToStructure.
        _videoInfoHeader = Marshal.PtrToStructure<VideoInfoHeader>(media.formatPtr);
        Marshal.FreeCoTaskMem(media.formatPtr);
        media.formatPtr = IntPtr.Zero;

        // Buffer samples, run continuously, no sample callback.
        hr = _sampGrabber.SetBufferSamples(true);
        if (hr == 0)
        {
            hr = _sampGrabber.SetOneShot(false);
        }
        if (hr == 0)
        {
            hr = _sampGrabber.SetCallback(null, 0);
        }
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
    }

    // Update the state now that we are done
    // NOTE(review): this assignment runs even when the graph was already beyond
    // Created (e.g. Rendered), downgrading the recorded state — confirm intended.
    _actualGraphState = GraphState.Created;
}
// Configure specified pin and collect its capabilities if required.
// FIX: the COM object returned by FindInterface was never released, leaking
// an RCW reference on every call; it is now released in a finally block.
private void GetPinCapabilitiesAndConfigureSizeAndRate( ICaptureGraphBuilder2 graphBuilder, IBaseFilter baseFilter, Guid pinCategory, VideoCapabilities resolutionToSet, ref VideoCapabilities[] capabilities )
{
    object streamConfigObject;
    graphBuilder.FindInterface( pinCategory, MediaType.Video, baseFilter, typeof( IAMStreamConfig ).GUID, out streamConfigObject );

    if ( streamConfigObject != null )
    {
        try
        {
            IAMStreamConfig streamConfig = null;

            try
            {
                streamConfig = (IAMStreamConfig) streamConfigObject;
            }
            catch ( InvalidCastException ex)
            {
                Logger.LogExceptionToFile(ex, "GetPinCapabilities");
            }

            if ( streamConfig != null )
            {
                if ( capabilities == null )
                {
                    try
                    {
                        // get all video capabilities
                        capabilities = iSpyPRO.DirectShow.VideoCapabilities.FromStreamConfig( streamConfig );
                    }
                    catch(Exception ex)
                    {
                        Logger.LogExceptionToFile(ex, "Device Caps");
                    }
                }

                // check if it is required to change capture settings
                if ( resolutionToSet != null )
                {
                    SetResolution( streamConfig, resolutionToSet );
                }
            }
        }
        finally
        {
            // Release the interface obtained from FindInterface (COM leak fix).
            Marshal.ReleaseComObject( streamConfigObject );
        }
    }

    // if failed resolving capabilities, then just create empty capabilities array,
    // so we don't try again
    if ( capabilities == null )
    {
        capabilities = new VideoCapabilities[0];
    }
}
/// <summary>
/// Retrieve a list of crossbar filters in the graph.
/// Most hardware devices should have a maximum of 2 crossbars,
/// one for video and another for audio.
/// </summary>
/// <param name="graphBuilder">Capture graph builder used for the upstream search.</param>
/// <param name="deviceFilter">Device filter to start searching upstream from.</param>
/// <returns>ArrayList of IAMCrossbar interfaces found (possibly empty).</returns>
protected ArrayList findCrossbars(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter)
{
    ArrayList found = new ArrayList();
    Guid searchDirection = FindDirection.UpstreamOnly;
    Guid anyMediaType = new Guid();
    Guid crossbarIid = typeof(IAMCrossbar).GUID;

    // First search: look upstream from the selected device.
    object current;
    int hr = graphBuilder.FindInterface( ref searchDirection, ref anyMediaType, deviceFilter, ref crossbarIid, out current );

    // Walk the chain: each crossbar found becomes the start of the next upstream search.
    while ( hr == 0 && current != null )
    {
        IAMCrossbar crossbar = current as IAMCrossbar;
        if ( crossbar == null )
        {
            // Not a crossbar — stop walking.
            current = null;
            break;
        }

        found.Add( crossbar );

        object upstream;
        hr = graphBuilder.FindInterface( ref searchDirection, ref anyMediaType, current as IBaseFilter, ref crossbarIid, out upstream );
        current = upstream;
    }

    return found;
}
// Set the Framerate, and video size.
// Any parameter <= 0 leaves the corresponding device default untouched.
// FIX: the IAMStreamConfig RCW was never released, and the AMMediaType leaked
// when SetFormat failed (the throw skipped FreeAMMediaType); both are now
// cleaned up in a finally block.
private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iFrameRate, int iWidth, int iHeight)
{
    int hr;
    object o;

    // Find the stream config interface
    hr = capGraph.FindInterface( PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o );

    IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    AMMediaType media = null;
    try
    {
        // Get the existing format block
        hr = videoStreamConfig.GetFormat( out media);
        DsError.ThrowExceptionForHR( hr );

        // copy out the videoinfoheader
        VideoInfoHeader v = new VideoInfoHeader();
        Marshal.PtrToStructure( media.formatPtr, v );

        // if overriding the framerate, set the frame rate (100ns units per frame)
        if (iFrameRate > 0)
        {
            v.AvgTimePerFrame = 10000000 / iFrameRate;
        }

        // if overriding the width, set the width
        if (iWidth > 0)
        {
            v.BmiHeader.Width = iWidth;
        }

        // if overriding the Height, set the Height
        if (iHeight > 0)
        {
            v.BmiHeader.Height = iHeight;
        }

        // Copy the media structure back
        Marshal.StructureToPtr( v, media.formatPtr, false );

        // Set the new format
        hr = videoStreamConfig.SetFormat( media );
        DsError.ThrowExceptionForHR( hr );
    }
    finally
    {
        // Always free the media type and release the stream-config interface,
        // even when SetFormat fails.
        if (media != null)
        {
            DsUtils.FreeAMMediaType(media);
        }
        Marshal.ReleaseComObject(o);
    }
}
/// <summary>
/// Builds the full capture graph for the given device, configures the camera
/// via NativeHelpers, and optionally sets up OCR. On any failure the graph is
/// torn down, the error callback is invoked, and the exception is rethrown.
/// </summary>
/// <param name="dev">Capture device to build the graph around.</param>
/// <param name="runOCR">When true, configure OCR from the selected configuration.</param>
/// <param name="selectedFormat">Video format to request from the device.</param>
/// <param name="iFrameRate">In/out: negotiated frame rate.</param>
/// <param name="iWidth">In/out: negotiated frame width.</param>
/// <param name="iHeight">In/out: negotiated frame height.</param>
public void SetupGraph(DsDevice dev, bool runOCR, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight)
{
    try
    {
        // Core graph objects: filter graph, media control, capture builder, grabber.
        filterGraph = (IFilterGraph2)new FilterGraph();
        mediaCtrl = filterGraph as IMediaControl;

        capBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        samplGrabber = (ISampleGrabber)new SampleGrabber();

        int hr = capBuilder.SetFiltergraph(filterGraph);
        DsError.ThrowExceptionForHR(hr);

        // In debug mode, register the graph in the Running Object Table so it
        // can be inspected with GraphEdit; replace any previous registration.
        if (Settings.Default.VideoGraphDebugMode)
        {
            if (rot != null)
            {
                rot.Dispose();
                rot = null;
            }
            rot = new DsROTEntry(filterGraph);
        }

        SetupGraphInternal(dev, selectedFormat, ref iFrameRate, ref iWidth, ref iHeight);

        // Now that sizes are fixed/known, store the sizes
        SaveSizeInfo(samplGrabber);

        crossbar = CrossbarHelper.SetupTunerAndCrossbar(capBuilder, capFilter);

        // Preallocate the bitmap used to hand frames to the rest of the app.
        latestBitmap = new Bitmap(iWidth, iHeight, PixelFormat.Format24bppRgb);
        fullRect = new Rectangle(0, 0, latestBitmap.Width, latestBitmap.Height);

        // Push the full camera configuration down to the native layer.
        NativeHelpers.SetupCamera(
            Settings.Default.CameraModel,
            iWidth, iHeight,
            Settings.Default.HorizontalFlip,
            Settings.Default.VerticalFlip,
            Settings.Default.IsIntegrating,
            (float)Settings.Default.MinSignatureDiffRatio,
            (float)Settings.Default.MinSignatureDiff,
            Settings.Default.GammaDiff,
            Settings.Default.ForceNewFrameOnLockedRate,
            dev.Name,
            selectedFormat.AsSerialized(),
            selectedFormat.FrameRate);

        NativeHelpers.SetupAav(Settings.Default.RecordStatusSectionOnly
            ? AavImageLayout.StatusSectionOnly
            : Settings.Default.AavImageLayout, Settings.Default.AavCompression);

        ocrEnabled = false;
        string errorMessage;
        if (runOCR)
        {
            // OCR path: basic metrics first; only enable OCR when that succeeds.
            OcrConfiguration ocrConfig = OcrSettings.Instance[Settings.Default.SelectedOcrConfiguration];

            errorMessage = NativeHelpers.SetupBasicOcrMetrix(ocrConfig);
            if (errorMessage != null && callbacksObject != null)
                callbacksObject.OnError(-1, errorMessage);
            else
            {
                NativeHelpers.SetupOcr(ocrConfig);
                ocrEnabled = true;
            }
        }
        else
        {
            // No OCR: still set up timestamp preservation in the native layer.
            errorMessage = NativeHelpers.SetupTimestampPreservation(false, 0, 0);
            if (errorMessage != null && callbacksObject != null)
                callbacksObject.OnError(-1, errorMessage);
        }
    }
    catch
    {
        // Tear down any partially-built graph, report, and propagate.
        CloseResources();

        if (callbacksObject != null)
            callbacksObject.OnError(-1, "Error initialising the camera. The selected video mode may not be supported by the camera.");

        throw;
    }
}
/// <summary>
/// Release the capture graph: stop playback, detach the frame callback, drop
/// all filter references, and explicitly release the two builder COM objects.
/// </summary>
public virtual void Dispose()
{
    // Best-effort stop; failures are irrelevant since we are tearing down anyway.
    try
    {
        Stop();
    }
    catch (Exception)
    {
    }

    DeviceName = "";
    SampleGrabberCB.FrameSize = new Size(0, 0);
    SampleGrabberCB.Notify -= SampleGrabberCB_Notify;

    // Plain references are just dropped; only the builders below get an
    // explicit COM release.
    CaptureFilter = null;
    CaptureOutPin = null;
    SampleGrabber = null;
    Renderer = null;
    Mux = null;
    Sync = null;

    if (CaptureBuilder != null)
    {
        Marshal.ReleaseComObject(CaptureBuilder);
    }
    CaptureBuilder = null;

    if (GraphBuilder != null)
    {
        Marshal.ReleaseComObject(GraphBuilder);
    }
    GraphBuilder = null;
}
/// <summary> build the capture graph for grabber. </summary>
/// <param name="dev">Video capture device to open.</param>
/// <param name="media">Requested media type, forwarded to SetConfigParms.</param>
private void SetupGraph(DsDevice dev, AMMediaType media)
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;

    // Get the graphbuilder object
    m_FilterGraph = (IFilterGraph2) new FilterGraph();
    m_mediaCtrl = m_FilterGraph as IMediaControl;
    try
    {
        // Get the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        /*
         * // check for crossbar
         * var capDevices2 = DsDevice.GetDevicesOfCat(FilterCategory.AMKSCrossbar);
         * if (capDevices2.Length > 0)
         * {
         *
         * IBaseFilter cross;
         * hr = m_FilterGraph.AddSourceFilterForMoniker(capDevices2[0].Mon, null, "crossbar", out cross);
         * ISpecifyPropertyPages pProp = cross as ISpecifyPropertyPages;
         *
         * //Get the name of the filter from the FilterInfo struct
         * FilterInfo filterInfo;
         * hr = cross.QueryFilterInfo(out filterInfo);
         * DsError.ThrowExceptionForHR(hr);
         *
         * // Get the propertypages from the property bag
         * DsCAUUID caGUID;
         * hr = pProp.GetPages(out caGUID);
         * DsError.ThrowExceptionForHR(hr);
         *
         * //Create and display the OlePropertyFrame
         * object oDevice = (object)cross;
         * hr = NativeMethods.OleCreatePropertyFrame(IntPtr.Zero, 0, 0, filterInfo.achName, 1, ref oDevice, caGUID.cElems, caGUID.pElems, 0, 0, IntPtr.Zero);
         * DsError.ThrowExceptionForHR(hr);
         *
         * Marshal.ReleaseComObject(oDevice);
         *
         * //IAMCrossbar crossbar2 = cross as IAMCrossbar;
         * //int inputPinCount, outputPinCount;
         * //crossbar2.get_PinCounts(out inputPinCount, out outputPinCount);
         * //crossbar2.Route(0, (int)PhysicalConnectorType.Video_Composite);
         * cross = null;
         * }*/

        // Get the SampleGrabber interface
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the video device
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // add video crossbar
        // thanks to Andrew Fernie - this is to get tv tuner cards working
        IAMCrossbar crossbar = null;
        object o;

        hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMCrossbar).GUID, out o);
        if (hr >= 0)
        {
            crossbar = (IAMCrossbar)o;
            int oPin, iPin;
            int ovLink, ivLink;
            ovLink = ivLink = 0;

            crossbar.get_PinCounts(out oPin, out iPin);
            int pIdxRel;
            PhysicalConnectorType tp;

            // Find the composite-video input pin...
            for (int i = 0; i < iPin; i++)
            {
                crossbar.get_CrossbarPinInfo(true, i, out pIdxRel, out tp);
                if (tp == PhysicalConnectorType.Video_Composite)
                {
                    ivLink = i;
                }
            }
            // ...and the video-decoder output pin, then route them together.
            for (int i = 0; i < oPin; i++)
            {
                crossbar.get_CrossbarPinInfo(false, i, out pIdxRel, out tp);
                if (tp == PhysicalConnectorType.Video_VideoDecoder)
                {
                    ovLink = i;
                }
            }

            try
            {
                crossbar.Route(ovLink, ivLink);
                // NOTE(review): `o = null` only drops the local reference — the
                // crossbar RCW from FindInterface is never ReleaseComObject'd.
                o = null;
            }
            catch
            {
                throw new Exception("Failed to get IAMCrossbar");
            }
        }

        //add AVI Decompressor
        // NOTE(review): pAVIDecompressor is never released in the finally block.
        IBaseFilter pAVIDecompressor = (IBaseFilter) new AVIDec();
        hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
        DsError.ThrowExceptionForHR(hr);

        //
        IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        SetConfigParms(capGraph, capFilter, media);

        // Render through the AVI decompressor first; if that fails, try a
        // direct connection from the capture pin to the grabber.
        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, pAVIDecompressor, baseGrabFlt);
        if (hr < 0)
        {
            hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
        }
        DsError.ThrowExceptionForHR(hr);

        SaveSizeInfo(sampGrabber);
    }
    finally
    {
        // Release the local COM references; the graph itself keeps the filters alive.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
            capGraph = null;
        }
    }
}
// Set the sample rate and channel count on the audio capture pin.
// Any parameter <= 0 keeps the default (44.1 kHz / m_Channels).
// FIX: nBlockAlign was hard-coded to 2 and nAvgBytesPerSec was computed BEFORE
// the sample-rate/channel overrides were applied, producing an inconsistent
// WAVEFORMATEX whenever an override was used (or m_Channels != 1). Both fields
// are now derived after the overrides: nBlockAlign = nChannels * bytesPerSample,
// nAvgBytesPerSec = nSamplesPerSec * nBlockAlign.
private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int iSampleRate, int iChannels)
{
    int hr;
    object o;
    AMMediaType media;

    // Find the stream config interface
    hr = capGraph.FindInterface(
        PinCategory.Capture, MediaType.Audio, capFilter, typeof(IAMStreamConfig).GUID, out o);

    IAMStreamConfig audioStreamConfig = o as IAMStreamConfig;
    if (audioStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    // Get the existing format block
    hr = audioStreamConfig.GetFormat(out media);
    DsError.ThrowExceptionForHR(hr);

    // copy out the wave format header
    WaveFormatEx i = new WaveFormatEx();
    Marshal.PtrToStructure(media.formatPtr, i);

    // Base format: 16-bit PCM, 44.1 kHz, configured channel count.
    i.wFormatTag = 0x0001; // WAVE_FORMAT_PCM
    i.wBitsPerSample = 16;
    i.nSamplesPerSec = 44100;
    i.nChannels = m_Channels;
    i.cbSize = 0;

    // if overriding the sample rate, set the sample rate
    if (iSampleRate > 0)
    {
        i.nSamplesPerSec = iSampleRate;
    }

    // if overriding the channel count, set the channel count
    if (iChannels > 0)
    {
        i.nChannels = (short)iChannels;
    }

    // Derive block align and average byte rate from the FINAL values
    // (per the WAVEFORMATEX contract for PCM).
    i.nBlockAlign = (short)(i.nChannels * (i.wBitsPerSample / 8));
    i.nAvgBytesPerSec = (i.nSamplesPerSec * i.nBlockAlign);

    // Copy the media structure back
    Marshal.StructureToPtr(i, media.formatPtr, false);

    // Set the new format
    hr = audioStreamConfig.SetFormat(media);
    DsError.ThrowExceptionForHR(hr);

    DsUtils.FreeAMMediaType(media);
    media = null;
}
/// <summary>
/// Sample: read a WMV file and grab both its video and audio streams.
/// </summary>
/// <remarks>
/// RenderStream (NULL, MEDIATYPE_Video, source, videoGrabber, renderner)<br/>
/// RenderStream (NULL, MEDIATYPE_Audio, source, audioGrabber, renderner)<br/>
/// <pre>
///  source          grabber        mux       renderner
/// +--------+      +---------+   +-------+   +-------+
/// | audio 0 ----0 audio 0 --- 1   0 --- 0   |
/// |        |      +---------+   |       |   +-------+
/// |        |                    |       |
/// |        |      +---------+   |       |
/// | video 1 --- 0 video 0 --- 0 |
/// +--------+      +---------+   |       |
///                               2       |
///                               +-------+
/// </pre>
/// </remarks>
public static void Sample31()
{
    string __FUNCTION__ = MethodBase.GetCurrentMethod().Name;
    Console.WriteLine(__FUNCTION__);

    IGraphBuilder graph = null;
    ICaptureGraphBuilder2 builder = null;
    IBaseFilter videoSource = null;
    IBaseFilter videoGrabber = null;
    IBaseFilter audioGrabber = null;
    IBaseFilter videoRenderer = null;
    IBaseFilter audioRenderer = null;
    var videoGrabberCB = new CxSampleGrabberCB();
    var audioGrabberCB = new CxSampleGrabberCB();
    string src_filename = Path.Combine(TestFiles, "stopwatch_320x240.wmv");

    try
    {
        #region Create the graph builder:
        {
            graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
            if (graph == null)
            {
                throw new System.IO.IOException("Failed to create a GraphBuilder.");
            }

            builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
            if (builder == null)
            {
                throw new System.IO.IOException("Failed to create a GraphBuilder.");
            }
            builder.SetFiltergraph(graph);
        }
        #endregion

        #region Video input: create the source filter.
        {
#if true
            graph.AddSourceFilter(src_filename, "VideoSource", ref videoSource);
            if (videoSource == null)
            {
                throw new System.IO.IOException("Failed to create a videoSource.");
            }
#else
            videoSource = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_WMAsfReader);
            if (videoSource == null)
            {
                throw new System.IO.IOException("Failed to create a videoSource.");
            }
            graph.AddFilter(videoSource, "VideoSource");

            // Configure the file source filter.
            var pConfig = (IFileSourceFilter)videoSource;
            {
                HRESULT hr = (HRESULT)pConfig.Load(src_filename, IntPtr.Zero);
                if (hr < HRESULT.S_OK)
                {
                    throw new System.IO.IOException("Failed to set the src_filename.");
                }
            }
#endif
        }
        #endregion

        #region Video capture: create the sample grabber.
        {
            videoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
            if (videoGrabber == null)
            {
                throw new System.IO.IOException("Failed to create a videoGrabber.");
            }
            graph.AddFilter(videoGrabber, "videoGrabber");

            // Configure the sample grabber's input format.
            // SetMediaType specifies the required media type:
            // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
            // * Not every member of AM_MEDIA_TYPE needs to be filled in.
            // * By default the sample grabber has no preferred media type.
            // * Call this before building the graph to ensure the grabber
            //   connects to the correct filter.
            // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
            // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
            {
                var grabber = (ISampleGrabber)videoGrabber;

                var mt = new AM_MEDIA_TYPE();
                mt.majortype = new Guid(GUID.MEDIATYPE_Video);
                mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
                mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
                grabber.SetMediaType(mt);
                grabber.SetBufferSamples(false);            // disable sample copying.
                grabber.SetOneShot(false);                  // disable one-shot mode.
                //grabber.SetCallback(videoGrabberCB, 0);   // 0: invoke the SampleCB method.
                grabber.SetCallback(videoGrabberCB, 1);     // 1: invoke the BufferCB method.
            }
        }
        #endregion

        #region Audio capture: create the sample grabber.
        {
            audioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
            if (audioGrabber == null)
            {
                throw new System.IO.IOException("Failed to create a audioGrabber.");
            }
            graph.AddFilter(audioGrabber, "audioGrabber");

            // Configure the sample grabber's input format (see notes above).
            {
                var grabber = (ISampleGrabber)audioGrabber;

                var mt = new AM_MEDIA_TYPE();
                mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
                mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
                mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
                grabber.SetMediaType(mt);
                grabber.SetBufferSamples(false);            // disable sample copying.
                grabber.SetOneShot(false);                  // disable one-shot mode.
                //grabber.SetCallback(audioGrabberCB, 0);   // 0: invoke the SampleCB method.
                grabber.SetCallback(audioGrabberCB, 1);     // 1: invoke the BufferCB method.
            }
        }
        #endregion

        #region Video output: create a (null) renderer.
        {
            videoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
            if (videoRenderer == null)
            {
                throw new System.IO.IOException("Failed to create a videoRenderer.");
            }
            graph.AddFilter(videoRenderer, "videoRenderer");
        }
        #endregion

        #region Audio output: create a (null) renderer.
        {
            audioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
            if (audioRenderer == null)
            {
                throw new System.IO.IOException("Failed to create a audioRenderer.");
            }
            graph.AddFilter(audioRenderer, "audioRenderer");
        }
        #endregion

        #region Connect the filters:
        unsafe
        {
            HRESULT hr;

            // Connect: (video stream) source -> grabber -> renderer.
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), videoSource, videoGrabber, videoRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            // Connect: (audio stream) source -> grabber -> renderer.
            var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
            hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), videoSource, audioGrabber, audioRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }
        }
        #endregion

        #region DEBUG: save a GraphEdit file.
        /*
         * Saves the current filter layout to the specified file (GRF extension).
         * The saved file can be inspected with graphedt.exe (bundled with the Windows SDK).
         */
        try
        {
            Axi.SaveGraphFile(graph, Path.GetFullPath(__FUNCTION__ + ".GRF"));
        }
        catch (System.Exception ex)
        {
            Console.WriteLine(ex.StackTrace);
        }
        #endregion

        // ------------------------------

        #region Capture:
        {
            var mediaControl = (IMediaControl)graph;
            var mediaEvent = (IMediaEvent)graph;
            var mediaSeeking = (IMediaSeeking)graph;

            // Get the video frame dimensions.
            var vih = Axi.GetVideoInfo((ISampleGrabber)videoGrabber);

            var images = new List<Bitmap>();
            var watch = new Stopwatch();
            watch.Start();

            // Frame capture: collect each decoded frame as a Bitmap.
            videoGrabberCB.Notify += delegate(object _sender, CxSampleGrabberEventArgs _e)
            {
                Console.WriteLine("{0}: SampleTime={1:F6}", images.Count, _e.SampleTime);
                images.Add(_e.ToImage(vih));
            };

            // Play.
            Console.WriteLine("Run ...");
            {
                HRESULT hr;
                int state;
                hr = (HRESULT)mediaControl.Run();
                hr = (HRESULT)mediaControl.GetState(1000, out state);
            }
            Console.WriteLine("Running ... {0:F3} msec", watch.Elapsed.TotalMilliseconds);

            // Wait until playback completes, then stop the graph.
            {
                HRESULT hr;
                int code;
                hr = (HRESULT)mediaEvent.WaitForCompletion(-1, out code);
                hr = (HRESULT)mediaControl.Stop();
            }

            // Verification: dump the captured frames to PNG files.
            Console.WriteLine("Save ... {0:F3} msec", watch.Elapsed.TotalMilliseconds);
            {
                string subdir = Path.Combine(Results, __FUNCTION__);
                if (Directory.Exists(subdir) == false)
                {
                    Directory.CreateDirectory(subdir);
                }
                for (int i = 0; i < images.Count; i++)
                {
                    var filename = string.Format("image{0}.png", i);
                    images[i].Save(Path.Combine(subdir, filename));
                }
            }
            Console.WriteLine("Completed. {0:F3} msec", watch.Elapsed.TotalMilliseconds);
        }
        #endregion
    }
    catch (System.Exception ex)
    {
        Console.WriteLine("{0}", ex.StackTrace);
    }
    finally
    {
        #region Cleanup: release every COM object acquired above.
        if (videoSource != null)
        {
            Marshal.ReleaseComObject(videoSource);
        }
        videoSource = null;
        if (videoGrabber != null)
        {
            Marshal.ReleaseComObject(videoGrabber);
        }
        videoGrabber = null;
        if (audioGrabber != null)
        {
            Marshal.ReleaseComObject(audioGrabber);
        }
        audioGrabber = null;
        if (videoRenderer != null)
        {
            Marshal.ReleaseComObject(videoRenderer);
        }
        videoRenderer = null;
        if (audioRenderer != null)
        {
            Marshal.ReleaseComObject(audioRenderer);
        }
        audioRenderer = null;
        if (builder != null)
        {
            Marshal.ReleaseComObject(builder);
        }
        builder = null;
        if (graph != null)
        {
            Marshal.ReleaseComObject(graph);
        }
        graph = null;
        #endregion
    }
}
/// <summary>
/// Builds a playback graph for the AVI file named in txtAviFileName:
/// async file source -> sample grabber -> video renderer, then starts the
/// background timer thread and playback. Shows a message box if the file
/// does not exist.
/// </summary>
private void StartCapture()
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;

    if (System.IO.File.Exists(txtAviFileName.Text))
    {
        // Get the graphbuilder object
        m_FilterGraph = (IFilterGraph2) new FilterGraph();
        m_mediaCtrl = m_FilterGraph as IMediaControl;

        // Get the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Get the SampleGrabber interface
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the video source
        hr = m_FilterGraph.AddSourceFilter(txtAviFileName.Text, "File Source (Async.)", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        //add AVI Decompressor
        IBaseFilter pAVIDecompressor = (IBaseFilter) new AVIDec();
        hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
        DsError.ThrowExceptionForHR(hr);

        //
        IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter vidrender = (IBaseFilter) new VideoRenderer();
        hr = m_FilterGraph.AddFilter(vidrender, "Render");
        DsError.ThrowExceptionForHR(hr);

        // Connect source output pin to the grabber's input pin by hand;
        // Connect() lets the graph insert intermediate filters as needed.
        IPin captpin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        IPin samppin = DsFindPin.ByName(baseGrabFlt, "Input");
        hr = m_FilterGraph.Connect(captpin, samppin);
        DsError.ThrowExceptionForHR(hr);

        // NOTE(review): this FileWriter/IFileSinkFilter pair is created but never
        // configured or added to the graph — apparently leftover/dead code; confirm
        // before removing.
        FileWriter filewritter = new FileWriter();
        IFileSinkFilter filemux = (IFileSinkFilter)filewritter;
        //filemux.SetFileName("test.avi",);

        //hr = capGraph.RenderStream(null, MediaType.Video, capFilter, null, vidrender);
        // DsError.ThrowExceptionForHR(hr);

        SaveSizeInfo(sampGrabber);

        // setup buffer
        if (m_handle == IntPtr.Zero)
        {
            m_handle = Marshal.AllocCoTaskMem(m_stride * m_videoHeight);
        }

        // tell the callback to ignore new images
        m_PictureReady = new ManualResetEvent(false);
        m_bGotOne = false;
        m_bRunning = false;

        // Background thread driving the periodic timer callback.
        timer1 = new Thread(timer);
        timer1.IsBackground = true;
        timer1.Start();

        // Seeking/position interfaces (all views of the same filter graph).
        m_mediaextseek = m_FilterGraph as IAMExtendedSeeking;
        m_mediapos = m_FilterGraph as IMediaPosition;
        m_mediaseek = m_FilterGraph as IMediaSeeking;

        // Initialize the position track bar from the clip duration (seconds).
        double length = 0;
        m_mediapos.get_Duration(out length);
        trackBar_mediapos.Minimum = 0;
        trackBar_mediapos.Maximum = (int)length;

        Start();
    }
    else
    {
        MessageBox.Show("File does not exist");
    }
}
/// <summary>
/// Sets the capture parameters for the video capture device:
/// frame rate, desired width/height, and (optionally) the FourCC subtype.
/// Returns true when SetFormat succeeded, false otherwise.
/// </summary>
/// <param name="capGraph">Capture graph builder used to locate the stream config.</param>
/// <param name="captureFilter">Capture device filter to configure.</param>
/// <param name="mediaSubType">Desired subtype, or Guid.Empty to keep the current one.</param>
// FIX: the IAMStreamConfig RCW was never released, and the AMMediaType leaked
// if anything between GetFormat and FreeAMMediaType threw; both are now
// cleaned up in a finally block.
private bool SetVideoCaptureParameters(ICaptureGraphBuilder2 capGraph, IBaseFilter captureFilter, Guid mediaSubType)
{
    /* The stream config interface */
    object streamConfig;

    /* Get the stream's configuration interface */
    int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, captureFilter, typeof(IAMStreamConfig).GUID, out streamConfig);
    DsError.ThrowExceptionForHR(hr);

    var videoStreamConfig = streamConfig as IAMStreamConfig;

    /* If QueryInterface fails... */
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    AMMediaType media = null;
    try
    {
        /* Get the AMMediaType for the video out pin */
        hr = videoStreamConfig.GetFormat(out media);
        DsError.ThrowExceptionForHR(hr);

        /* Make the VIDEOINFOHEADER 'readable' */
        var videoInfo = new VideoInfoHeader();
        Marshal.PtrToStructure(media.formatPtr, videoInfo);

        /* Setup the VIDEOINFOHEADER with the parameters we want */
        videoInfo.AvgTimePerFrame = DSHOW_ONE_SECOND_UNIT / FPS;
        videoInfo.BmiHeader.Width = DesiredWidth;
        videoInfo.BmiHeader.Height = DesiredHeight;

        if (mediaSubType != Guid.Empty)
        {
            /* Encode the subtype as a FourCC compression code as well */
            videoInfo.BmiHeader.Compression = new FourCC(mediaSubType).ToInt32();
            media.subType = mediaSubType;
        }

        /* Copy the data back to unmanaged memory */
        Marshal.StructureToPtr(videoInfo, media.formatPtr, false);

        /* Set the format */
        hr = videoStreamConfig.SetFormat(media);
    }
    finally
    {
        /* We don't want any memory leaks, do we? */
        if (media != null)
        {
            DsUtils.FreeAMMediaType(media);
        }
        Marshal.ReleaseComObject(streamConfig);
    }

    return hr >= 0;
}
/// <summary>
/// Core dispose: detaches event subscribers and the HWND, then fully releases
/// every COM wrapper (playback graph first, then the recording graph objects).
/// Runs under instanceMutex so teardown cannot race other graph operations.
/// </summary>
/// <param name="disposing">True when called from Dispose(); managed cleanup only then.</param>
protected virtual void Dispose(bool disposing)
{
    if (disposing)
    {
        // get rid of managed resources (nothing beyond the releases below)
    }

    // get rid of unmanaged resources
    lock (instanceMutex)
    {
        // Drop all event subscribers so stale handlers cannot fire mid-teardown.
        TelemetryUpdate = null;
        PropertyChanged = null;
        Complete = null;

        // detach from HWND
        DetachControl();

        // Fully release an RCW and clear the owning field in one step.
        void ReleaseCom<T>(ref T rcw) where T : class
        {
            if (rcw != null)
            {
                Marshal.FinalReleaseComObject(rcw);
                rcw = null;
            }
        }

        ReleaseCom(ref _netSrc);
        ReleaseCom(ref _decoderFilter);

        if (_telemetryTimer != null)
        {
            _telemetryTimer.Dispose();
            _telemetryTimer = null;
        }

        ReleaseCom(ref _windowlessControl);
        ReleaseCom(ref _videoRender);

        // recording graph disposal
        ReleaseCom(ref _infPinTee);
        ReleaseCom(ref _fileSink);
        ReleaseCom(ref _bridgeSink);
        ReleaseCom(ref _bridgeSource);
        ReleaseCom(ref _bridgeController);
        ReleaseCom(ref _fileSinkMediaControl);
        ReleaseCom(ref _fileSinkCaptureGraphBuilder);
        ReleaseCom(ref _fileSinkGraphBuilder);
        ReleaseCom(ref _fileSinkFilter);
        ReleaseCom(ref _muxer);
        ReleaseCom(ref _captureGraphBuilder);
        ReleaseCom(ref _graphBuilder); //TODO race on RCW cleanup during app quit
    }
}
/// <summary>
/// Completely tear down a filter graph and release all associated resources.
/// </summary>
void DestroyGraph()
{
    // Derender first (best effort, errors ignored). This stops the graph and
    // releases the preview window; it also destroys half of the graph, which
    // is unnecessary but harmless here.
    try
    {
        DerenderGraph();
    }
    catch
    {
    }

    // Reset state after derender (it depends on correct status), but as early
    // as possible otherwise, in case anything below fails.
    _actualGraphState = GraphState.Null;
    _isPreviewRendered = false;

    // Explicitly remove our filters before dropping the graph. This should be
    // unnecessary, but the Nvidia WDM video driver cannot be reused by this
    // application unless it is removed. Ideally every filter in the graph
    // would be enumerated and removed. (Errors ignored.)
    if (_graphBuilder != null)
    {
        if (_videoCompressorFilter != null)
        {
            _graphBuilder.RemoveFilter(_videoCompressorFilter);
        }
        if (_videoDeviceFilter != null)
        {
            _graphBuilder.RemoveFilter(_videoDeviceFilter);
        }

        // Cleanup
        Marshal.ReleaseComObject(_graphBuilder);
        _graphBuilder = null;
    }

    if (_captureGraphBuilder != null)
    {
        Marshal.ReleaseComObject(_captureGraphBuilder);
        _captureGraphBuilder = null;
    }

    if (_videoDeviceFilter != null)
    {
        Marshal.ReleaseComObject(_videoDeviceFilter);
        _videoDeviceFilter = null;
    }

    if (_videoCompressorFilter != null)
    {
        Marshal.ReleaseComObject(_videoCompressorFilter);
        _videoCompressorFilter = null;
    }

    // These are merely alternate views of graphBuilder — just drop them.
    _mediaControl = null;
    _videoWindow = null;

    // For unmanaged objects we haven't released explicitly
    GC.Collect();
}
private void Init() { graphBuilder = (IGraphBuilder)new FilterGraph(); //Create the media control for controlling the graph mediaControl = (IMediaControl)graphBuilder; mediaEvent = (IMediaEvent)graphBuilder; volume = 100; captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2(); // initialize the Capture Graph Builder int hr = captureGraphBuilder.SetFiltergraph(this.graphBuilder); DsError.ThrowExceptionForHR(hr); DeinterlaceLayoutList = new DeinterlaceList(); DeinterlaceLayoutList.Add(new DeInterlaceAlparyLayout(this)); DeinterlaceLayoutList.Add(new DeInterlaceDscalerLayout(this)); DeinterlaceLayoutList.Add(new DeInterlaceFFDShowLayout(this)); aspectRatio = 4.0f / 3.0f; }
/// <summary>
/// Builds the capture graph for the selected camera unit: enumerates video
/// input devices, adds the chosen device and a sample-grabber filter,
/// applies the requested color format / frame rate / frame size, renders
/// the capture stream and allocates the frame buffer.
/// Logs and returns without building when no camera is present.
/// Fixes vs. original: the IAMStreamConfig COM reference is now released,
/// and the pointless catch-and-rethrow was removed.
/// </summary>
public void BuildGraph()
{
    ICaptureGraphBuilder2 captureGraphBuilder2 = (ICaptureGraphBuilder2)null;
    IBaseFilter ppFilter = (IBaseFilter)null;
    ISampleGrabber sampleGrabber = (ISampleGrabber)null;
    object streamConfigObj = null;
    List<DeviceEnumerator> deviceEnumeratorList = (List<DeviceEnumerator>)null;
    try
    {
        Logger.Info("Creating List of devices");
        deviceEnumeratorList = DeviceEnumerator.ListDevices(Guids.VideoInputDeviceCategory);
    }
    catch (Exception ex)
    {
        Logger.Error("Exception in finding Video device. Err : {0}", (object)ex.ToString());
    }
    if (deviceEnumeratorList != null && deviceEnumeratorList.Count != 0)
    {
        try
        {
            Logger.Info("found {0} Camera, Opening {1}", (object)deviceEnumeratorList.Count, (object)this.m_Unit);
            // Fall back to the first camera when the requested unit is out of range.
            DeviceEnumerator deviceEnumerator = this.m_Unit >= deviceEnumeratorList.Count ? deviceEnumeratorList[0] : deviceEnumeratorList[this.m_Unit];

            this.m_FilterGraph = (IFilterGraph2)new FilterGraph();
            this.m_mediaCtrl = this.m_FilterGraph as IMediaControl;
            captureGraphBuilder2 = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
            sampleGrabber = (ISampleGrabber)new SampleGrabber();

            ErrorHandler errorHandler1 = (ErrorHandler)captureGraphBuilder2.SetFiltergraph((IGraphBuilder)this.m_FilterGraph);
            if (errorHandler1.GetError() != 0)
            {
                Logger.Error("SetFiltergraph failed with {0:X}..", (object)errorHandler1.GetError());
            }

            // Add the camera as the graph's source filter.
            ErrorHandler errorHandler2 = (ErrorHandler)this.m_FilterGraph.AddSourceFilterForMoniker(deviceEnumerator.Moniker, (IBindCtx)null, "Video input", out ppFilter);
            if (errorHandler2.GetError() != 0)
            {
                Logger.Error("AddSourceFilterForMoniker failed with {0:X}", (object)errorHandler2.GetError());
            }

            // Grabber media type follows the requested color format.
            AMMediaType pmt = new AMMediaType()
            {
                majorType = Guids.MediaTypeVideo
            };
            if (this.m_color == SupportedColorFormat.YUV2)
            {
                pmt.subType = Guids.MediaSubtypeYUY2;
            }
            else
            {
                if (this.m_color != SupportedColorFormat.RGB24)
                {
                    throw new Exception("Unsupported color format");
                }
                pmt.subType = Guids.MediaSubtypeRGB24;
            }
            pmt.formatType = Guids.FormatTypesVideoInfo;
            ErrorHandler errorHandler3 = (ErrorHandler)sampleGrabber.SetMediaType(pmt);
            this.FreeAMMedia(pmt);

            // This object receives the grabbed frames (mode 1 = BufferCB).
            ErrorHandler errorHandler4 = (ErrorHandler)sampleGrabber.SetCallback((ISampleGrabberCB)this, 1);
            if (errorHandler4.GetError() != 0)
            {
                Logger.Error("Grabber setcallback failed with {0:X}", (object)errorHandler4.GetError());
            }
            IBaseFilter baseFilter = (IBaseFilter)sampleGrabber;
            ErrorHandler errorHandler5 = (ErrorHandler)this.m_FilterGraph.AddFilter(baseFilter, "FrameGrabber");
            if (errorHandler5.GetError() != 0)
            {
                Logger.Error("AddFilter failed with {0:X}", (object)errorHandler5.GetError());
            }

            // Stream-config interface of the camera's capture pin.
            ErrorHandler errorHandler6 = (ErrorHandler)captureGraphBuilder2.FindInterface(Guids.PinCategoryCapture, Guids.MediaTypeVideo, ppFilter, typeof(IAMStreamConfig).GUID, out streamConfigObj);
            if (errorHandler6.GetError() != 0)
            {
                Logger.Error("FindInterface failed with {0:X}", (object)errorHandler6.GetError());
            }
            if (!(streamConfigObj is IAMStreamConfig amStreamConfig))
            {
                throw new Exception("Stream config Error");
            }

            // Overlay the requested frame rate and size on the current format.
            errorHandler3 = (ErrorHandler)amStreamConfig.GetFormat(out pmt);
            VideoInfoHeader videoInfoHeader = new VideoInfoHeader();
            Marshal.PtrToStructure(pmt.pbFormat, (object)videoInfoHeader);
            videoInfoHeader.AvgTimePerFrame = (long)(10000000 / this.m_FrameRate);
            videoInfoHeader.BmiHeader.Width = this.m_Width;
            videoInfoHeader.BmiHeader.Height = this.m_Height;
            Marshal.StructureToPtr((object)videoInfoHeader, pmt.pbFormat, false);
            ErrorHandler errorHandler7 = (ErrorHandler)amStreamConfig.SetFormat(pmt);
            if (errorHandler7.GetError() != 0)
            {
                Logger.Error("conf.setformat failed with {0:X}", (object)errorHandler7.GetError());
            }
            this.FreeAMMedia(pmt);

            // Wire camera -> grabber (renderer added implicitly as needed).
            ErrorHandler errorHandler8 = (ErrorHandler)captureGraphBuilder2.RenderStream(Guids.PinCategoryCapture, Guids.MediaTypeVideo, (object)ppFilter, (IBaseFilter)null, baseFilter);
            if (errorHandler8.GetError() != 0)
            {
                Logger.Error("RenderStream failed with {0:X}", (object)errorHandler8.GetError());
            }

            // Read back the format actually negotiated on the grabber pin.
            AMMediaType amMediaType = new AMMediaType();
            errorHandler3 = (ErrorHandler)sampleGrabber.GetConnectedMediaType(amMediaType);
            if (amMediaType.formatType != Guids.FormatTypesVideoInfo)
            {
                throw new ColorFormatNotSupported("Not able to connect to Video Media");
            }
            if (amMediaType.pbFormat == IntPtr.Zero)
            {
                throw new Exception("Format Array is null");
            }
            VideoInfoHeader structure = (VideoInfoHeader)Marshal.PtrToStructure(amMediaType.pbFormat, typeof(VideoInfoHeader));
            this.m_Width = structure.BmiHeader.Width;
            this.m_Height = structure.BmiHeader.Height;
            this.m_Stride = this.m_Width * ((int)structure.BmiHeader.BitCount / 8);
            if (this.m_Buffer == IntPtr.Zero)
            {
                this.m_Buffer = Marshal.AllocCoTaskMem(this.m_Stride * this.m_Height);
            }
            this.FreeAMMedia(amMediaType);
            return;
        }
        finally
        {
            // Release local COM references; the graph keeps its own
            // references on filters that were added to it.
            if (streamConfigObj != null)
            {
                Marshal.ReleaseComObject(streamConfigObj);
            }
            if (ppFilter != null)
            {
                Marshal.ReleaseComObject((object)ppFilter);
            }
            if (sampleGrabber != null)
            {
                Marshal.ReleaseComObject((object)sampleGrabber);
            }
            if (captureGraphBuilder2 != null)
            {
                Marshal.ReleaseComObject((object)captureGraphBuilder2);
            }
        }
    }
    Logger.Info("CAMERA: Could not find a camera device!");
}
private void InitTuner(ICaptureGraphBuilder2 captureGraphBuilder) { Object o; var hr = captureGraphBuilder.FindInterface(null, null, videoDevice, typeof(IAMTVTuner).GUID, out o); if (hr >= 0) { tuner = (IAMTVTuner)o; //tuner.put_Mode(AMTunerModeType.TV); o = null; //find crossbar var list = findCrossbars(captureGraphBuilder, (IBaseFilter)tuner); /*hr = captureGraphBuilder.FindInterface(null, null, (IBaseFilter)Tuner, typeof(IAMCrossbar).GUID, out o); if (hr >= 0) { crossbar = (IAMCrossbar)o; InitCrossbar(); } else crossbar = null; */ if (list.Count > 0) { crossbar = (IAMCrossbar)list[0]; InitCrossbar(); } o = null; // find amtvaudio hr = captureGraphBuilder.FindInterface(null, null, videoDevice, typeof(IAMTVAudio).GUID, out o); if (hr >= 0) { TVAudio = (IAMTVAudio)o; } o = null; // find IAMAnalogVideoDecoder hr = captureGraphBuilder.FindInterface(null, null, videoDevice, typeof(IAMAnalogVideoDecoder).GUID, out o); if (hr >= 0) { analogVideoDecoder = (o as IAMAnalogVideoDecoder); AnalogVideoStandard avs; analogVideoDecoder.get_TVFormat(out avs); } o = null; } else tuner = null; }
// ------------------ Public Methods --------------------

/// <summary>
/// Populate the collection by looking for commonly implemented property
/// pages on the capture/compressor filters, their pins and crossbars.
/// The original copy-pasted FindInterface fallback and crossbar loops are
/// factored into private helpers; page names and ordering are unchanged.
/// </summary>
protected void addFromGraph(
    ICaptureGraphBuilder2 graphBuilder,
    IBaseFilter videoDeviceFilter, IBaseFilter audioDeviceFilter,
    IBaseFilter videoCompressorFilter, IBaseFilter audioCompressorFilter,
    SourceCollection videoSources, SourceCollection audioSources)
{
    Trace.Assert(graphBuilder != null);

    // 1. the video capture filter
    addIfSupported(videoDeviceFilter, "Video Capture Device");

    // 2. the video capture pin
    addIfSupported(
        findVideoPinInterface(graphBuilder, PinCategory.Capture, videoDeviceFilter, typeof(IAMStreamConfig).GUID),
        "Video Capture Pin");

    // 3. the video preview pin
    addIfSupported(
        findVideoPinInterface(graphBuilder, PinCategory.Preview, videoDeviceFilter, typeof(IAMStreamConfig).GUID),
        "Video Preview Pin");

    // 4. the video crossbar(s)
    addCrossbarPages(videoSources, "Video Crossbar ");

    // 5. the video compressor
    addIfSupported(videoCompressorFilter, "Video Compressor");

    // 6. the video TV tuner
    addIfSupported(
        findVideoPinInterface(graphBuilder, PinCategory.Capture, videoDeviceFilter, typeof(IAMTVTuner).GUID),
        "TV Tuner");

    // 7. the video compressor (VFW) dialog-based pages
    IAMVfwCompressDialogs compressDialog = videoCompressorFilter as IAMVfwCompressDialogs;
    if (compressDialog != null)
    {
        VfwCompressorPropertyPage page = new VfwCompressorPropertyPage("Video Compressor", compressDialog);
        InnerList.Add(page);
    }

    // 8. the audio capture filter
    addIfSupported(audioDeviceFilter, "Audio Capture Device");

    // 9. the audio capture pin
    addIfSupported(
        findAudioPinInterface(graphBuilder, PinCategory.Capture, audioDeviceFilter),
        "Audio Capture Pin");

    // 10. the audio preview pin
    addIfSupported(
        findAudioPinInterface(graphBuilder, PinCategory.Preview, audioDeviceFilter),
        "Audio Preview Pin");

    // 11. the audio crossbar(s)
    addCrossbarPages(audioSources, "Audio Crossbar ");

    // 12. the audio compressor
    addIfSupported(audioCompressorFilter, "Audio Compressor");
}

/// <summary>
/// Find <paramref name="iid"/> on a pin of the given category, trying the
/// Interleaved media type first (DV devices) and falling back to Video.
/// Returns null when neither lookup succeeds.
/// </summary>
private object findVideoPinInterface(ICaptureGraphBuilder2 graphBuilder, Guid category, IBaseFilter deviceFilter, Guid iid)
{
    Guid cat = category;
    Guid med = MediaType.Interleaved;
    object filter;
    int hr = graphBuilder.FindInterface(ref cat, ref med, deviceFilter, ref iid, out filter);
    if (hr != 0)
    {
        med = MediaType.Video;
        hr = graphBuilder.FindInterface(ref cat, ref med, deviceFilter, ref iid, out filter);
        if (hr != 0)
        {
            filter = null;
        }
    }
    return filter;
}

/// <summary>
/// Find the IAMStreamConfig interface on an audio pin of the given
/// category. Returns null when the lookup fails.
/// </summary>
private object findAudioPinInterface(ICaptureGraphBuilder2 graphBuilder, Guid category, IBaseFilter deviceFilter)
{
    Guid cat = category;
    Guid med = MediaType.Audio;
    Guid iid = typeof(IAMStreamConfig).GUID;
    object filter;
    int hr = graphBuilder.FindInterface(ref cat, ref med, deviceFilter, ref iid, out filter);
    if (hr != 0)
    {
        filter = null;
    }
    return filter;
}

/// <summary>
/// Add one property page per distinct crossbar found in
/// <paramref name="sources"/>. The first page keeps the bare prefix
/// (including its trailing space, as before); later ones are numbered.
/// </summary>
private void addCrossbarPages(SourceCollection sources, string namePrefix)
{
    ArrayList seen = new ArrayList();
    int num = 1;
    for (int c = 0; c < sources.Count; c++)
    {
        CrossbarSource s = sources[c] as CrossbarSource;
        if (s == null)
        {
            continue;
        }
        if (seen.IndexOf(s.Crossbar) >= 0)
        {
            continue;  // already handled this physical crossbar
        }
        seen.Add(s.Crossbar);
        if (addIfSupported(s.Crossbar, namePrefix + (num == 1 ? "" : num.ToString())))
        {
            num++;
        }
    }
}
private void InitCaptureInterface() { // release com object (useless here but can't hurt) Cleanup(true); this.fmc = new FilgraphManagerClass(); // create the cg object and add the filter graph to it Type t = Type.GetTypeFromCLSID(CLSID_CaptureGraphBuilder2); this.icgb = (ICaptureGraphBuilder2)Activator.CreateInstance(t); t = Type.GetTypeFromCLSID(CLSID_SampleGrabber); this.isg = (ISampleGrabber)Activator.CreateInstance(t); // source filter (the capture device) this.sf = (IBaseFilter)this.SourceFilterList[this.cbxDevice.SelectedIndex]; // sample grabber filter this.sgf = (IBaseFilter)this.isg; object o = null; this.icgb.RemoteFindInterface(ref PIN_CATEGORY_CAPTURE, ref MEDIATYPE_Video, sf, ref IID_IAMStreamConfig, out o); this.iamsc = (IAMStreamConfig)o; // set sample grabber media type this.SGMediaType = new _AMMediaType(); this.SGMediaType.majortype = MEDIATYPE_Video; this.SGMediaType.subtype = MEDIASUBTYPE_RGB24; this.SGMediaType.formattype = FORMAT_VideoInfo; this.isg.SetMediaType(ref SGMediaType); this.isg.SetOneShot(0); this.isg.SetBufferSamples(1); }
/// <summary>
/// Builds the WM ASF capture/streaming graph: adds the chosen video/audio
/// capture devices, creates the ASF writer, configures its sinks from
/// <paramref name="config"/>, and renders the capture streams into it.
/// Fixes vs. original: a null <paramref name="config"/> no longer crashes,
/// and the writer RCW is no longer released twice via its IServiceProvider cast.
/// </summary>
/// <param name="config">
/// Option table: "cap" = capture to local file name, "send" = push to
/// remote URL, "share" = listen on port, "shitty" = low-quality profile.
/// May be null (no options).
/// </param>
public void Init(Hashtable config = null)
{
    // The parameter defaults to null, so never dereference it blindly.
    if (config == null)
    {
        config = new Hashtable();
    }

    //m_FilterGraph = (IFilterGraph2)new FilterGraph();
    m_FilterGraph = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(Clsid.FilterGraph, true));

    // Get the ICaptureGraphBuilder2
    Guid clsid = Clsid.CaptureGraphBuilder2;
    Guid riid = typeof(ICaptureGraphBuilder2).GUID;
    ICaptureGraphBuilder2 capGraph = (ICaptureGraphBuilder2)DsBugWO.CreateDsInstance(ref clsid, ref riid);

    IBaseFilter capVideoFilter = null;
    IBaseFilter capAudioFilter = null;
    IBaseFilter asfWriter = null;
    int hr;
    object iwmWriter2;
    try
    {
        // Start building the graph
        hr = capGraph.SetFiltergraph(m_FilterGraph);
        Marshal.ThrowExceptionForHR(hr);

        // Add the video device to the graph
        if (videoDevChosen != null)
        {
            capVideoFilter = GetCapFilter(ref videoDevChosen);
            hr = m_FilterGraph.AddFilter(capVideoFilter, "Video Capture Device");
            Marshal.ThrowExceptionForHR(hr);
        }

        // Add the audio device to the graph
        if (audioDevChosen != null)
        {
            capAudioFilter = GetCapFilter(ref audioDevChosen);
            hr = m_FilterGraph.AddFilter(capAudioFilter, "Audio Capture Device");
            Marshal.ThrowExceptionForHR(hr);
        }

        // if we need some shitty quality
        if (config.Contains("shitty"))
        {
            InitAsfWriter(out asfWriter, true);
        }
        else
        {
            InitAsfWriter(out asfWriter);
        }

        // Getting IWMWriterAdvanced2.
        // NOTE: serviceProvider is the same RCW as asfWriter — do NOT
        // ReleaseComObject it separately (the original did, double-releasing).
        IServiceProvider serviceProvider = (IServiceProvider)asfWriter;
        Guid IID_IWMWriterAdvanced2 = new Guid("{962dc1ec-c046-4db8-9cc7-26ceae500817}");
        hr = serviceProvider.QueryService(IID_IWMWriterAdvanced2, IID_IWMWriterAdvanced2, out iwmWriter2);
        Marshal.ThrowExceptionForHR(hr);
        m_writerAdvanced2 = (IWMWriterAdvanced2)iwmWriter2;
        m_writerAdvanced2.SetLiveSource(true);

        if (config.ContainsKey("cap"))
        {
            outputFilename = config["cap"] as string;
            Console.WriteLine("[MODE] Capturing to a local file: {0}", outputFilename);
        }
        IFileSinkFilter cap = (IFileSinkFilter)asfWriter;
        cap.SetFileName(outputFilename, null);

        if (!config.ContainsKey("cap"))
        {
            // Not capturing: delete the useless file sink (writer to a file on a disk).
            IWMWriterSink uselessSink = null;
            m_writerAdvanced2.GetSink(0, out uselessSink);
            m_writerAdvanced2.RemoveSink(uselessSink);
            if (uselessSink != null)
            {
                Marshal.ReleaseComObject(uselessSink);
                uselessSink = null;
            }
        }

        if (config.Contains("send"))
        {
            string url = config["send"] as string;
            Console.WriteLine("[MODE] Streaming to a remote server: {0}", url);
            WriterNetworkSink sender = new WriterNetworkSink(url);
            m_writerAdvanced2.AddSink(sender);
        }

        if (config.Contains("share"))
        {
            int port = (int)config["share"];
            WriterNetworkSink listener = new WriterNetworkSink(port);
            Console.WriteLine("[MODE] Started listening on port {0}", port);
            m_writerAdvanced2.AddSink(listener);
        }

        // Connecting VideoDev to asfWriter
        if (videoDevChosen != null)
        {
            hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capVideoFilter, null, asfWriter);
            Marshal.ThrowExceptionForHR(hr);
        }
        // Connecting AudioDev to asfWriter
        if (audioDevChosen != null)
        {
            hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Audio, capAudioFilter, null, asfWriter);
            Marshal.ThrowExceptionForHR(hr);
        }

        m_mediaCtrl = m_FilterGraph as IMediaControl;

        //debug, dumps graph
        //DirectShowLib.Utils.FilterGraphTools.SaveGraphFile(m_FilterGraph, ".\\mygraph.grf");
    }
    finally
    {
        // Release local COM references (the graph holds its own).
        if (capVideoFilter != null)
        {
            Marshal.ReleaseComObject(capVideoFilter);
            capVideoFilter = null;
        }
        if (capAudioFilter != null)
        {
            Marshal.ReleaseComObject(capAudioFilter);
            capAudioFilter = null;
        }
        if (asfWriter != null)
        {
            Marshal.ReleaseComObject(asfWriter);
            asfWriter = null;
        }
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
            capGraph = null;
        }
    }
    Console.WriteLine("INIT done");
}
public void CloseInterfaces() { // Stop previewing data if (this.mediaControl != null) this.mediaControl.StopWhenReady(); this.currentState = PlayState.Stopped; // Stop receiving events if (this.mediaEventEx != null) this.mediaEventEx.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero); // Relinquish ownership (IMPORTANT!) of the video window. // Failing to call put_Owner can lead to assert failures within // the video renderer, as it still assumes that it has a valid // parent window. if(this.videoWindow != null) { this.videoWindow.put_Visible(OABool.False); this.videoWindow.put_Owner(IntPtr.Zero); } // Remove filter graph from the running object table if (rot != null) { rot.Dispose(); rot = null; } // Release DirectShow interfaces Marshal.ReleaseComObject(this.mediaControl); this.mediaControl = null; Marshal.ReleaseComObject(this.mediaEventEx); this.mediaEventEx = null; Marshal.ReleaseComObject(this.videoWindow); this.videoWindow = null; Marshal.ReleaseComObject(this.graphBuilder); this.graphBuilder = null; Marshal.ReleaseComObject(this.captureGraphBuilder); this.captureGraphBuilder = null; }
/// <summary>
/// Enumerates the resolutions supported by the named camera. Only the
/// formats with the highest bit depth seen are kept; each entry is
/// formatted as "width*height".
/// Fixes vs. original: the capability buffer, the per-format media types
/// and every COM reference are now freed/released.
/// </summary>
/// <param name="CameraName">Friendly name of the video capture device.</param>
/// <returns>List of "W*H" strings for the camera's deepest color format.</returns>
public static List<string> GetCameraSupportResolution(string CameraName)
{
    DsDevice[] dsVideoDevice = GetAllVideoDevice();
    IBaseFilter theCamera = TestManager.CreateFilter(dsVideoDevice, CameraName);
    IFilterGraph2 graphBuilder = (IFilterGraph2)new FilterGraph();
    ICaptureGraphBuilder2 captureGraphBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    List<string> AvailableResolutions = new List<string>();
    object streamConfig = null;
    IntPtr TaskMemPointer = IntPtr.Zero;
    try
    {
        // Attach the filter graph to the capture graph.
        int hr = captureGraphBuilder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // Add the camera so its capture pin can be queried.
        hr = graphBuilder.AddFilter(theCamera, "source filter");
        DsError.ThrowExceptionForHR(hr);

        // Stream-config interface of the capture pin.
        hr = captureGraphBuilder.FindInterface(PinCategory.Capture, MediaType.Video, theCamera, typeof(IAMStreamConfig).GUID, out streamConfig);
        Marshal.ThrowExceptionForHR(hr);
        var videoStreamConfig = streamConfig as IAMStreamConfig;
        if (videoStreamConfig == null)
        {
            throw new Exception("Failed to get IAMStreamConfig");
        }

        // GetNumberOfCapabilities reports both the number of supported
        // media types and the size of the capability structure that
        // GetStreamCaps fills in for each of them.
        int iCount;
        int iSize;
        hr = videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
        Marshal.ThrowExceptionForHR(hr);
        TaskMemPointer = Marshal.AllocCoTaskMem(iSize);

        int bitCount = 0;
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            // GetStreamCaps returns the media type (via ptr) together with
            // its capability block (written into TaskMemPointer).
            IntPtr ptr = IntPtr.Zero;
            videoStreamConfig.GetStreamCaps(iFormat, out ptr, TaskMemPointer);
            AMMediaType pmtConfig = (AMMediaType)Marshal.PtrToStructure(ptr, typeof(AMMediaType));
            VideoInfoHeader videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(pmtConfig.formatPtr, typeof(VideoInfoHeader));
            if (videoInfo.BmiHeader.Size != 0 && videoInfo.BmiHeader.BitCount != 0)
            {
                // Keep only the formats with the deepest bit count seen so far.
                if (videoInfo.BmiHeader.BitCount > bitCount)
                {
                    AvailableResolutions.Clear();
                    bitCount = videoInfo.BmiHeader.BitCount;
                }
                AvailableResolutions.Add(videoInfo.BmiHeader.Width + "*" + videoInfo.BmiHeader.Height);
            }
            // Free the media type allocated by GetStreamCaps (previously leaked).
            DsUtils.FreeAMMediaType(pmtConfig);
            Marshal.FreeCoTaskMem(ptr);
        }
        return (AvailableResolutions);
    }
    finally
    {
        // Free the capability scratch buffer and release all COM
        // references (all previously leaked).
        if (TaskMemPointer != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(TaskMemPointer);
        }
        if (streamConfig != null)
        {
            Marshal.ReleaseComObject(streamConfig);
        }
        if (captureGraphBuilder != null)
        {
            Marshal.ReleaseComObject(captureGraphBuilder);
        }
        if (graphBuilder != null)
        {
            Marshal.ReleaseComObject(graphBuilder);
        }
        if (theCamera != null)
        {
            Marshal.ReleaseComObject(theCamera);
        }
    }
}
// -------------------- Protected Methods ----------------------- /// <summary> Populate the collection from a filter graph. </summary> protected void addFromGraph( ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter, bool isVideoDevice ) { Trace.Assert( graphBuilder != null ); ArrayList crossbars = findCrossbars( graphBuilder, deviceFilter ); foreach ( IAMCrossbar crossbar in crossbars ) { ArrayList sources = findCrossbarSources( graphBuilder, crossbar, isVideoDevice ); InnerList.AddRange( sources ); } if ( !isVideoDevice ) { if ( InnerList.Count == 0 ) { ArrayList sources = findAudioSources( graphBuilder, deviceFilter ); InnerList.AddRange( sources ); } } }
/// <summary> /// Build the filter graph /// </summary> /// <param name="hWin">Window to draw into</param> private void SetupGraph(string sFileName) { int hr; // Get the graphbuilder object m_FilterGraph = new FilterGraph() as IFilterGraph2; // Get a ICaptureGraphBuilder2 to help build the graph ICaptureGraphBuilder2 icgb2 = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); try { // Link the ICaptureGraphBuilder2 to the IFilterGraph2 hr = icgb2.SetFiltergraph(m_FilterGraph); DsError.ThrowExceptionForHR(hr); #if DEBUG // Allows you to view the graph with GraphEdit File/Connect m_DsRot = new DsROTEntry(m_FilterGraph); #endif // Our graph source filter IBaseFilter ipsb = (IBaseFilter) new GenericSamplePullFilter(); try { // Get the output pin from the filter so we can configure it IPin gssfOut = DsFindPin.ByDirection(ipsb, PinDirection.Output, 0); // Configure the pin's media type and callback string sExt = Path.GetExtension(sFileName); ConfigurePuller((IGenericPullConfig)gssfOut, sExt); // Configure the file name ConfigureRdr((ISetFileName)gssfOut, sFileName); // Free the pin Marshal.ReleaseComObject(gssfOut); // Add the filter to the graph hr = m_FilterGraph.AddFilter(ipsb, "GenericSamplePullFilter"); Marshal.ThrowExceptionForHR(hr); // Build the rest of the graph, outputting to the default renderer hr = icgb2.RenderStream(null, null, ipsb, null, null); Marshal.ThrowExceptionForHR(hr); // Connect any audio pin hr = icgb2.RenderStream(null, MediaType.Audio, ipsb, null, null); //Marshal.ThrowExceptionForHR( hr ); // Blindly assume any errors are due to no audio pin } finally { Marshal.ReleaseComObject(ipsb); } // Grab some other interfaces m_mediaCtrl = m_FilterGraph as IMediaControl; } finally { Marshal.ReleaseComObject(icgb2); } }
/// <summary> Initialize collection with sources from graph. </summary> internal SourceCollection(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter, bool isVideoDevice) { addFromGraph( graphBuilder, deviceFilter, isVideoDevice ); }
/// <summary>
/// Builds the DirectShow preview graph: creates the filter graph manager
/// and capture graph builder, adds the video input device filters, binds
/// the preview video window to panel1, routes graph events through
/// WM_GRAPHNOTIFY, and starts the preview.
/// </summary>
/// <returns>True on success; false when no video device is found or any step fails.</returns>
private bool directShowInitialize()
{
    try
    {
        mVideoChecker = new VideoChecker(this);
        //mAudioChecker = new AudioChecker(this);
        //mVideoChecker.Test();

        // 3. Create the filter graph manager and obtain the interfaces
        //    used to drive it.
        graphBuilder = GraphFactory.MakeGraphBuilder();
        // Query the control / window / event interfaces from the graph.
        mediaControl = (IMediaControl)graphBuilder;
        videoWindow = (IVideoWindow)graphBuilder;
        mediaEvent = (IMediaEventEx)graphBuilder;

        // 4. Create the capture graph builder (the sample grabber filter
        //    that exposes individual video frames is handled elsewhere).
        captureGraphBuilder = GraphFactory.MakeCaptureGraphBuilder();

        // 5. Attach the capture graph builder to the filter graph manager.
        result = captureGraphBuilder.SetFiltergraph(graphBuilder);
        if (result < 0)
        {
            Marshal.ThrowExceptionForHR(result);
        }

        // 1./2. Enumerate capture devices and map them to source filters.
        // 6. Connect the filters, preparing capture and preview of the input video.
        bool ret = mVideoChecker.AddVideoFilters(graphBuilder, captureGraphBuilder);
        if (!ret)
        {
            //SMMMessageBox.Show("エラー:映像入力デバイスが見つかりませんでした。\nプログラムを終了します。Error: Any video devices are not found.\n Closing this application.", SMMMessageBoxIcon.Error);
            return (false);
        }
        //ret = mAudioChecker.AddAudioFilters(graphBuilder, captureGraphBuilder);
        //if (!ret) {
        //    SMMMessageBox.Show("エラー:音声入力デバイスが見つかりませんでした。\nプログラムを終了します。Error: Any audio devices are not found.\n Closing this application.", SMMMessageBoxIcon.Error);
        //    return false;
        //}

        // 7. Direct the preview video (the renderer filter's output) into the UI.
        // Make panel1 the owner window of the preview video.
        result = videoWindow.put_Owner(this.panel1.Handle);
        if (result < 0)
        {
            Marshal.ThrowExceptionForHR(result);
        }
        // Set the style of the video display area.
        result = videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipChildren);
        if (result < 0)
        {
            Marshal.ThrowExceptionForHR(result);
        }
        // Resize the video to fill the panel.
        Rectangle rect = this.panel1.ClientRectangle;
        videoWindow.SetWindowPosition(0, 0, rect.Right, rect.Bottom);
        // Make the renderer filter's output visible.
        result = videoWindow.put_Visible(OABool.True);
        if (result < 0)
        {
            Marshal.ThrowExceptionForHR(result);
        }

        // 8. Arrange for DirectShow events to be delivered as Windows
        //    messages: map mediaEvent notifications onto WM_GRAPHNOTIFY.
        result = mediaEvent.SetNotifyWindow(this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
        if (result < 0)
        {
            Marshal.ThrowExceptionForHR(result);
        }

        // 9. Start the preview.
        result = mediaControl.Run();
        if (result < 0)
        {
            Marshal.ThrowExceptionForHR(result);
        }
    }
    catch (Exception e)
    {
        SMMMessageBox.Show(e.Message, SMMMessageBoxIcon.Error);
        return (false);
    }
    return (true);
}
void InitResolution(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int targetWidth, int targetHeight) { object o; capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o); AMMediaType media = null; IAMStreamConfig videoStreamConfig = o as IAMStreamConfig; IntPtr ptr; int iC = 0, iS = 0; videoStreamConfig.GetNumberOfCapabilities(out iC, out iS); ptr = Marshal.AllocCoTaskMem(iS); int bestDWidth = 999999; int bestDHeight = 999999; int streamID = 0; for (int i = 0; i < iC; i++) { videoStreamConfig.GetStreamCaps(i, out media, ptr); VideoInfoHeader v; v = new VideoInfoHeader(); Marshal.PtrToStructure(media.formatPtr, v); int dW = Math.Abs(targetWidth - v.BmiHeader.Width); int dH = Math.Abs(targetHeight - v.BmiHeader.Height); if (dW < bestDWidth && dH < bestDHeight) { streamID = i; bestDWidth = dW; bestDHeight = dH; } } videoStreamConfig.GetStreamCaps(streamID, out media, ptr); int hr = videoStreamConfig.SetFormat(media); Marshal.FreeCoTaskMem(ptr); DsError.ThrowExceptionForHR(hr); DsUtils.FreeAMMediaType(media); media = null; }
private void BuildGraph() { int hr = 0; graphBuilder = (IFilterGraph2) new FilterGraph(); rot = new DsROTEntry(graphBuilder); ICaptureGraphBuilder2 capBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2(); capBuilder.SetFiltergraph(graphBuilder); // Get the BDA network provider specific for this given network type networkProvider = BDAUtils.GetNetworkProvider(networkType); hr = graphBuilder.AddFilter(networkProvider, "BDA Network Provider"); DsError.ThrowExceptionForHR(hr); tuner = (ITuner)networkProvider; // Get a tuning space for this network type tuningSpace = BDAUtils.GetTuningSpace(networkType); hr = tuner.put_TuningSpace(tuningSpace); DsError.ThrowExceptionForHR(hr); // Create a tune request from this tuning space tuneRequest = BDAUtils.CreateTuneRequest(tuningSpace); // Is it okay ? hr = tuner.Validate(tuneRequest); if (hr == 0) { // Set it hr = tuner.put_TuneRequest(tuneRequest); DsError.ThrowExceptionForHR(hr); // found a BDA Tuner and a BDA Capture that can connect to this network provider BDAUtils.AddBDATunerAndDemodulatorToGraph(graphBuilder, networkProvider, out bdaTuner, out bdaCapture); if ((bdaTuner != null) && (bdaCapture != null)) { // Create and add the mpeg2 demux mpeg2Demux = (IBaseFilter) new MPEG2Demultiplexer(); hr = graphBuilder.AddFilter(mpeg2Demux, "MPEG2 Demultiplexer"); DsError.ThrowExceptionForHR(hr); // connect it to the BDA Capture hr = capBuilder.RenderStream(null, null, bdaCapture, null, mpeg2Demux); DsError.ThrowExceptionForHR(hr); // Add the two mpeg2 transport stream helper filters BDAUtils.AddTransportStreamFiltersToGraph(graphBuilder, out bdaTIF, out bdaSecTab); if ((bdaTIF != null) && (bdaSecTab != null)) { // Render all the output pins of the demux (missing filters are added) for (int i = 0; i < 5; i++) { IPin pin = DsFindPin.ByDirection(mpeg2Demux, PinDirection.Output, i); hr = graphBuilder.Render(pin); Marshal.ReleaseComObject(pin); } } } } }
// Token: 0x0600034A RID: 842 RVA: 0x00013BA8 File Offset: 0x00011DA8 internal PropertyPageCollection(ICaptureGraphBuilder2 graphBuilder, IBaseFilter videoDeviceFilter, IBaseFilter audioDeviceFilter, IBaseFilter videoCompressorFilter, IBaseFilter audioCompressorFilter, SourceCollection videoSources, SourceCollection audioSources) { this.addFromGraph(graphBuilder, videoDeviceFilter, audioDeviceFilter, videoCompressorFilter, audioCompressorFilter, videoSources, audioSources); }
/// <summary>
/// Builds a preview graph for the first enumerated video input device:
/// binds the device, inserts a sample grabber (RGB24) with a managed
/// frame callback, renders the preview stream into a WinForms panel and
/// starts the graph.
/// NOTE(review): every HRESULT is stored in retVal but never checked —
/// failures are silently ignored; consider adding checks.
/// </summary>
private void CaptureVideo()
{
    int retVal;
    graph = (IGraphBuilder)new FilterGraph();
    capture = (ICaptureGraphBuilder2)new CaptureGraphBuilder();
    IMediaControl control = (IMediaControl)graph;
    IMediaEventEx eventEx = (IMediaEventEx)graph;
    retVal = capture.SetFiltergraph(graph);

    // First video input device becomes the source; bail out when none exist.
    Dictionary<string, IMoniker> devices = EnumDevices(Clsid.VideoInputDeviceCategory);
    if (devices.Count == 0)
    {
        return;
    }
    IMoniker moniker = devices.First().Value;
    object obj = null;
    moniker.BindToObject(null, null, typeof(IBaseFilter).GUID, out obj);
    IBaseFilter baseFilter = (IBaseFilter)obj;
    retVal = graph.AddFilter(baseFilter, devices.First().Key);

    // Sample grabber created from its well-known CLSID; ask for RGB24 video.
    Guid CLSID_SampleGrabber = new Guid("{C1F400A0-3F08-11D3-9F0B-006008039E37}");
    IBaseFilter grabber = Activator.CreateInstance(Type.GetTypeFromCLSID(CLSID_SampleGrabber)) as IBaseFilter;
    var media = new AMMediaType();
    media.MajorType = MediaType.Video;
    media.SubType = MediaSubType.RGB24;
    media.FormatType = FormatType.VideoInfo;
    retVal = ((ISampleGrabber)grabber).SetMediaType(media);

    // Read the device's current format to size the callback's buffers.
    object configObj;
    retVal = capture.FindInterface(PinCategory.Capture, MediaType.Video, baseFilter, typeof(IAMStreamConfig).GUID, out configObj);
    IAMStreamConfig config = (IAMStreamConfig)configObj;
    AMMediaType pmt;
    retVal = config.GetFormat(out pmt);
    var header = (VideoInfoHeader)Marshal.PtrToStructure(pmt.FormatPtr, typeof(VideoInfoHeader));
    var width = header.BmiHeader.Width;
    var height = header.BmiHeader.Height;
    // DWORD-aligned stride for 24bpp rows.
    var stride = 4 * ((24 * width + 31) / 32); //width * (header.BmiHeader.BitCount / 8);

    // Managed callback that receives each grabbed frame (mode 0 = SampleCB).
    callback = new SampleGrabberCallback() { Width = width, Height = height, Stride = stride };
    callback.callback = Image_OnPreview;
    retVal = ((ISampleGrabber)grabber).SetCallback(callback, 0);
    retVal = graph.AddFilter(grabber, "SampleGrabber");

    // Pins looked up by name; 'preview' is fetched but unused below.
    IPin output = GetPin(baseFilter, p => p.Name == "Capture");
    IPin input = GetPin(grabber, p => p.Name == "Input");
    IPin preview = GetPin(grabber, p => p.Name == "Output");
    //retVal = graph.ConnectDirect(output, input, pmt);
    //retVal =
    graph.Connect(output, input);
    retVal = capture.RenderStream(PinCategory.Preview, MediaType.Video, baseFilter, grabber, null);

    // Host the video window inside the WinForms panel embedded in this control.
    //var wih = new WindowInteropHelper(this);
    var panel = FindName("PART_VideoPanel") as System.Windows.Forms.Panel;
    IVideoWindow window = (IVideoWindow)graph;
    retVal = window.put_Owner(panel.Handle);
    retVal = window.put_WindowStyle(WindowStyles.WS_CHILD | WindowStyles.WS_CLIPCHILDREN);
    retVal = window.SetWindowPosition(0, 0, (int)panel.ClientSize.Width, (int)panel.ClientSize.Height);
    retVal = window.put_MessageDrain(panel.Handle);
    retVal = window.put_Visible(-1); //OATRUE

    // Start the graph.
    retVal = control.Run();
}
private void DeInit() { DeinterlaceLayoutList = null; rOT = null; mediaEvent = null; mediaControl = null; if (graphBuilder != null) Marshal.ReleaseComObject(graphBuilder); graphBuilder = null; if (captureGraphBuilder != null) Marshal.ReleaseComObject(captureGraphBuilder); captureGraphBuilder = null; // GC.Collect(); }
/// <summary>
/// Populate the internal InnerList with sources/physical connectors
/// found on the crossbars. Each instance of this class is limited
/// to video only or audio only sources (specified by the isVideoDevice
/// parameter on the constructor).
/// Fix vs. original: the branch
/// <c>if (isVideoDevice) add; else if (!isVideoDevice) add;</c> always
/// added the source, so it is collapsed to a single add.
/// </summary>
protected ArrayList findCrossbarSources(ICaptureGraphBuilder2 graphBuilder, IAMCrossbar crossbar, bool isVideoDevice)
{
    ArrayList sources = new ArrayList();
    int hr;
    int numOutPins;
    int numInPins;
    hr = crossbar.get_PinCounts(out numOutPins, out numInPins);
    if (hr < 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }

    // We loop through every combination of output and input pin
    // to see which combinations match.
    for (int cOut = 0; cOut < numOutPins; cOut++)
    {
        for (int cIn = 0; cIn < numInPins; cIn++)
        {
            // Can this combination be routed?
            hr = crossbar.CanRoute(cOut, cIn);
            if (hr == 0)
            {
                // Yes, this can be routed
                int relatedPin;
                PhysicalConnectorType connectorType;
                hr = crossbar.get_CrossbarPinInfo(true, cIn, out relatedPin, out connectorType);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }
                // NOTE(review): both branches of the original
                // isVideoDevice test added the source, so the check was a
                // no-op; the cutoff below admits only connector types
                // below Audio_Tuner regardless of device kind — confirm
                // whether audio devices should instead use the audio range.
                if (connectorType < PhysicalConnectorType.Audio_Tuner)
                {
                    sources.Add(new CrossbarSource(crossbar, cOut, cIn, connectorType));
                }
            }
        }
    }

    // Some silly drivers (*cough* Nvidia *cough*) add crossbars
    // with no real choices. Every input can only be routed to
    // one output. Loop through every Source and see if there is
    // at least one other Source with the same output pin.
    int refIndex = 0;
    while (refIndex < sources.Count)
    {
        bool found = false;
        CrossbarSource refSource = (CrossbarSource)sources[refIndex];
        for (int c = 0; c < sources.Count; c++)
        {
            CrossbarSource s = (CrossbarSource)sources[c];
            if ((refSource.OutputPin == s.OutputPin) && (refIndex != c))
            {
                found = true;
                break;
            }
        }
        if (found)
        {
            refIndex++;
        }
        else
        {
            sources.RemoveAt(refIndex);
        }
    }
    return (sources);
}
/// <summary>
/// Locates the IAMStreamConfig interface on the capture pin of the given
/// device filter and stores it in the AMStreamConfig field.
/// </summary>
/// <param name="captureGraphBuilder2">Builder used to search the graph.</param>
/// <param name="aDev">Capture device filter to query.</param>
private void InitAMStreamConfig(ICaptureGraphBuilder2 captureGraphBuilder2, IBaseFilter aDev)
{
    if (AMStreamConfig != null)
    {
        // IBaseFilter bf = (IBaseFilter)AMStreamConfig;
        // RemoveFilter(ref bf);
    }

    object comObj;
    int hr = captureGraphBuilder2.FindInterface(
        PinCategory.Capture, MediaType.Video, aDev,
        typeof(IAMStreamConfig).GUID, out comObj);
    DsError.ThrowExceptionForHR(hr);

    AMStreamConfig = comObj as IAMStreamConfig;
    if (AMStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }
}
/// <summary>
/// Renders every output pin of the stream buffer source filter. For each
/// pin the first media type is queried, and its major type is handed to the
/// capture graph builder so it can pick an appropriate renderer.
/// </summary>
/// <param name="streamBuffer">Source filter whose pins are enumerated and rendered.</param>
/// <param name="icgb2">Capture graph builder used to render each stream.</param>
private void RenderPins(IBaseFilter streamBuffer, ICaptureGraphBuilder2 icgb2)
{
    int hr;
    IEnumPins iep;

    hr = streamBuffer.EnumPins(out iep);
    DsError.ThrowExceptionForHR(hr);
    try
    {
        //hr = icgb2.RenderStream(null, null, pPin[0], null, null);
        // NOTE(review): this #if true / #endif pair straddles the try/finally
        // boundary; it compiles because the condition is always true.
#if true
        IPin[] pPin = new IPin[1];

        // Walk each pin of the stream buffer source
        // (iep.Next returns 0 while more pins are available)
        for (
            hr = iep.Next(1, pPin, IntPtr.Zero);
            hr == 0;
            hr = iep.Next(1, pPin, IntPtr.Zero)
            )
        {
            try
            {
                AMMediaType[] amt = new AMMediaType[1];
                IEnumMediaTypes pEnum;

                hr = pPin[0].EnumMediaTypes(out pEnum);
                DsError.ThrowExceptionForHR(hr);
                try
                {
                    // Grab the first media type
                    hr = pEnum.Next(1, amt, IntPtr.Zero);
                    DsError.ThrowExceptionForHR(hr);
                    try
                    {
                        // use the media type to render the stream
                        hr = icgb2.RenderStream(null, amt[0].majorType, pPin[0], null, null);
                        DsError.ThrowExceptionForHR(hr);
                    }
                    finally
                    {
                        // Free the unmanaged media type allocation
                        DsUtils.FreeAMMediaType(amt[0]);
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(pEnum);
                }
            }
            finally
            {
                // Release the pin before fetching the next one
                Marshal.ReleaseComObject(pPin[0]);
            }
        }
    }
    finally
    {
        Marshal.ReleaseComObject(iep);
    }
#endif
}
/// <summary>
/// Sets the capture parameters for the video capture device
/// </summary>
/// <param name="capGraph">Capture graph builder used to locate the stream config interface.</param>
/// <param name="captureFilter">The video capture source filter.</param>
/// <param name="mediaSubType">Desired media subtype (FourCC GUID), or Guid.Empty to keep the current one.</param>
/// <returns>true if the driver accepted the format; false otherwise.</returns>
private bool SetVideoCaptureParameters(ICaptureGraphBuilder2 capGraph, IBaseFilter captureFilter, Guid mediaSubType)
{
    /* The stream config interface */
    object streamConfig;

    /* Get the stream's configuration interface */
    int hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, captureFilter,
                                    typeof(IAMStreamConfig).GUID, out streamConfig);
    DsError.ThrowExceptionForHR(hr);

    var videoStreamConfig = streamConfig as IAMStreamConfig;

    /* If QueryInterface fails... */
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    try
    {
        /* The media type of the video */
        AMMediaType media;

        /* Get the AMMediaType for the video out pin */
        hr = videoStreamConfig.GetFormat(out media);
        DsError.ThrowExceptionForHR(hr);

        /* Make the VIDEOINFOHEADER 'readable' */
        var videoInfo = new VideoInfoHeader();
        Marshal.PtrToStructure(media.formatPtr, videoInfo);

        /* Setup the VIDEOINFOHEADER with the parameters we want */
        videoInfo.AvgTimePerFrame = DSHOW_ONE_SECOND_UNIT / FPS;
        videoInfo.BmiHeader.Width = DesiredWidth;
        videoInfo.BmiHeader.Height = DesiredHeight;

        if (mediaSubType != Guid.Empty)
        {
            /* Build the FourCC code from the first four bytes of the subtype GUID */
            byte[] b = mediaSubType.ToByteArray();
            int fourCC = b[0];
            fourCC |= b[1] << 8;
            fourCC |= b[2] << 16;
            fourCC |= b[3] << 24;

            videoInfo.BmiHeader.Compression = fourCC;
            media.subType = mediaSubType;
        }

        /* Copy the data back to unmanaged memory */
        Marshal.StructureToPtr(videoInfo, media.formatPtr, false);

        /* Set the format */
        hr = videoStreamConfig.SetFormat(media);

        /* We don't want any memory leaks, do we? */
        DsUtils.FreeAMMediaType(media);

        if (hr < 0)
            return false;

        return true;
    }
    finally
    {
        /* BUGFIX: the QI'd IAMStreamConfig COM object was never released,
           leaking a COM reference on every call to this method. */
        Marshal.ReleaseComObject(streamConfig);
    }
}
/// <summary> do cleanup and release DirectShow. </summary>
/// <remarks>
/// Best-effort teardown: the whole sequence is wrapped in a catch-all so a
/// failure in one step does not prevent the remaining releases. Order
/// matters: the graph is stopped and event notification detached before
/// the COM objects are released.
/// </remarks>
void CloseInterfaces()
{
    int hr;
    try
    {
#if DEBUG
        // Remove the graph from the running object table (debug builds only)
        if (rotCookie != 0)
        {
            DsROT.RemoveGraphFromRot(ref rotCookie);
        }
#endif
        // Stop the graph before releasing anything
        if (mediaCtrl != null)
        {
            hr = mediaCtrl.Stop();
            mediaCtrl = null;
        }
        // Detach window-message event notification
        if (mediaEvt != null)
        {
            hr = mediaEvt.SetNotifyWindow(IntPtr.Zero, WM_GRAPHNOTIFY, IntPtr.Zero);
            mediaEvt = null;
        }
        // Hide the video window and give ownership back to the system
        if (videoWin != null)
        {
            hr = videoWin.put_Visible(DsHlp.OAFALSE);
            hr = videoWin.put_Owner(IntPtr.Zero);
            videoWin = null;
        }
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
        }
        capGraph = null;
        if (graphBuilder != null)
        {
            Marshal.ReleaseComObject(graphBuilder);
        }
        graphBuilder = null;
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        capFilter = null;
        // Dispose the enumerated capture devices
        if (capDevices != null)
        {
            foreach (DsDevice d in capDevices)
            {
                d.Dispose();
            }
            capDevices = null;
        }
    }
    catch (Exception)
    {
        // Intentionally swallowed: cleanup must not throw.
    }
}
/// <summary>
/// destroys the graph and cleans up any resources
/// </summary>
/// <remarks>
/// Strictly ordered teardown: cancel EPG grabbing, free sub-channels, stop
/// the graph (polling up to ~3s), close plugins/CA, release filters, then
/// the graph itself, and finally unregister devices. Do not reorder.
/// </remarks>
protected void Decompose()
{
    // Nothing to do without a graph, or when called from the wrong thread.
    if (_graphBuilder == null || !CheckThreadId())
        return;
    Log.Log.WriteFile("dvb:Decompose");
    if (_epgGrabbing)
    {
        if (_epgGrabberCallback != null && _epgGrabbing)
        {
            Log.Log.Epg("dvb:cancel epg->decompose");
            _epgGrabberCallback.OnEpgCancelled();
        }
        _epgGrabbing = false;
    }
    FreeAllSubChannels();
    Log.Log.WriteFile(" stop");
    // Decompose the graph
    int counter = 0, hr = 0;
    FilterState state = FilterState.Running;
    hr = ((IMediaControl)_graphBuilder).Stop();
    // Poll (100ms x 30 = ~3 seconds) until the graph reports Stopped.
    while (state != FilterState.Stopped)
    {
        System.Threading.Thread.Sleep(100);
        hr = ((IMediaControl)_graphBuilder).GetState(10, out state);
        counter++;
        if (counter >= 30)
        {
            if (state != FilterState.Stopped)
                Log.Log.Error("dvb:graph still running");
            break;
        }
    }
    //In case MDPlugs exists then close and release them
    if (_mdplugs != null)
    {
        Log.Log.Info(" Closing MDAPI Plugins");
        _mdplugs.Close();
        _mdplugs = null;
    }
    if (_conditionalAccess != null)
    {
        Log.Log.Info(" Disposing ConditionalAccess");
        _conditionalAccess.Dispose();
        _conditionalAccess = null;
    }
    Log.Log.WriteFile(" free...");
    _interfaceChannelScan = null;
    _interfaceEpgGrabber = null;
    _previousChannel = null;
    if (_filterMpeg2DemuxTif != null)
    {
        Release.ComObject("_filterMpeg2DemuxTif filter", _filterMpeg2DemuxTif);
        _filterMpeg2DemuxTif = null;
    }
    if (_filterNetworkProvider != null)
    {
        Release.ComObject("_filterNetworkProvider filter", _filterNetworkProvider);
        _filterNetworkProvider = null;
    }
    if (_infTeeMain != null)
    {
        Release.ComObject("main inftee filter", _infTeeMain);
        _infTeeMain = null;
    }
    if (_infTeeSecond != null)
    {
        Release.ComObject("second inftee filter", _infTeeSecond);
        _infTeeSecond = null;
    }
    // Release repeatedly until the RCW reference count reaches zero.
    if (_filterTuner != null)
    {
        while (Release.ComObject(_filterTuner) > 0) ;
        _filterTuner = null;
    }
    if (_filterCapture != null)
    {
        while (Release.ComObject(_filterCapture) > 0) ;
        _filterCapture = null;
    }
    if (_filterWinTvUsb != null)
    {
        Log.Log.Info(" Stopping WinTVCI module");
        winTvCiHandler.Shutdown();
        while (Release.ComObject(_filterWinTvUsb) > 0) ;
        _filterWinTvUsb = null;
    }
    if (_filterTIF != null)
    {
        Release.ComObject("TIF filter", _filterTIF);
        _filterTIF = null;
    }
    //if (_filterSectionsAndTables != null)
    //{
    //  Release.ComObject("secions&tables filter", _filterSectionsAndTables); _filterSectionsAndTables = null;
    //}
    Log.Log.WriteFile(" free pins...");
    // NOTE(review): "_filterTsWriter as IBaseFilter != null" parses as
    // "(_filterTsWriter as IBaseFilter) != null" - an interface check,
    // not a plain null check.
    if (_filterTsWriter as IBaseFilter != null)
    {
        Release.ComObject("TSWriter filter", _filterTsWriter);
        _filterTsWriter = null;
    }
    else
    {
        Log.Log.Debug("!!! Error releasing TSWriter filter (_filterTsWriter as IBaseFilter was null!)");
        _filterTsWriter = null;
    }
    Log.Log.WriteFile(" free graph...");
    if (_rotEntry != null)
    {
        _rotEntry.Dispose();
        _rotEntry = null;
    }
    if (_capBuilder != null)
    {
        Release.ComObject("capture builder", _capBuilder);
        _capBuilder = null;
    }
    if (_graphBuilder != null)
    {
        // Remove all filters before releasing the graph itself
        FilterGraphTools.RemoveAllFilters(_graphBuilder);
        Release.ComObject("graph builder", _graphBuilder);
        _graphBuilder = null;
    }
    Log.Log.WriteFile(" free devices...");
    if (_deviceWinTvUsb != null)
    {
        DevicesInUse.Instance.Remove(_deviceWinTvUsb);
        _deviceWinTvUsb = null;
    }
    if (_tunerDevice != null)
    {
        DevicesInUse.Instance.Remove(_tunerDevice);
        _tunerDevice = null;
    }
    if (_captureDevice != null)
    {
        DevicesInUse.Instance.Remove(_captureDevice);
        _captureDevice = null;
    }
    if (_tunerStatistics != null)
    {
        for (int i = 0; i < _tunerStatistics.Count; i++)
        {
            IBDA_SignalStatistics stat = _tunerStatistics[i];
            while (Release.ComObject(stat) > 0) ;
        }
        _tunerStatistics.Clear();
    }
    Log.Log.WriteFile(" decompose done...");
    _graphState = GraphState.Idle;
}
/// <summary>
/// Configure the graph to output the results to a video window.
/// </summary>
/// <remarks>
/// The callback routines are invoked once for each sample. This allows for additional processing to
/// be performed on the video or audio buffers.
/// </remarks>
/// <param name="hWnd">Window handle to render to, or IntPtr.Zero to render to its own window</param>
/// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
/// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
/// <param name="video">Render only video</param>
/// <param name="audio">Render only audio</param>
private void RenderToWindow(IntPtr hWnd, IDESCombineCB pVideoCallback, IDESCombineCB pAudioCallback, bool video, bool audio)
{
    int hr;
    IPin pPin;
    IVideoWindow pVidWindow;
    IAMTimelineObj pGroup;

    // Perform initialization common to all render routines
    RenderCommon();

    // Contains useful routines for creating the graph
    ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

    try
    {
        hr = icgb.SetFiltergraph(m_pGraph);
        DESError.ThrowExceptionForHR(hr);

        int NumGroups;
        hr = m_pTimeline.GetGroupCount(out NumGroups);
        DESError.ThrowExceptionForHR(hr);

        // Walk the groups. For DESCombine, there is one group that
        // contains all the video, and a second group for the audio.
        for (int i = 0; i < NumGroups; i++)
        {
            hr = m_pTimeline.GetGroup(out pGroup, i);
            DESError.ThrowExceptionForHR(hr);

            try
            {
                // Inform the graph we will be previewing (rather than writing to disk)
                IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
                hr = pTLGroup.SetPreviewMode(true);
                DESError.ThrowExceptionForHR(hr);
            }
            finally
            {
                // Release the group
                Marshal.ReleaseComObject(pGroup);
            }

            // Get the IPin for the current group
            hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
            DESError.ThrowExceptionForHR(hr);

            try
            {
                // If this is the video pin
                if (video && IsVideo(pPin))
                {
                    // Get a video renderer
                    IBaseFilter ibfVideoRenderer = (IBaseFilter) new VideoRenderer();

                    try
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                        RenderWindowHelper(icgb, mcb, "Video", pPin, ibfVideoRenderer);
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(ibfVideoRenderer);
                    }
                }
                else if (audio)
                {
                    // Get an audio renderer
                    IBaseFilter ibfAudioRenderer = (IBaseFilter) new AudioRender();

                    try
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                        RenderWindowHelper(icgb, mcb, "Audio", pPin, ibfAudioRenderer);
                    }
                    finally
                    {
                        Marshal.ReleaseComObject(ibfAudioRenderer);
                    }
                }
            }
            finally
            {
                Marshal.ReleaseComObject(pPin);
            }
        }

        if (video)
        {
            // Configure the video window
            pVidWindow = (IVideoWindow)m_pGraph;

            // If a window handle was supplied, use it
            if (hWnd != IntPtr.Zero)
            {
                hr = pVidWindow.put_Owner(hWnd);
                DESError.ThrowExceptionForHR(hr);
            }
            else
            {
                // Use our own window
                hr = pVidWindow.put_Caption("Video Rendering Window");
                DESError.ThrowExceptionForHR(hr);

                // since no user interaction is allowed, remove
                // system menu and maximize/minimize buttons
                WindowStyle lStyle = 0;
                hr = pVidWindow.get_WindowStyle(out lStyle);
                DESError.ThrowExceptionForHR(hr);

                lStyle &= ~(WindowStyle.MinimizeBox | WindowStyle.MaximizeBox | WindowStyle.SysMenu);
                hr = pVidWindow.put_WindowStyle(lStyle);
                DESError.ThrowExceptionForHR(hr);
            }
        }
    }
    finally
    {
        Marshal.ReleaseComObject(icgb);
    }
}
/// <summary>
/// Tears down the graph interfaces, then disposes/releases the remaining
/// resources owned by this instance under the instance lock.
/// </summary>
public void CloseResources()
{
    CloseInterfaces();

    // NOTE(review): locking on 'this' is a known anti-pattern (outside code
    // can take the same lock); kept as-is to preserve existing behavior.
    lock (this)
    {
        if (latestBitmap != null)
        {
            latestBitmap.Dispose();
            latestBitmap = null;
        }

        if (samplGrabber != null)
        {
            Marshal.ReleaseComObject(samplGrabber);
            samplGrabber = null;
        }

        if (capBuilder != null)
        {
            Marshal.ReleaseComObject(capBuilder);
            capBuilder = null;
        }

        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }

        // The ROT entry only exists when graph debugging is enabled.
        if (Settings.Default.VideoGraphDebugMode && rot != null)
        {
            rot.Dispose();
            rot = null;
        }

        crossbar = null;
    }
}
/// <summary>
/// Configure the graph to output the results to an AVI file.
/// </summary>
/// <param name="sOutputFile">File name for output (must not be null)</param>
/// <param name="ibfVideoCompressor">IBaseFilter of a video compressor to use (or null for none).
/// Note that <b><i>no</i></b> configuration of this compressor is done by this method. It merely adds it
/// to the graph in the appropriate place. Also, the pointer is not invalidated, so any configuration
/// of the compressor that needs to be done after being added to the graph can still be done.</param>
/// <param name="ibfAudioCompressor">IBaseFilter of an audio compressor to use (or null for none).
/// Note that <b><i>no</i></b> configuration of this compressor is done by this method. It merely adds it
/// to the graph in the appropriate place. Also, the pointer is not invalidated, so any configuration
/// of the compressor that needs to be done after being added to the graph can still be done.</param>
/// <param name="pVideoCallback">Callback routine to be called for each video frame or null for no callback</param>
/// <param name="pAudioCallback">Callback routine to be called for each audio frame or null for no callback</param>
/// <remarks>
/// The callback routines are invoked once for each sample. This allows for additional processing to
/// be performed on the video or audio buffers.
/// </remarks>
public void RenderToAVI(
    string sOutputFile,
    IBaseFilter ibfVideoCompressor,
    IBaseFilter ibfAudioCompressor,
    IDESCombineCB pVideoCallback,
    IDESCombineCB pAudioCallback)
{
    int hr;
    IPin pPin;

    if (sOutputFile == null)
    {
        throw new Exception("Output file name cannot be null");
    }

    // Perform initialization common to all render routines
    RenderCommon();

    // Contains useful routines for creating the graph
    ICaptureGraphBuilder2 icgb = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

    try
    {
        hr = icgb.SetFiltergraph(m_pGraph);
        DESError.ThrowExceptionForHR(hr);

        // Create the file writer
        IBaseFilter pMux;
        IFileSinkFilter pFilter;
        hr = icgb.SetOutputFileName(MediaSubType.Avi, sOutputFile, out pMux, out pFilter);
        DESError.ThrowExceptionForHR(hr);

        // We don't need this, so let it go
        Marshal.ReleaseComObject(pFilter);

        try
        {
            int NumGroups;
            hr = m_pTimeline.GetGroupCount(out NumGroups);
            DESError.ThrowExceptionForHR(hr);

            // Walk the groups. For this class, there is one group that
            // contains all the video, and a second group for the audio.
            for (int i = 0; i < NumGroups; i++)
            {
                IAMTimelineObj pGroup;

                hr = m_pTimeline.GetGroup(out pGroup, i);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    // Inform the graph we will be writing to disk (rather than previewing)
                    IAMTimelineGroup pTLGroup = (IAMTimelineGroup)pGroup;
                    hr = pTLGroup.SetPreviewMode(false);
                    DESError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    // Release the group
                    Marshal.ReleaseComObject(pGroup);
                }

                // Get the IPin for the current group
                hr = m_pRenderEngine.GetGroupOutputPin(i, out pPin);
                DESError.ThrowExceptionForHR(hr);

                try
                {
                    if (IsVideo(pPin))
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Video, pVideoCallback, (IMediaEventSink)m_pGraph, EC_VideoFileComplete);
                        RenderHelper(icgb, mcb, "Video", pPin, ibfVideoCompressor, pMux);
                    }
                    else
                    {
                        // Create a sample grabber, add it to the graph and connect it all up
                        AVCallback mcb = new AVCallback(m_Audio, pAudioCallback, (IMediaEventSink)m_pGraph, EC_AudioFileComplete);
                        RenderHelper(icgb, mcb, "Audio", pPin, ibfAudioCompressor, pMux);
                    }
                }
                finally
                {
                    Marshal.ReleaseComObject(pPin);
                }
            }
        }
        finally
        {
            Marshal.ReleaseComObject(pMux);
        }
    }
    finally
    {
        Marshal.ReleaseComObject(icgb);
    }
}
/// <summary>
/// Tries to find a driver-advertised capture format matching selectedFormat
/// and applies it. If no match is found, falls back to the current format and
/// overrides frame rate / width / height with the caller-supplied values
/// (0 means "keep the current value"; the applied values are written back
/// through the ref parameters).
/// </summary>
/// <param name="capBuilder">Capture graph builder used to locate IAMStreamConfig.</param>
/// <param name="capFilter">Video capture filter being configured.</param>
/// <param name="selectedFormat">Preferred format to match, or null.</param>
/// <param name="iFrameRate">In: requested fps (0 = keep). Out: applied fps.</param>
/// <param name="iWidth">In: requested width (0 = keep). Out: applied width.</param>
/// <param name="iHeight">In: requested height (0 = keep). Out: applied height.</param>
private void SetConfigParms(ICaptureGraphBuilder2 capBuilder, IBaseFilter capFilter, VideoFormatHelper.SupportedVideoFormat selectedFormat, ref float iFrameRate, ref int iWidth, ref int iHeight)
{
    object o;
    AMMediaType media;
    IAMStreamConfig videoStreamConfig;
    IAMVideoControl videoControl = capFilter as IAMVideoControl;

    int hr = capBuilder.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);

    videoStreamConfig = o as IAMStreamConfig;
    try
    {
        if (videoStreamConfig == null)
        {
            throw new Exception("Failed to get IAMStreamConfig");
        }

        int iCount = 0, iSize = 0;
        hr = videoStreamConfig.GetNumberOfCapabilities(out iCount, out iSize);
        DsError.ThrowExceptionForHR(hr);

        VideoInfoHeader vMatching = null;
        VideoFormatHelper.SupportedVideoFormat entry = null;

        IntPtr taskMemPointer = Marshal.AllocCoTaskMem(iSize);

        AMMediaType pmtConfig = null;
        // Scan the driver's advertised formats for one matching selectedFormat.
        // On a match we break with vMatching/pmtConfig still set; otherwise
        // vMatching is reset to null at the end of each iteration.
        // NOTE(review): each GetStreamCaps call yields a new AMMediaType in
        // pmtConfig but only the last one is freed below - earlier iterations
        // appear to be leaked; confirm against DirectShowLib's marshaling.
        for (int iFormat = 0; iFormat < iCount; iFormat++)
        {
            IntPtr ptr = IntPtr.Zero;

            hr = videoStreamConfig.GetStreamCaps(iFormat, out pmtConfig, taskMemPointer);
            DsError.ThrowExceptionForHR(hr);

            vMatching = (VideoInfoHeader)Marshal.PtrToStructure(pmtConfig.formatPtr, typeof(VideoInfoHeader));

            if (vMatching.BmiHeader.BitCount > 0)
            {
                entry = new VideoFormatHelper.SupportedVideoFormat()
                {
                    Width = vMatching.BmiHeader.Width,
                    Height = vMatching.BmiHeader.Height,
                    BitCount = vMatching.BmiHeader.BitCount,
                    FrameRate = 10000000.0 / vMatching.AvgTimePerFrame
                };

                if (entry.Matches(selectedFormat))
                {
                    // WE FOUND IT !!!
                    break;
                }
            }

            vMatching = null;
        }

        if (vMatching != null)
        {
            // Apply the matched format directly.
            hr = videoStreamConfig.SetFormat(pmtConfig);
            DsError.ThrowExceptionForHR(hr);

            // NOTE(review): integer division - fractional frame rates are
            // truncated before the implicit conversion to float; confirm
            // whether 10000000.0 was intended (as used in the loop above).
            iFrameRate = 10000000/vMatching.AvgTimePerFrame;
            iWidth = vMatching.BmiHeader.Width;
            iHeight = vMatching.BmiHeader.Height;
        }
        else
        {
            // No advertised match: start from the current format and override
            // the requested fields.
            hr = videoStreamConfig.GetFormat(out media);
            DsError.ThrowExceptionForHR(hr);

            // Copy out the videoinfoheader
            VideoInfoHeader v = new VideoInfoHeader();
            Marshal.PtrToStructure(media.formatPtr, v);

            if (selectedFormat != null && iWidth == 0 && iHeight == 0)
            {
                // Use the config from the selected format
                iWidth = selectedFormat.Width;
                iHeight = selectedFormat.Height;
                iFrameRate = (float) selectedFormat.FrameRate;
            }

            // If overriding the framerate, set the frame rate
            if (iFrameRate > 0)
            {
                int newAvgTimePerFrame = (int)Math.Round(10000000 / iFrameRate);
                Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.AvgTimePerFrame from {0} to {1}", v.AvgTimePerFrame, newAvgTimePerFrame));
                v.AvgTimePerFrame = newAvgTimePerFrame;
            }
            else
                iFrameRate = 10000000 / v.AvgTimePerFrame;

            // If overriding the width, set the width
            if (iWidth > 0)
            {
                Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.BmiHeader.Width from {0} to {1}", v.BmiHeader.Width, iWidth));
                v.BmiHeader.Width = iWidth;
            }
            else
                iWidth = v.BmiHeader.Width;

            // If overriding the Height, set the Height
            if (iHeight > 0)
            {
                Trace.WriteLine(string.Format("Overwriting VideoInfoHeader.BmiHeader.Height from {0} to {1}", v.BmiHeader.Height, iHeight));
                v.BmiHeader.Height = iHeight;
            }
            else
                iHeight = v.BmiHeader.Height;

            // Copy the media structure back
            Marshal.StructureToPtr(v, media.formatPtr, false);

            // Set the new format
            hr = videoStreamConfig.SetFormat(media);
            try
            {
                DsError.ThrowExceptionForHR(hr);
            }
            catch (Exception ex)
            {
                // If setting the format failed then log the error but try to continue
                Trace.WriteLine(ex.GetFullStackTrace());
            }

            DsUtils.FreeAMMediaType(media);
            media = null;
        }

        Marshal.FreeCoTaskMem(taskMemPointer);
        DsUtils.FreeAMMediaType(pmtConfig);
        pmtConfig = null;

        // Fix upsidedown video
        if (videoControl != null)
        {
            // NOTE: Flipping detection and fixing doesn't seem to work!
            //IPin pPin = DsFindPin.ByCategory(capFilter, PinCategory.Capture, 0);
            //VideoFormatHelper.FixFlippedVideo(videoControl, pPin);

            //pPin = DsFindPin.ByCategory(capFilter, PinCategory.Preview, 0);
            //VideoFormatHelper.FixFlippedVideo(videoControl, pPin);
        }
    }
    finally
    {
        Marshal.ReleaseComObject(videoStreamConfig);
    }
}
/// <summary> build the capture graph for grabber. </summary>
/// <param name="dev">Video input device to capture from.</param>
/// <param name="iFrameRate">Requested frame rate, or 0 to keep the default.</param>
/// <param name="iWidth">Requested width, or 0 to keep the default.</param>
/// <param name="iHeight">Requested height, or 0 to keep the default.</param>
private void SetupGraph(DsDevice dev, int iFrameRate, int iWidth, int iHeight)
{
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;

    // Get the graphbuilder object
    m_FilterGraph = (IFilterGraph2) new FilterGraph();
    m_mediaCtrl = m_FilterGraph as IMediaControl;

    try
    {
        // Get the ICaptureGraphBuilder2 and the SampleGrabber interface
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        int hr = capGraph.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the video device
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // If any of the default config items are set, apply them
        if (iFrameRate + iHeight + iWidth > 0)
        {
            SetConfigParms(capGraph, capFilter, iFrameRate, iWidth, iHeight);
        }

        // Connect capture device -> sample grabber
        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
        DsError.ThrowExceptionForHR(hr);

        SaveSizeInfo(sampGrabber);
    }
    finally
    {
        // Local RCW references are no longer needed; the graph keeps its own.
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
            capGraph = null;
        }
    }
}
/*
// Uncomment this version of FindCaptureDevice to use the DsDevice helper class
// (and comment the first version of course)
public IBaseFilter FindCaptureDevice()
{
  System.Collections.ArrayList devices;
  object source;

  // Get all video input devices
  devices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

  // Take the first device
  DsDevice device = (DsDevice)devices[0];

  // Bind Moniker to a filter object
  Guid iid = typeof(IBaseFilter).GUID;
  device.Mon.BindToObject(null, null, ref iid, out source);

  // An exception is thrown if cast fail
  return (IBaseFilter) source;
}
*/

/// <summary>
/// Creates the filter graph and capture graph builder, casts out the media
/// control / video window / media event interfaces, and registers this
/// window to receive graph event notifications.
/// </summary>
public void GetInterfaces()
{
    // An exception is thrown if a cast fails
    this.graphBuilder = (IGraphBuilder) new FilterGraph();
    this.captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    this.mediaControl = (IMediaControl) this.graphBuilder;
    this.videoWindow = (IVideoWindow) this.graphBuilder;
    this.mediaEventEx = (IMediaEventEx) this.graphBuilder;

    // Route graph events through this window's message loop
    int hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Builds a playback graph for the given file using VMR9 in windowless mode,
/// clipped to this window's client area, and caches the control/seeking/
/// video/audio interfaces for later use.
/// </summary>
/// <param name="filename">Path of the media file; an empty string is a no-op.</param>
private void LoadMovieInWindow(string filename)
{
    int hr = 0;

    if (filename == string.Empty)
    {
        return;
    }

    this.graphBuilder = (IGraphBuilder) new FilterGraph();
    this.captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

    this.vmr9 = new VideoMixingRenderer9();
    this.vmrConfig = this.vmr9 as IVMRFilterConfig9;

    // Attach the filter graph to the capture graph
    hr = this.captureGraphBuilder.SetFiltergraph(this.graphBuilder);
    DsError.ThrowExceptionForHR(hr);

    hr = this.graphBuilder.AddFilter(vmr9 as IBaseFilter, "VideoMixingRenderer9");
    DsError.ThrowExceptionForHR(hr);

    // Windowless mode: the renderer draws into our window instead of its own
    hr = this.vmrConfig.SetRenderingMode(VMR9Mode.Windowless);
    DsError.ThrowExceptionForHR(hr);

    this.windowLessControl = this.vmr9 as IVMRWindowlessControl9;
    this.windowLessControl.SetVideoClippingWindow(this.Handle);
    this.windowLessControl.SetVideoPosition(null, new DsRect(0, 0, this.ClientSize.Width, this.ClientSize.Height));

    IBaseFilter fileSourceFilter;
    hr = this.graphBuilder.AddSourceFilter(filename, "WebCamSource", out fileSourceFilter);
    DsError.ThrowExceptionForHR(hr);

    // Let the capture graph builder connect source -> VMR9
    hr = this.captureGraphBuilder.RenderStream(null, null, fileSourceFilter, null, vmr9 as IBaseFilter);
    DsError.ThrowExceptionForHR(hr);

    //// Have the graph builder construct its the appropriate graph automatically
    //hr = this.graphBuilder.RenderFile(filename, null);
    //DsError.ThrowExceptionForHR(hr);

    // QueryInterface for DirectShow interfaces
    this.mediaControl = (IMediaControl)this.graphBuilder;
    //this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
    this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
    //this.mediaPosition = (IMediaPosition)this.graphBuilder;

    // Query for video interfaces, which may not be relevant for audio files
    ////this.videoWindow = this.graphBuilder as IVideoWindow;
    this.basicVideo = this.graphBuilder as IBasicVideo;

    // Query for audio interfaces, which may not be relevant for video-only files
    this.basicAudio = this.graphBuilder as IBasicAudio;

    // Is this an audio-only file (no video component)?
    CheckVisibility();

    //// Have the graph signal event via window callbacks for performance
    //hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WMGraphNotify, IntPtr.Zero);
    //DsError.ThrowExceptionForHR(hr);

    if (!this.isAudioOnly)
    {
        // Re-apply the clipping window and position for the video case
        this.windowLessControl = this.vmr9 as IVMRWindowlessControl9;
        this.windowLessControl.SetVideoClippingWindow(this.Handle);
        this.windowLessControl.SetVideoPosition(null, new DsRect(0, 0, this.ClientSize.Width, this.ClientSize.Height));

        //hr = InitVideoWindow();
        //DsError.ThrowExceptionForHR(hr);

        GetFrameStepInterface();
    }

    // Complete window initialization
    //this.isFullScreen = false;
    this.currentPlaybackRate = 1.0;
    //UpdateToolTip();

#if DEBUG
    rot = new DsROTEntry(this.graphBuilder);
#endif
}
/// <summary>
/// Builds the capture graph: capture filter -> sample grabber -> either a
/// null renderer (preview only) or an AVI mux/file writer when an output
/// file is given. Also caches the frame size and device name.
/// </summary>
/// <param name="output_file">Output file path; null or empty means no file output.</param>
public virtual void Setup(string output_file)
{
    // Tear down any previous graph before rebuilding.
    this.Dispose();
    try
    {
        CxDSCameraParam param = this.Param;

        // Graph builder.
        // CoCreateInstance
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

        #region フィルタ追加.
        // Video capture (input) filter.
        IBaseFilter capture = CreateVideoCapture(param);
        if (capture == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(capture, "CaptureFilter");
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        this.CaptureFilter = capture;
        this.CaptureOutPin = capture_out;

        // Sample grabber.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");
        this.SampleGrabber = (ISampleGrabber)grabber;
        #endregion

        #region キャプチャビルダー:
        {
            int hr = 0;
            CaptureBuilder = (ICaptureGraphBuilder2)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_CaptureGraphBuilder2)));
            hr = CaptureBuilder.SetFiltergraph(GraphBuilder);

            if (string.IsNullOrEmpty(output_file))
            {
                // Renderer (no file output: terminate the stream in a null renderer).
                IBaseFilter renderer = null;
                renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
                if (renderer == null)
                    throw new System.IO.IOException();
                this.GraphBuilder.AddFilter(renderer, "Renderer");
                this.Renderer = renderer;

#if true
                // Use ICaptureGraphBuilder2.RenderStream instead of IGraphBuilder.Connect.
                // fig) [capture]-out->-in-[sample grabber]-out->-in-[null render]
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, renderer);
#else
                // Get the pins.
                IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
                IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
                IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);

                // Connect the pins.
                GraphBuilder.Connect(capture_out, grabber_in);
                GraphBuilder.Connect(grabber_out, renderer_in);

                // Keep the pins.
                //SampleGrabberInPin = grabber_in;
                //SampleGrabberOutPin = grabber_out;
                //RendererInPin = renderer_in;
#endif
            }
            else
            {
                // File output: route through an AVI mux and file writer.
                IBaseFilter mux = null;
                IFileSinkFilter sync = null;
                hr = CaptureBuilder.SetOutputFileName(new Guid(GUID.MEDIASUBTYPE_Avi), output_file, ref mux, ref sync);
                hr = CaptureBuilder.RenderStream(new Guid(GUID.PIN_CATEGORY_CAPTURE), new Guid(GUID.MEDIATYPE_Video), capture, grabber, mux);
                this.Mux = mux;
                this.Sync = sync;
            }
        }
        #endregion

        #region 保管: フレームサイズ.
        // Cache the negotiated frame size from the sample grabber's format.
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
            );
        #endregion

        #region 保管: デバイス名称.
        // Cache the device name, falling back to a lookup by filter index.
        try
        {
            if (string.IsNullOrEmpty(param.FilterInfo.Name) == false)
            {
                this.DeviceName = param.FilterInfo.Name;
            }
            else
            {
                int filter_index = param.FilterInfo.Index;
                List<DSLab.CxDSFilterInfo> filters = DSLab.Axi.GetFilterList(DSLab.GUID.CLSID_VideoInputDeviceCategory);
                if (0 <= filter_index && filter_index < filters.Count)
                {
                    this.DeviceName = filters[filter_index].Name;
                }
            }
        }
        catch (System.Exception)
        {
            this.DeviceName = "";
        }
        #endregion

        // DEBUG
#if DEBUG
        DebugPrint(this.GraphBuilder);
#endif
    }
    catch (Exception ex)
    {
        // Clean up the partially-built graph, then surface as a library exception.
        this.Dispose();
        throw new DSLab.CxDSException(ex);
    }
}
/// <summary> build the capture graph for grabber. </summary>
/// <param name="dev">Audio input device to capture from.</param>
/// <param name="iSampleRate">Requested sample rate, or 0 to keep the device default.</param>
/// <param name="iChannels">Requested channel count, or 0 to keep the device default.</param>
private void SetupGraph(DsDevice dev, int iSampleRate, int iChannels)
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;
    IBaseFilter baseGrabFlt = null;
    IBaseFilter nullrenderer = null;
    // BUGFIX: the original declared "IMediaFilter mediaFilt = m_FilterGraph as
    // IMediaFilter;" here - it read m_FilterGraph BEFORE the assignment below
    // and the local was never used; removed.

    // Get the graphbuilder object
    m_FilterGraph = (IFilterGraph2) new FilterGraph();
    m_mediaCtrl = m_FilterGraph as IMediaControl;

    try
    {
        // Get the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Get the SampleGrabber interface.
        // BUGFIX: the original created a second SampleGrabber further down and
        // overwrote this reference, leaking the first COM instance; a single
        // instance is created now.
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the audio device
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Audio input", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // If any of the default config items are set
        if (iSampleRate + iChannels > 0)
        {
            SetConfigParms(capGraph, capFilter, iSampleRate, iChannels);
        }

        baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // ---------------------------------
        // Connect the file filter to the sample grabber

        // Hopefully this will be the audio pin, we could check by reading it's mediatype
        IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);

        // Get the input pin from the sample grabber
        IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);

        hr = m_FilterGraph.Connect(iPinOut, iPinIn);
        DsError.ThrowExceptionForHR(hr);

        // Add the null renderer to the graph
        nullrenderer = new NullRenderer() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
        DsError.ThrowExceptionForHR(hr);

        // ---------------------------------
        // Connect the sample grabber to the null renderer

        iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
        iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);

        hr = m_FilterGraph.Connect(iPinOut, iPinIn);
        DsError.ThrowExceptionForHR(hr);

        // Read and cache the resulting settings
        SaveSizeInfo(sampGrabber);
    }
    finally
    {
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
            capGraph = null;
        }
    }
}
/// <summary>
/// Collects the input pins of the device filter as selectable audio sources.
/// Only filters exposing IAMAudioInputMixer are examined. If exactly one
/// source is found the list is returned empty, because there is nothing for
/// the user to choose between.
/// </summary>
/// <param name="graphBuilder">Capture graph builder (unused here, kept for signature compatibility).</param>
/// <param name="deviceFilter">Audio device filter whose input pins are enumerated.</param>
/// <returns>List of AudioSource entries (empty when zero or one pin was found).</returns>
protected ArrayList findAudioSources(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter)
{
    ArrayList sources = new ArrayList();
    IAMAudioInputMixer audioInputMixer = deviceFilter as IAMAudioInputMixer;
    if ( audioInputMixer != null )
    {
        // Get a pin enumerator off the filter
        IEnumPins pinEnum;
        int hr = deviceFilter.EnumPins( out pinEnum );
        // BUGFIX: Reset() was previously called BEFORE the hr/null check,
        // which threw a NullReferenceException whenever EnumPins failed.
        if( (hr == 0) && (pinEnum != null) )
        {
            pinEnum.Reset();

            // Loop through each pin
            IPin[] pins = new IPin[1];
            int f;
            do
            {
                // Get the next pin
                hr = pinEnum.Next( 1, pins, out f );
                if( (hr == 0) && (pins[0] != null) )
                {
                    // Is this an input pin?
                    PinDirection dir = PinDirection.Output;
                    hr = pins[0].QueryDirection( out dir );
                    if( (hr == 0) && (dir == (PinDirection.Input)) )
                    {
                        // Add the input pin to the sources list.
                        // (AudioSource keeps the pin reference, so the pin is
                        // not released here.)
                        AudioSource source = new AudioSource( pins[0] );
                        sources.Add( source );
                    }
                    pins[0] = null;
                }
            } while( hr == 0 );

            Marshal.ReleaseComObject( pinEnum );
            pinEnum = null;
        }
    }

    // If there is only one source, don't return it
    // because there is nothing for the user to choose.
    // (Hopefully that single source is already enabled).
    if ( sources.Count == 1 )
        sources.Clear();

    return( sources );
}
/// <summary>
/// Populate the internal InnerList with sources/physical connectors
/// found on the crossbars. Each instance of this class is limited
/// to video only or audio only sources ( specified by the isVideoDevice
/// parameter on the constructor) so we check each source before adding
/// it to the list.
/// </summary>
/// <param name="graphBuilder">Capture graph builder (not used by this method).</param>
/// <param name="crossbar">Crossbar whose routable output/input pin combinations are enumerated.</param>
/// <param name="isVideoDevice">True to keep video connectors, false to keep audio connectors.</param>
/// <returns>ArrayList of CrossbarSource entries of the requested kind.</returns>
protected ArrayList FindCrossbarSources(ICaptureGraphBuilder2 graphBuilder, IAMCrossbar crossbar, bool isVideoDevice)
{
    ArrayList sources = new ArrayList();
    int hr;
    int numOutPins;
    int numInPins;
    hr = crossbar.get_PinCounts(out numOutPins, out numInPins);
    if (hr < 0)
    {
        Marshal.ThrowExceptionForHR(hr);
    }

    // We loop through every combination of output and input pin
    // to see which combinations match.

    // Loop through output pins
    for (int cOut = 0; cOut < numOutPins; cOut++)
    {
        // Loop through input pins
        for (int cIn = 0; cIn < numInPins; cIn++)
        {
            // Can this combination be routed?
            hr = crossbar.CanRoute(cOut, cIn);
            if (hr == 0)
            {
                // Yes, this can be routed
                int relatedInputPin;
                PhysicalConnectorType connectorType;
                hr = crossbar.get_CrossbarPinInfo(true, cIn, out relatedInputPin, out connectorType);
                if (hr < 0)
                {
                    Marshal.ThrowExceptionForHR(hr);
                }

                // Add it to the list
                CrossbarSource source = new CrossbarSource(crossbar, cOut, cIn, relatedInputPin, connectorType);
                sources.Add(source);
            }
        }
    }

    // Some silly drivers (*cough* Nvidia *cough*) add crossbars
    // with no real choices. Every input can only be routed to
    // one output. Loop through every Source and see if there
    // at least one other Source with the same output pin.
    // Sources whose output pin is unique offer no actual choice,
    // so they are removed by the loop below.
    int refIndex = 0;
    while (refIndex < sources.Count)
    {
        bool found = false;
        CrossbarSource refSource = (CrossbarSource)sources[refIndex];
        for (int c = 0; c < sources.Count; c++)
        {
            CrossbarSource s = (CrossbarSource)sources[c];
            if ((refSource.OutputPin == s.OutputPin) && (refIndex != c))
            {
                found = true;
                break;
            }
        }
        if (found)
        {
            // Keep this source; advance to the next one.
            refIndex++;
        }
        else
        {
            // Unique output pin: remove. Note refIndex is NOT advanced,
            // because the next element shifts into this slot.
            sources.RemoveAt(refIndex);
        }
    }

    // Some of the video input pins have related audio pins
    // that should be connected at the same time. We noted the pin number
    // in the CrossbarSource.RelatedInputPin. Now that we have all
    // the sources, lookup the CrossbarSource object associated with
    // that pin
    foreach (CrossbarSource source in sources)
    {
        if (source.RelatedInputPin != -1)
        {
            foreach (CrossbarSource related in sources)
            {
                if (source.RelatedInputPin == related.InputPin)
                {
                    source.RelatedInputSource = related;
                }
            }
        }
    }

    // Remove any sources that are not of the correct type.
    // Connector types below Audio_Tuner are video connectors;
    // Audio_Tuner and above are audio connectors.
    for (int c = 0; c < sources.Count; c++)
    {
        if (((CrossbarSource)sources[c]).ConnectorType < PhysicalConnectorType.Audio_Tuner)
        {
            if (!isVideoDevice)
            {
                // c-- compensates for the element shift caused by RemoveAt.
                sources.RemoveAt(c);
                c--;
            }
        }
        else
        {
            if (isVideoDevice)
            {
                sources.RemoveAt(c);
                c--;
            }
        }
    }
    return(sources);
}
/// <summary>
/// Populate the internal InnerList with sources/physical connectors
/// found on the crossbars. Each instance of this class is limited
/// to video only or audio only sources ( specified by the isVideoDevice
/// parameter on the constructor) so we check each source before adding
/// it to the list.
/// </summary>
/// <param name="graphBuilder">Capture graph builder (not used by this method).</param>
/// <param name="crossbar">Crossbar whose routable output/input pin combinations are enumerated.</param>
/// <param name="isVideoDevice">True to collect video connectors, false to collect audio connectors.</param>
/// <returns>ArrayList of CrossbarSource entries of the requested kind.</returns>
protected ArrayList findCrossbarSources(ICaptureGraphBuilder2 graphBuilder, IAMCrossbar crossbar, bool isVideoDevice)
{
    ArrayList sources = new ArrayList();
    int hr;
    int numOutPins;
    int numInPins;
    hr = crossbar.get_PinCounts(out numOutPins, out numInPins);
    if (hr < 0)
        Marshal.ThrowExceptionForHR(hr);

    // We loop through every combination of output and input pin
    // to see which combinations match.

    // Loop through output pins
    for (int cOut = 0; cOut < numOutPins; cOut++)
    {
        // Loop through input pins
        for (int cIn = 0; cIn < numInPins; cIn++)
        {
            // Can this combination be routed?
            hr = crossbar.CanRoute(cOut, cIn);
            if (hr == 0)
            {
                // Yes, this can be routed
                int relatedPin;
                PhysicalConnectorType connectorType;
                hr = crossbar.get_CrossbarPinInfo(true, cIn, out relatedPin, out connectorType);
                if (hr < 0)
                    Marshal.ThrowExceptionForHR(hr);

                // Is this the correct type? If so add to the InnerList.
                // BUG FIX: the original used brace-less nested ifs and fell
                // victim to the dangling-else problem — the "else" bound to
                // the inner "if (isVideoDevice)", so audio connectors were
                // never added and video connectors were added to audio
                // collections. Braces make the intended pairing explicit:
                // connector types below Audio_Tuner are video, the rest audio.
                CrossbarSource source = new CrossbarSource(crossbar, cOut, cIn, connectorType);
                if (connectorType < PhysicalConnectorType.Audio_Tuner)
                {
                    if (isVideoDevice)
                        sources.Add(source);
                }
                else
                {
                    if (!isVideoDevice)
                        sources.Add(source);
                }
            }
        }
    }

    // Some silly drivers (*cough* Nvidia *cough*) add crossbars
    // with no real choices. Every input can only be routed to
    // one output. Loop through every Source and see if there
    // at least one other Source with the same output pin; sources
    // whose output pin is unique offer no real choice and are dropped.
    int refIndex = 0;
    while (refIndex < sources.Count)
    {
        bool found = false;
        CrossbarSource refSource = (CrossbarSource)sources[refIndex];
        for (int c = 0; c < sources.Count; c++)
        {
            CrossbarSource s = (CrossbarSource)sources[c];
            if ((refSource.OutputPin == s.OutputPin) && (refIndex != c))
            {
                found = true;
                break;
            }
        }
        if (found)
            refIndex++;
        else
            sources.RemoveAt(refIndex);
    }

    return (sources);
}
/// <summary> Initialize collection with sources from graph. </summary> internal SourceCollection(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter, bool isVideoDevice) { AddFromGraph(graphBuilder, deviceFilter, isVideoDevice); }
// Set the Framerate, and video size private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, AMMediaType media) { int hr; object o; // Find the stream config interface hr = capGraph.FindInterface( PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o ); IAMStreamConfig videoStreamConfig = o as IAMStreamConfig; if (videoStreamConfig == null) { throw new Exception("Failed to get IAMStreamConfig"); } // Set the new format hr = videoStreamConfig.SetFormat( media ); DsError.ThrowExceptionForHR( hr ); DsUtils.FreeAMMediaType(media); media = null; }
/// <summary>
/// Connect the player: build a DirectShow graph that decodes the given
/// media file through video/audio sample grabbers, each terminated by a
/// null renderer, and hook up the grabber notification callback.
/// </summary>
/// <param name="filename">Path of the media file (.avi, .asf or .wmv) to open.</param>
private void Player_Connect(string filename)
{
    #region グラフビルダーの生成:
    {
        // Create the filter graph and the capture graph builder,
        // then attach the graph to the builder.
        Graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
        if (Graph == null)
            throw new System.IO.IOException("Failed to create a GraphBuilder.");

        Builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
        if (Builder == null)
            throw new System.IO.IOException("Failed to create a GraphBuilder.");
        Builder.SetFiltergraph(Graph);
    }
    #endregion

    #region 映像入力用: ソースフィルタを生成します.
    {
        // Video input: create the source filter for the file.
        Graph.AddSourceFilter(filename, "VideoSource", ref VideoSource);
        if (VideoSource == null)
            throw new System.IO.IOException("Failed to create a VideoSource.");
    }
    #endregion

    #region 映像捕獲用: サンプルグラバーを生成します.
    {
        // Video capture: create the sample grabber.
        VideoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (VideoGrabber == null)
            throw new System.IO.IOException("Failed to create a VideoGrabber.");
        Graph.AddFilter(VideoGrabber, "VideoGrabber");

        // Input format of the sample grabber filter.
        // SetMediaType specifies the required media type:
        // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
        // * Not every member of AM_MEDIA_TYPE needs to be filled in.
        // * By default the sample grabber has no preferred media type.
        // * Call this method before building the filter graph to make sure
        //   the sample grabber connects to the correct filter.
        // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
        // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
        {
            var grabber = (ISampleGrabber)VideoGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Video);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
            mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);           // Disable sample copies.
            grabber.SetOneShot(false);                 // Disable one-shot mode.
            //grabber.SetCallback(VideoGrabberCB, 0);  // 0: call the SampleCB method.
            grabber.SetCallback(VideoGrabberCB, 1);    // 1: call the BufferCB method.
        }
    }
    #endregion

    #region 音声捕獲用: サンプルグラバーを生成します.
    {
        // Audio capture: create the sample grabber.
        AudioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (AudioGrabber == null)
            throw new System.IO.IOException("Failed to create a AudioGrabber.");
        Graph.AddFilter(AudioGrabber, "AudioGrabber");

        // Input format of the sample grabber filter.
        // SetMediaType specifies the required media type:
        // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
        // * Not every member of AM_MEDIA_TYPE needs to be filled in.
        // * By default the sample grabber has no preferred media type.
        // * Call this method before building the filter graph to make sure
        //   the sample grabber connects to the correct filter.
        // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
        // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
        {
            var grabber = (ISampleGrabber)AudioGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
            mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);           // Disable sample copies.
            grabber.SetOneShot(false);                 // Disable one-shot mode.
            //grabber.SetCallback(AudioGrabberCB, 0);  // 0: call the SampleCB method.
            grabber.SetCallback(AudioGrabberCB, 1);    // 1: call the BufferCB method.
        }
    }
    #endregion

    #region 映像出力用: レンダラーを生成します.
    {
        // Video output: null renderer that terminates the video stream.
        VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (VideoRenderer == null)
            throw new System.IO.IOException("Failed to create a VideoRenderer.");
        Graph.AddFilter(VideoRenderer, "VideoRenderer");
    }
    #endregion

    #region 音声出力用: レンダラーを生成します.
    {
        // Audio output: null renderer that terminates the audio stream.
        AudioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (AudioRenderer == null)
            throw new System.IO.IOException("Failed to create a AudioRenderer.");
        Graph.AddFilter(AudioRenderer, "AudioRenderer");
    }
    #endregion

    #region フィルタの接続:
    if (filename.EndsWith(".avi", StringComparison.InvariantCultureIgnoreCase))
    {
        #region AVI 形式ファイル用の初期化:
        unsafe
        {
            HRESULT hr;

            // Add the AVI splitter.
            Splitter = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_AviSplitter);
            if (Splitter == null)
                throw new System.IO.IOException("Failed to create a Splitter.");
            Graph.AddFilter(Splitter, "Splitter");

            // Connect filters: (AVI splitter)
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, IntPtr.Zero, VideoSource, null, Splitter);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);

            // Connect filters: (video input)
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), Splitter, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);

            // Connect filters: (audio input) — some files have no audio
            // track, so a failure here is tolerated and only logged.
            try
            {
                var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), Splitter, AudioGrabber, AudioRenderer);
            }
            catch (System.Exception ex)
            {
                Debug.WriteLine(ex.StackTrace);
            }
        }
        #endregion
    }
    else if (
        filename.EndsWith(".asf", StringComparison.InvariantCultureIgnoreCase) ||
        filename.EndsWith(".wmv", StringComparison.InvariantCultureIgnoreCase))
    {
        #region WMV 形式ファイル用の初期化:
        unsafe
        {
            HRESULT hr;

            // Connect filters: (video input)
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), VideoSource, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);

            // Connect filters: (audio input)
            var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), VideoSource, AudioGrabber, AudioRenderer);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);
        }
        #endregion
    }
    #endregion

    // Sync: register the sample grabber notification event.
    VideoGrabberCB.Enable = true;
    VideoGrabberCB.Notify += VideoGrabberCB_Notify;

    VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);
}
// Configure specified pin and collect its capabilities if required private void GetPinCapabilitiesAndConfigureSizeAndRate( ICaptureGraphBuilder2 graphBuilder, IBaseFilter baseFilter, Guid pinCategory, Size size, int frameRate, ref VideoCapabilities[] capabilities ) { object streamConfigObject; graphBuilder.FindInterface( pinCategory, MediaType.Video, baseFilter, typeof( IAMStreamConfig ).GUID, out streamConfigObject ); if ( streamConfigObject != null ) { IAMStreamConfig streamConfig = null; try { streamConfig = (IAMStreamConfig) streamConfigObject; } catch ( InvalidCastException ) { } if ( streamConfig != null ) { if ( capabilities == null ) { try { // get all video capabilities capabilities = AForge.Video.DirectShow.VideoCapabilities.FromStreamConfig( streamConfig ); } catch { } } // check if it is required to change capture settings if ( ( frameRate != 0 ) || ( ( size.Width != 0 ) && ( size.Height != 0 ) ) ) { SetFrameSizeAndRate( streamConfig, size, frameRate ); } } } // if failed resolving capabilities, then just create empty capabilities array, // so we don't try again if ( capabilities == null ) { capabilities = new VideoCapabilities[0]; } }
/// <summary>
/// Connects to the property changed events of the camera settings.
/// </summary>
//private void Initialize()
//{
//    //Settings.Instance.Camera.OnCameraControlPropertyChanged += OnCameraControlPropertyChanged;
//    //Settings.Instance.Camera.OnVideoProcAmpPropertyChanged += OnVideoProcAmpPropertyChanged;
//    //Settings.Instance.Camera.OnVideoControlFlagsChanged += OnVideoControlFlagsChanged;

//    //stopwatch = new Stopwatch();
//}

/// <summary>
/// Build the capture graph for grabber:
/// capture device -> sample grabber (callback into this instance).
/// </summary>
/// <param name="dev">The capture device to open (looked up by name in the video input category).</param>
/// <param name="frameRate">The framerate to use, or 0 to keep the device default.</param>
/// <param name="width">The width to use, or 0 to keep the device default.</param>
/// <param name="height">The height to use, or 0 to keep the device default.</param>
/// <returns>True, if succesfull, otherwise false.</returns>
private bool SetupGraph(DsDevice dev, int frameRate, int width, int height)
{
    int hr;

    fps = frameRate; // Not measured, only to expose FPS externally
    cameraControl = null;
    capFilter = null;

    // Get the graphbuilder object
    graphBuilder = (IFilterGraph2) new FilterGraph();
    mediaControl = graphBuilder as IMediaControl;

    try
    {
        // Create the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Create the SampleGrabber interface
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(graphBuilder);
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in capGraph.SetFiltergraph. Could not build graph. Message: " +
        //                          DsError.GetErrorText(hr));

#if DEBUG
        // Register the graph in the Running Object Table so GraphEdit can inspect it.
        this.rotEntry = new DsROTEntry(this.graphBuilder);
#endif

        // Look the device up by name; if not found, capFilter stays null and
        // the RenderStream call below reports the failure through hr.
        this.capFilter = CreateFilter(FilterCategory.VideoInputDevice, dev.Name);
        if (this.capFilter != null)
        {
            hr = graphBuilder.AddFilter(this.capFilter, "Video Source");
            DsError.ThrowExceptionForHR(hr);
        }

        //// Add the video device
        //hr = graphBuilder.AddSourceFilterForMoniker(dev.Mon, null, "Video input", out capFilter);
        //if (hr != 0)
        //    ErrorLogger.WriteLine(
        //        "Error in m_graphBuilder.AddSourceFilterForMoniker(). Could not add source filter. Message: " +
        //        DsError.GetErrorText(hr));

        var baseGrabFlt = (IBaseFilter)sampGrabber;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        //if (hr != 0)
        //    ErrorLogger.WriteLine("Error in m_graphBuilder.AddFilter(). Could not add filter. Message: " +
        //                          DsError.GetErrorText(hr));

        // turn on the infrared leds ONLY FOR THE GENIUS WEBCAM
        /*
         * if (!defaultMode)
         * {
         *     m_icc = capFilter as IAMCameraControl;
         *     CameraControlFlags CamFlags = new CameraControlFlags();
         *     int pMin, pMax, pStep, pDefault;
         *
         *     hr = m_icc.GetRange(CameraControlProperty.Focus, out pMin, out pMax, out pStep, out pDefault, out CamFlags);
         *     m_icc.Set(CameraControlProperty.Focus, pMax, CameraControlFlags.None);
         * }
         */

        //IBaseFilter smartTee = new SmartTee() as IBaseFilter;

        //// Add the smart tee filter to the graph
        //hr = this.graphBuilder.AddFilter(smartTee, "Smart Tee");
        //Marshal.ThrowExceptionForHR(hr);

        // Connect the video source output to the smart tee
        //hr = capGraph.RenderStream(null, null, capFilter, null, smartTee);

        // Connect the capture pin to the sample grabber; hr is checked
        // further down, after videoProcAmp and config are set up.
        hr = capGraph.RenderStream(PinCategory.Capture, MediaType.Video, capFilter, null, baseGrabFlt);
        var errorText = DsError.GetErrorText(hr);

        cameraControl = capFilter as IAMCameraControl;

        // Set videoProcAmp: bind the device moniker to its IBaseFilter and
        // query the video proc amp interface from it.
        object obj;
        var iid_IBaseFilter = new Guid("56a86895-0ad4-11ce-b03a-0020af0ba770");
        DirectShowDevices.Instance.Cameras[deviceNumber].DirectshowDevice.Mon.BindToObject(
            null, null, ref iid_IBaseFilter, out obj);

        videoProcAmp = obj as IAMVideoProcAmp;

        // If any of the default config items are set
        if (frameRate + height + width > 0)
        {
            SetConfigParms(capGraph, capFilter, frameRate, width, height);
        }

        // Check for succesful rendering, if this failed the class cannot be used, so dispose the resources and return false.
        if (hr < 0)
        {
            Cleanup();
            return(false);
        }
        else
        {
            // Otherwise update the SampleGrabber.
            SaveSizeInfo(sampGrabber);
            hr = sampGrabber.SetBufferSamples(false);

            if (hr == 0)
            {
                hr = sampGrabber.SetOneShot(false);
                // 1 selects the BufferCB callback on this instance.
                hr = sampGrabber.SetCallback(this, 1);
            }

            //if (hr < 0)
            //    ErrorLogger.WriteLine("Could not set callback function (SetupGraph) in Camera.Capture()");
        }
    }
    catch (Exception ex)
    {
        // Any failure during graph construction tears the graph down
        // and reports failure to the caller.
        //ErrorLogger.ProcessException(ex, false);

        Cleanup();
        return(false);
    }

    return(true);
}
/// <summary>
/// Rebuilds the capture-resolution list when the user picks a different
/// video source: opens the selected device, queries IAMStreamConfig for
/// every supported format and fills CMB_videoresolutions with them.
/// </summary>
private void CMB_videosources_SelectedIndexChanged(object sender, EventArgs e)
{
    int hr;
    int count;
    int size;
    object o;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;
    AMMediaType media = null;
    VideoInfoHeader v;
    VideoStreamConfigCaps c;
    List<GCSBitmapInfo> modes = new List<GCSBitmapInfo>();

    // Get the ICaptureGraphBuilder2
    capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();
    IFilterGraph2 m_FilterGraph = (IFilterGraph2) new FilterGraph();

    DsDevice[] capDevices;
    capDevices = DsDevice.GetDevicesOfCat(FilterCategory.VideoInputDevice);

    // Add the video device.
    // NOTE(review): assumes the combo-box index lines up with the
    // GetDevicesOfCat ordering — verify against how CMB_videosources is filled.
    hr = m_FilterGraph.AddSourceFilterForMoniker(capDevices[CMB_videosources.SelectedIndex].Mon, null, "Video input", out capFilter);
    try
    {
        DsError.ThrowExceptionForHR(hr);
    }
    catch (Exception ex)
    {
        CustomMessageBox.Show("Can not add video source\n" + ex.ToString());
        return;
    }

    // Find the stream config interface
    hr = capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o);
    DsError.ThrowExceptionForHR(hr);

    IAMStreamConfig videoStreamConfig = o as IAMStreamConfig;
    if (videoStreamConfig == null)
    {
        throw new Exception("Failed to get IAMStreamConfig");
    }

    hr = videoStreamConfig.GetNumberOfCapabilities(out count, out size);
    DsError.ThrowExceptionForHR(hr);

    // One caps buffer, reused across all GetStreamCaps calls.
    IntPtr TaskMemPointer = Marshal.AllocCoTaskMem(size);

    for (int i = 0; i < count; i++)
    {
        IntPtr ptr = IntPtr.Zero;

        hr = videoStreamConfig.GetStreamCaps(i, out media, TaskMemPointer);
        v = (VideoInfoHeader)Marshal.PtrToStructure(media.formatPtr, typeof(VideoInfoHeader));
        c = (VideoStreamConfigCaps)Marshal.PtrToStructure(TaskMemPointer, typeof(VideoStreamConfigCaps));
        // NOTE(review): each GCSBitmapInfo keeps a reference to `media`,
        // yet only the final `media` is freed after the loop — the earlier
        // ones leak and the last one is freed while still referenced by
        // `modes`. Confirm GCSBitmapInfo copies what it needs.
        modes.Add(new GCSBitmapInfo(v.BmiHeader.Width, v.BmiHeader.Height, c.MaxFrameInterval, c.VideoStandard.ToString(), media));
    }
    Marshal.FreeCoTaskMem(TaskMemPointer);
    DsUtils.FreeAMMediaType(media);

    // NOTE(review): capFilter, capGraph and m_FilterGraph are never
    // released here — COM references leak on every selection change.
    CMB_videoresolutions.DataSource = modes;

    if (MainV2.getConfig("video_options") != "" && CMB_videosources.Text != "")
    {
        // NOTE(review): int.Parse can throw and the stored index may be
        // out of range for the new device's mode list — TODO confirm.
        CMB_videoresolutions.SelectedIndex = int.Parse(MainV2.getConfig("video_options"));
    }
}
/// <summary> /// Set the Framerate, and video size /// </summary> /// <param name="capGraph">The <see cref="ICaptureGraphBuilder2"/> interface.</param> /// <param name="capFilter">The <see cref="IBaseFilter"/> of the capture device.</param> /// <param name="frameRate">The new framerate to be used.</param> /// <param name="width">The new video width to be used.</param> /// <param name="height">The new video height to be used.</param> private void SetConfigParms(ICaptureGraphBuilder2 capGraph, IBaseFilter capFilter, int frameRate, int width, int height) { int hr; object o; AMMediaType media = null; // Find the stream config interface hr = this.capGraph.FindInterface(PinCategory.Capture, MediaType.Video, capFilter, typeof(IAMStreamConfig).GUID, out o); videoControl = capFilter as IAMVideoControl; videoStreamConfig = o as IAMStreamConfig; //if (videoStreamConfig == null) // ErrorLogger.WriteLine("Error in Capture.SetConfigParams(). Failed to get IAMStreamConfig"); // Get the existing format block if (videoStreamConfig != null) { hr = videoStreamConfig.GetFormat(out media); } //if (hr != 0) // ErrorLogger.WriteLine("Could not SetConfigParms in Camera.Capture. Message: " + DsError.GetErrorText(hr)); // copy out the videoinfoheader var v = new VideoInfoHeader(); Marshal.PtrToStructure(media.formatPtr, v); // if overriding set values if (frameRate > 0) { v.AvgTimePerFrame = 10000000 / frameRate; } if (width > 0) { v.BmiHeader.Width = width; } if (height > 0) { v.BmiHeader.Height = height; } // Copy the media structure back Marshal.StructureToPtr(v, media.formatPtr, true); // Set the new format if (videoStreamConfig != null) { hr = videoStreamConfig.SetFormat(media); } //if (hr != 0) // ErrorLogger.WriteLine( // "Error while setting new camera format (videoStreamConfig) in Camera.Capture. Message: " + // DsError.GetErrorText(hr)); DsUtils.FreeAMMediaType(media); media = null; }
/// <summary> /// プレイヤーの切断 /// </summary> private void Player_Disconnect() { if (Player_IsRunning) Player_Stop(); // 同期用: サンプルグラバーのイベント登録解除: VideoGrabberCB.Enable = false; VideoGrabberCB.Notify -= VideoGrabberCB_Notify; #region 解放: if (VideoSource != null) Marshal.ReleaseComObject(VideoSource); VideoSource = null; if (Splitter != null) Marshal.ReleaseComObject(Splitter); Splitter = null; if (VideoGrabber != null) Marshal.ReleaseComObject(VideoGrabber); VideoGrabber = null; if (AudioGrabber != null) Marshal.ReleaseComObject(AudioGrabber); AudioGrabber = null; if (VideoRenderer != null) Marshal.ReleaseComObject(VideoRenderer); VideoRenderer = null; if (AudioRenderer != null) Marshal.ReleaseComObject(AudioRenderer); AudioRenderer = null; if (Builder != null) Marshal.ReleaseComObject(Builder); Builder = null; if (Graph != null) Marshal.ReleaseComObject(Graph); Graph = null; #endregion }
/// <summary>
/// Build and start a playback graph for the AVI file named in
/// txtAviFileName: file source -> AVI decompressor/ffdshow -> sample
/// grabber -> video renderer, then start the frame-pump thread.
/// Shows a message box and returns early when the file is missing or
/// ffdshow is not installed.
/// </summary>
private void StartCapture()
{
    int hr;

    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;

    if (System.IO.File.Exists(txtAviFileName.Text))
    {
        // Get the graphbuilder object
        m_FilterGraph = (IFilterGraph2) new FilterGraph();
        m_mediaCtrl = m_FilterGraph as IMediaControl;

        // Get the ICaptureGraphBuilder2
        capGraph = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

        // Get the SampleGrabber interface
        sampGrabber = (ISampleGrabber) new SampleGrabber();

        // Start building the graph
        hr = capGraph.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the video source
        hr = m_FilterGraph.AddSourceFilter(txtAviFileName.Text, "File Source (Async.)", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        //add AVI Decompressor
        IBaseFilter pAVIDecompressor = (IBaseFilter) new AVIDec();
        hr = m_FilterGraph.AddFilter(pAVIDecompressor, "AVI Decompressor");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter ffdshow;
        try
        {
            // Create Decoder filter COM object (ffdshow video decoder)
            // via its fixed CLSID; fails if ffdshow is not installed.
            Type comtype = Type.GetTypeFromCLSID(new Guid("{04FE9017-F873-410E-871E-AB91661A4EF7}"));
            if (comtype == null)
            {
                throw new NotSupportedException("Creating ffdshow video decoder COM object fails.");
            }
            object comobj = Activator.CreateInstance(comtype);
            ffdshow = (IBaseFilter)comobj; // error ocurrs! raised exception
            comobj = null;
        }
        catch
        {
            CustomMessageBox.Show("Please install/reinstall ffdshow");
            return;
        }

        hr = m_FilterGraph.AddFilter(ffdshow, "ffdshow");
        DsError.ThrowExceptionForHR(hr);

        //
        IBaseFilter baseGrabFlt = (IBaseFilter)sampGrabber;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        IBaseFilter vidrender = (IBaseFilter) new VideoRenderer();
        hr = m_FilterGraph.AddFilter(vidrender, "Render");
        DsError.ThrowExceptionForHR(hr);

        // Wire: file source -> ffdshow -> sample grabber.
        // NOTE(review): these pin references are never released, and
        // pAVIDecompressor is added but never connected — TODO confirm.
        IPin captpin = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        IPin ffdpinin = DsFindPin.ByName(ffdshow, "In");
        IPin ffdpinout = DsFindPin.ByName(ffdshow, "Out");
        IPin samppin = DsFindPin.ByName(baseGrabFlt, "Input");

        hr = m_FilterGraph.Connect(captpin, ffdpinin);
        DsError.ThrowExceptionForHR(hr);
        hr = m_FilterGraph.Connect(ffdpinout, samppin);
        DsError.ThrowExceptionForHR(hr);

        // NOTE(review): the file writer below is created but never used
        // (its SetFileName call is commented out).
        FileWriter filewritter = new FileWriter();
        IFileSinkFilter filemux = (IFileSinkFilter)filewritter;
        //filemux.SetFileName("test.avi",);

        //hr = capGraph.RenderStream(null, MediaType.Video, capFilter, null, vidrender);
        // DsError.ThrowExceptionForHR(hr);

        SaveSizeInfo(sampGrabber);

        // setup buffer
        if (m_handle == IntPtr.Zero)
        {
            m_handle = Marshal.AllocCoTaskMem(m_stride * m_videoHeight);
        }

        // tell the callback to ignore new images
        m_PictureReady = new ManualResetEvent(false);
        m_bGotOne = false;
        m_bRunning = false;

        // Background frame-pump thread.
        timer1 = new Thread(timer);
        timer1.IsBackground = true;
        timer1.Start();

        // Seeking/position interfaces off the graph, used by the position trackbar.
        m_mediaextseek = m_FilterGraph as IAMExtendedSeeking;
        m_mediapos = m_FilterGraph as IMediaPosition;
        m_mediaseek = m_FilterGraph as IMediaSeeking;
        double length = 0;
        m_mediapos.get_Duration(out length);
        trackBar_mediapos.Minimum = 0;
        trackBar_mediapos.Maximum = (int)length;

        Start();
    }
    else
    {
        MessageBox.Show("File does not exist");
    }
}