/// <summary>
/// Opens a media file for playback: rebuilds the filter graph, adds a source
/// filter for the file, renders its streams, and records the seeking caps.
/// </summary>
/// <param name="sFileName">Path of the media file to open.</param>
/// <param name="clsidPresenter">CLSID of the EVR presenter to use.</param>
public void OpenFile(string sFileName, Guid clsidPresenter)
{
    // Create a new filter graph. (This also closes the old one, if any.)
    InitializeGraph();

    m_clsidPresenter = clsidPresenter;

    // Let DirectShow choose and add the appropriate source filter.
    IBaseFilter sourceFilter;
    int hr = m_pGraph.AddSourceFilter(sFileName, null, out sourceFilter);
    DsError.ThrowExceptionForHR(hr);

    try
    {
        // Try to render the streams.
        RenderStreams(sourceFilter);

        // Get the seeking capabilities.
        hr = m_pSeek.GetCapabilities(out m_seekCaps);
        DsError.ThrowExceptionForHR(hr);

        // Update our state.
        m_state = PlaybackState.Stopped;
    }
    finally
    {
        // The graph holds its own reference; drop ours unconditionally.
        Marshal.ReleaseComObject(sourceFilter);
    }
}
/// <summary>
/// Builds a playback graph for a hard-coded test file, renders its video
/// stream through the EVR, and caches the EVR's IMFVideoProcessor in m_vp.
/// </summary>
private void GetInterface()
{
    int hr;

    // Fresh filter graph for the test clip.
    m_pGraph = (IGraphBuilder)new FilterGraph();

    IBaseFilter sourceFilter;
    hr = m_pGraph.AddSourceFilter(@"C:\SourceForge\mflib\Test\Media\AspectRatio4x3.wmv", null, out sourceFilter);
    DsError.ThrowExceptionForHR(hr);

    // Add the Enhanced Video Renderer as the video sink.
    IBaseFilter evr = (IBaseFilter)new EnhancedVideoRenderer();
    hr = m_pGraph.AddFilter(evr, "EVR");
    DsError.ThrowExceptionForHR(hr);

    // Use a capture graph builder to wire source -> EVR.
    ICaptureGraphBuilder2 captureBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();
    hr = captureBuilder.SetFiltergraph(m_pGraph);
    DsError.ThrowExceptionForHR(hr);

    hr = captureBuilder.RenderStream(null, MediaType.Video, sourceFilter, null, evr);
    DsError.ThrowExceptionForHR(hr);

    // Ask the EVR's mixer service for its video processor interface.
    IMFGetService getService = evr as IMFGetService;
    object service;
    hr = getService.GetService(MFServices.MR_VIDEO_MIXER_SERVICE, typeof(IMFVideoProcessor).GUID, out service);
    MFError.ThrowExceptionForHR(hr);

    m_vp = service as IMFVideoProcessor;
}
/// <summary>
/// Adds a source filter for <paramref name="source"/> to the graph and wraps
/// it in a handler. Throws (via ThrowExceptionForHR) if the filter cannot be added.
/// </summary>
/// <param name="graphBuilder">Graph to add the source filter to.</param>
/// <param name="source">File name or URL to load.</param>
/// <returns>A handler owning the newly created source filter.</returns>
public static ISourceFilterHandler AddSourceFilter(IGraphBuilder graphBuilder, string source)
{
    IBaseFilter filter;
    graphBuilder
        .AddSourceFilter(source, "Source", out filter)
        .ThrowExceptionForHR(GraphBuilderError.SourceFilter);
    return new RegularSourceFilterHandler(filter);
}
/// <summary>
/// Performs the DVR-MS to WAV conversion by building and running a
/// DirectShow graph: source -> decrypter -> WAV dest -> file writer.
/// </summary>
/// <returns>Always null; the result is ignored by the caller.</returns>
protected override object DoWork()
{
    // Create the filter graph; DisposalCleanup releases it when we are done.
    object filterGraph = ClassId.CoCreateInstance(ClassId.FilterGraph);
    DisposalCleanup.Add(filterGraph);
    IGraphBuilder graph = (IGraphBuilder)filterGraph;

    // Source filter for the input .dvr-ms file.
    IBaseFilter sourceFilter = graph.AddSourceFilter(InputFilePath, null);
    DisposalCleanup.Add(sourceFilter);

    // File writer sink, pointed at the output path.
    IBaseFilter writerFilter = (IBaseFilter)ClassId.CoCreateInstance(ClassId.FileWriter);
    DisposalCleanup.Add(writerFilter);
    graph.AddFilter(writerFilter, null);
    IFileSinkFilter sink = (IFileSinkFilter)writerFilter;
    sink.SetFileName(OutputFilePath, null);

    // WAV Dest filter converts the audio stream into WAV format.
    IBaseFilter wavDest = (IBaseFilter)ClassId.CoCreateInstance(ClassId.WavDest);
    DisposalCleanup.Add(wavDest);
    graph.AddFilter(wavDest, null);

    // Decrypter node for the protected DVR-MS content.
    IBaseFilter decrypter = (IBaseFilter)ClassId.CoCreateInstance(ClassId.DecryptTag);
    DisposalCleanup.Add(decrypter);
    graph.AddFilter(decrypter, null);

    // Wire the chain by explicit pin names:
    // dvr-ms source -> decrypter -> wav dest -> file writer.
    Connect(graph, sourceFilter, "DVR Out - 1", decrypter, "In(Enc/Tag)");
    Connect(graph, decrypter, "Out", wavDest, "In");
    Connect(graph, wavDest, "Out", writerFilter, "in");

    // Run the graph to completion, producing the WAV file.
    RunGraph(graph);

    return null;
}
/// <summary>
/// Adds a source filter for <paramref name="filename"/> to the graph and
/// refreshes the graph display. Failures are reported to the user and the
/// graph is left unreloaded.
/// </summary>
/// <param name="filename">File to load; also used as the filter name.</param>
public void AddSourceFilter(string filename)
{
    try
    {
        IBaseFilter sourceFilter;
        DsError.ThrowExceptionForHR(
            graphBuilder.AddSourceFilter(filename, filename, out sourceFilter));
    }
    catch (COMException e)
    {
        ShowCOMException(e, "Can't add source filter for " + filename);
        return;
    }
    catch (Exception e)
    {
        MessageBox.Show(e.Message, "Can't add source filter for " + filename);
        return;
    }

    ReloadGraph();
}
/// <summary>
/// Creates a source filter for the given URL and adds it to the graph.
/// DirectShow selects the concrete source filter from the URL/protocol,
/// so this also covers the RTSP source registered on the machine.
/// </summary>
/// <param name="pGraph">The graph the filter will live in</param>
/// <param name="url">The URL to load into the filter</param>
/// <returns>The newly added source filter.</returns>
private IBaseFilter CreateSourceFilter(IGraphBuilder pGraph, string url)
{
    // NOTE(review): an older hand-rolled path that CoCreated the RTSP filter
    // by CLSID and loaded the URL via IFileSourceFilter was left here as
    // commented-out code; it has been removed in favor of AddSourceFilter,
    // which performs the same steps through DirectShow's own selection logic.
    IBaseFilter sourceFilter;
    int hr = pGraph.AddSourceFilter(url, "SourceFilter", out sourceFilter);
    CheckHr(hr, "Can't add source Filter to graph for url=" + url);
    return sourceFilter;
}
/// <summary>
/// Reads a WMV file and grabs its video frames. (WMV 形式のファイルを読み込む処理)
/// </summary>
/// <remarks>
/// RenderStream (NULL, MEDIATYPE_Video, source, videoGrabber, renderner)<br/>
/// RenderStream (NULL, MEDIATYPE_Audio, source, audioGrabber, renderner)<br/>
/// <pre>
///    source          grabber       mux        renderner
///  +--------+      +---------+   +-------+   +-------+
///  |  audio 0 ---- 0  audio  0 --- 1     0 --- 0     |
///  |        |      +---------+   |       |   +-------+
///  |        |                    |       |
///  |        |      +---------+   |       |
///  |  video 1 ---  0  video  0 --- 0     |
///  +--------+      +---------+   |       |
///                                2       |
///                                +-------+
/// </pre>
/// Builds graph, grabs decoded RGB24 frames via a SampleGrabber callback,
/// plays to completion, then saves each grabbed frame as a PNG.
/// </remarks>
public static void Sample31()
{
    string __FUNCTION__ = MethodBase.GetCurrentMethod().Name;
    Console.WriteLine(__FUNCTION__);

    IGraphBuilder graph = null;
    ICaptureGraphBuilder2 builder = null;
    IBaseFilter videoSource = null;
    IBaseFilter videoGrabber = null;
    IBaseFilter audioGrabber = null;
    IBaseFilter videoRenderer = null;
    IBaseFilter audioRenderer = null;
    var videoGrabberCB = new CxSampleGrabberCB();
    var audioGrabberCB = new CxSampleGrabberCB();

    string src_filename = Path.Combine(TestFiles, "stopwatch_320x240.wmv");

    try
    {
        #region グラフビルダーの生成:
        // Create the filter graph and the capture graph builder, and attach them.
        {
            graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
            if (graph == null)
            {
                throw new System.IO.IOException("Failed to create a GraphBuilder.");
            }
            builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
            if (builder == null)
            {
                throw new System.IO.IOException("Failed to create a GraphBuilder.");
            }
            builder.SetFiltergraph(graph);
        }
        #endregion

        #region 像入力用: ソースフィルタを生成します.
        // Video input: create the source filter for the WMV file.
        {
#if true
            // Let the graph pick the source filter for the file.
            graph.AddSourceFilter(src_filename, "VideoSource", ref videoSource);
            if (videoSource == null)
            {
                throw new System.IO.IOException("Failed to create a videoSource.");
            }
#else
            // Alternative: create the WM ASF Reader explicitly and load the file.
            videoSource = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_WMAsfReader);
            if (videoSource == null)
            {
                throw new System.IO.IOException("Failed to create a videoSource.");
            }
            graph.AddFilter(videoSource, "VideoSource");

            // Configure the file source filter.
            var pConfig = (IFileSourceFilter)videoSource;
            {
                HRESULT hr = (HRESULT)pConfig.Load(src_filename, IntPtr.Zero);
                if (hr < HRESULT.S_OK)
                {
                    throw new System.IO.IOException("Failed to set the src_filename.");
                }
            }
#endif
        }
        #endregion

        #region 像捕獲用: サンプルグラバーを生成します.
        // Video capture: create the SampleGrabber that taps decoded frames.
        {
            videoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
            if (videoGrabber == null)
            {
                throw new System.IO.IOException("Failed to create a videoGrabber.");
            }
            graph.AddFilter(videoGrabber, "videoGrabber");

            // Configure the sample grabber's input media type.
            // SetMediaType declares the preferred media type:
            // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
            // - Not every AM_MEDIA_TYPE member must be filled in.
            // - By default the sample grabber has no preferred media type.
            // - Call this before building the graph so it connects to the right filter.
            // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
            // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
            {
                var grabber = (ISampleGrabber)videoGrabber;

                var mt = new AM_MEDIA_TYPE();
                mt.majortype = new Guid(GUID.MEDIATYPE_Video);
                mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
                mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
                grabber.SetMediaType(mt);
                grabber.SetBufferSamples(false);            // Disable internal sample copies.
                grabber.SetOneShot(false);                  // Disable one-shot mode.
                //grabber.SetCallback(videoGrabberCB, 0);   // 0: invoke SampleCB.
                grabber.SetCallback(videoGrabberCB, 1);     // 1: invoke BufferCB.
            }
        }
        #endregion

        #region 音声捕獲用: サンプルグラバーを生成します.
        // Audio capture: create a second SampleGrabber for PCM audio.
        {
            audioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
            if (audioGrabber == null)
            {
                throw new System.IO.IOException("Failed to create a audioGrabber.");
            }
            graph.AddFilter(audioGrabber, "audioGrabber");

            // Configure the audio grabber's input media type (same notes as above):
            // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
            // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
            // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
            {
                var grabber = (ISampleGrabber)audioGrabber;

                var mt = new AM_MEDIA_TYPE();
                mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
                mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
                mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
                grabber.SetMediaType(mt);
                grabber.SetBufferSamples(false);            // Disable internal sample copies.
                grabber.SetOneShot(false);                  // Disable one-shot mode.
                //grabber.SetCallback(audioGrabberCB, 0);   // 0: invoke SampleCB.
                grabber.SetCallback(audioGrabberCB, 1);     // 1: invoke BufferCB.
            }
        }
        #endregion

        #region 像出力用: レンダラーを生成します.
        // Video output: a Null Renderer terminates the video branch (no window).
        {
            videoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
            if (videoRenderer == null)
            {
                throw new System.IO.IOException("Failed to create a videoRenderer.");
            }
            graph.AddFilter(videoRenderer, "videoRenderer");
        }
        #endregion

        #region 音声出力用: レンダラーを生成します.
        // Audio output: a Null Renderer terminates the audio branch (no playback).
        {
            audioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
            if (audioRenderer == null)
            {
                throw new System.IO.IOException("Failed to create a audioRenderer.");
            }
            graph.AddFilter(audioRenderer, "audioRenderer");
        }
        #endregion

        #region フィルタの接続:
        // Connect the filters: source -> grabber -> null renderer, per stream.
        unsafe
        {
            HRESULT hr;

            // Video branch.
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), videoSource, videoGrabber, videoRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            // Audio branch.
            var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
            hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), videoSource, audioGrabber, audioRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }
        }
        #endregion

        #region DEBUG: GraphEdit ファイルを保存します.
        /*
         * Saves the current filter layout to the specified .GRF file.
         * The saved file can be inspected with graphedt.exe (Windows SDK).
         */
        try
        {
            Axi.SaveGraphFile(graph, Path.GetFullPath(__FUNCTION__ + ".GRF"));
        }
        catch (System.Exception ex)
        {
            Console.WriteLine(ex.StackTrace);
        }
        #endregion

        // ------------------------------

        #region 取り込み処理:
        // Capture: run the graph, collect frames through the callback,
        // wait for completion, then save the collected frames as PNGs.
        {
            var mediaControl = (IMediaControl)graph;
            var mediaEvent = (IMediaEvent)graph;
            var mediaSeeking = (IMediaSeeking)graph;

            // Query the negotiated video format (frame dimensions etc.).
            var vih = Axi.GetVideoInfo((ISampleGrabber)videoGrabber);

            var images = new List<Bitmap>();
            var watch = new Stopwatch();
            watch.Start();

            // Each grabbed buffer is converted to a Bitmap and collected.
            videoGrabberCB.Notify += delegate(object _sender, CxSampleGrabberEventArgs _e)
            {
                Console.WriteLine("{0}: SampleTime={1:F6}", images.Count, _e.SampleTime);
                images.Add(_e.ToImage(vih));
            };

            // Start playback.
            Console.WriteLine("Run ...");
            {
                HRESULT hr;
                int state;
                hr = (HRESULT)mediaControl.Run();
                hr = (HRESULT)mediaControl.GetState(1000, out state);
            }
            Console.WriteLine("Running ... {0:F3} msec", watch.Elapsed.TotalMilliseconds);

            // Wait until playback completes, then stop the graph.
            {
                HRESULT hr;
                int code;
                hr = (HRESULT)mediaEvent.WaitForCompletion(-1, out code);
                hr = (HRESULT)mediaControl.Stop();
            }

            // Save the grabbed frames for inspection.
            Console.WriteLine("Save ... {0:F3} msec", watch.Elapsed.TotalMilliseconds);
            {
                string subdir = Path.Combine(Results, __FUNCTION__);
                if (Directory.Exists(subdir) == false)
                {
                    Directory.CreateDirectory(subdir);
                }

                for (int i = 0; i < images.Count; i++)
                {
                    var filename = string.Format("image{0}.png", i);
                    images[i].Save(Path.Combine(subdir, filename));
                }
            }

            Console.WriteLine("Completed. {0:F3} msec", watch.Elapsed.TotalMilliseconds);
        }
        #endregion
    }
    catch (System.Exception ex)
    {
        Console.WriteLine("{0}", ex.StackTrace);
    }
    finally
    {
        #region 解放:
        // Release every COM object we created, in reverse dependency order.
        if (videoSource != null)
        {
            Marshal.ReleaseComObject(videoSource);
        }
        videoSource = null;
        if (videoGrabber != null)
        {
            Marshal.ReleaseComObject(videoGrabber);
        }
        videoGrabber = null;
        if (audioGrabber != null)
        {
            Marshal.ReleaseComObject(audioGrabber);
        }
        audioGrabber = null;
        if (videoRenderer != null)
        {
            Marshal.ReleaseComObject(videoRenderer);
        }
        videoRenderer = null;
        if (audioRenderer != null)
        {
            Marshal.ReleaseComObject(audioRenderer);
        }
        audioRenderer = null;
        if (builder != null)
        {
            Marshal.ReleaseComObject(builder);
        }
        builder = null;
        if (graph != null)
        {
            Marshal.ReleaseComObject(graph);
        }
        graph = null;
        #endregion
    }
}
/// <summary>
/// Reads an AVI file and saves it as a WMV file. (AVI 形式のファイルを読み込み、WMV 形式のファイルに保存する処理)
/// </summary>
/// <remarks>
/// RenderStream (NULL, NULL, source, null, splitter)<br/>
/// RenderStream (NULL, MEDIATYPE_Video, splitter, grabber, renderner)<br/>
/// RenderStream (NULL, MEDIATYPE_Audio, splitter, null, renderner)<br/>
/// <pre>
///   source        splitter        grabber       renderner
///  +-------+     +--------+     +---------+    +-------+
///  |     0 --- 0  video  0 --- 0  video  0 --- 1       |
///  |       |     |        |     +---------+    |       |
///  |       |     |        |                    |       |
///  |       |     |        |     +---------+    |       |
///  |       |     | audio 1 --- 0  audio  0 --- 0       |
///  +-------+     +--------+     +---------+    +-------+
/// </pre>
/// In this sample the audio-side grabber is omitted; the video grabber is
/// used to discover the negotiated frame size for the ASF writer.
/// </remarks>
public static void Sample22()
{
    string __FUNCTION__ = MethodBase.GetCurrentMethod().Name;
    Console.WriteLine(__FUNCTION__);

    IGraphBuilder graph = null;
    ICaptureGraphBuilder2 builder = null;
    IBaseFilter videoSource = null;
    IBaseFilter aviSplitter = null;
    IBaseFilter videoGrabber = null;
    IBaseFilter videoRenderer = null;
    IFileSinkFilter fileSink = null;
    var videoGrabberCB = new CxSampleGrabberCB();

    string src_filename = Path.Combine(TestFiles, "stopwatch_320x240.avi");
    string dst_filename = Path.Combine(Results, __FUNCTION__ + ".wmv");

    try
    {
        #region グラフビルダーの生成:
        // Create the filter graph and the capture graph builder, and attach them.
        {
            graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
            if (graph == null)
            {
                throw new System.IO.IOException("Failed to create a GraphBuilder.");
            }
            builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
            if (builder == null)
            {
                throw new System.IO.IOException("Failed to create a GraphBuilder.");
            }
            builder.SetFiltergraph(graph);
        }
        #endregion

        #region 像入力用: ソースフィルタを生成します.
        // Video input: source filter for the AVI file, plus an AVI splitter
        // to separate the video and audio streams.
        {
            graph.AddSourceFilter(src_filename, "VideoSource", ref videoSource);
            if (videoSource == null)
            {
                throw new System.IO.IOException("Failed to create a videoSource.");
            }

            aviSplitter = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_AviSplitter);
            if (aviSplitter == null)
            {
                throw new System.IO.IOException("Failed to create a aviSplitter.");
            }
            graph.AddFilter(aviSplitter, "aviSplitter");
        }
        #endregion

        #region 像捕獲用: サンプルグラバーを生成します.
        // Video capture: SampleGrabber used to observe the decoded RGB24 frames.
        {
            videoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
            if (videoGrabber == null)
            {
                throw new System.IO.IOException("Failed to create a videoGrabber.");
            }
            graph.AddFilter(videoGrabber, "videoGrabber");

            // Configure the sample grabber's input media type.
            // SetMediaType declares the preferred media type:
            // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
            // - Not every AM_MEDIA_TYPE member must be filled in.
            // - By default the sample grabber has no preferred media type.
            // - Call this before building the graph so it connects to the right filter.
            // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
            // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
            {
                var grabber = (ISampleGrabber)videoGrabber;

                var mt = new AM_MEDIA_TYPE();
                mt.majortype = new Guid(GUID.MEDIATYPE_Video);
                mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
                mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
                grabber.SetMediaType(mt);
                grabber.SetBufferSamples(false);            // Disable internal sample copies.
                grabber.SetOneShot(false);                  // Disable one-shot mode.
                //grabber.SetCallback(videoGrabberCB, 0);   // 0: invoke SampleCB.
                grabber.SetCallback(videoGrabberCB, 1);     // 1: invoke BufferCB.
            }
        }
        #endregion

        #region 像出力用: 保存する動画ファイル名を設定します.
        // Video output: create the ASF writer via SetOutputFileName and switch
        // its encoding profile.
        unsafe
        {
            HRESULT hr;

            // Create the ASF writer/renderer bound to the destination file.
            var filetype = new Guid(GUID.MEDIASUBTYPE_Asf);
            hr = (HRESULT)builder.SetOutputFileName(new IntPtr(&filetype), dst_filename, ref videoRenderer, ref fileSink);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            /*
             * Capturing Video to a Windows Media File
             * https://msdn.microsoft.com/en-us/library/windows/desktop/dd318630(v=vs.85).aspx
             */
            var config = (IConfigAsfWriter)videoRenderer;

            // WMProfile
            {
                // The default returned here is WMProfile_V80_256Video.
                Guid currentProfileGuid;
                hr = (HRESULT)config.GetCurrentProfileGuid(out currentProfileGuid);
                if (hr < HRESULT.S_OK)
                {
                    throw new CxDSException(hr);
                }

                // Replace it with WMProfile_V80_BESTVBRVideo.
                Guid newProfileGuid = new Guid(GUID.WMProfile_V80_BESTVBRVideo);
                hr = (HRESULT)config.ConfigureFilterUsingProfileGuid(newProfileGuid);
                if (hr < HRESULT.S_OK)
                {
                    throw new CxDSException(hr);
                }
            }
        }
        #endregion

        #region フィルタの接続:
        // Connect the filters: source -> splitter, then split streams to the writer.
        unsafe
        {
            HRESULT hr;

            // AVI splitter branch.
            hr = (HRESULT)builder.RenderStream(IntPtr.Zero, IntPtr.Zero, videoSource, null, aviSplitter);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            // Video branch (through the grabber).
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), aviSplitter, videoGrabber, videoRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            // Audio branch (no grabber).
            var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
            hr = (HRESULT)builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), aviSplitter, null, videoRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }
        }
        #endregion

        #region 像出力用: 保存する動画ファイルのフレームサイズを設定します.
        // Copy the negotiated frame size from the grabber into the ASF writer.
        unsafe
        {
            /*
             * Capturing Video to a Windows Media File
             * https://msdn.microsoft.com/en-us/library/windows/desktop/dd318630(v=vs.85).aspx
             */
            var config = (IConfigAsfWriter)videoRenderer;

            // Read the negotiated frame size.
            var vih = Axi.GetVideoInfo((ISampleGrabber)videoGrabber);
            var frameSize = new Size(vih.bmiHeader.biWidth, vih.bmiHeader.biHeight);

            // Apply it to the writer's profile.
            Axi.SetVideoFrameSize(config, frameSize);
        }
        #endregion

        #region DEBUG: GraphEdit ファイルを保存します.
        /*
         * Saves the current filter layout to the specified .GRF file.
         * The saved file can be inspected with graphedt.exe (Windows SDK).
         */
        try
        {
            Axi.SaveGraphFile(graph, Path.GetFullPath(__FUNCTION__ + ".GRF"));
        }
        catch (System.Exception ex)
        {
            Console.WriteLine(ex.StackTrace);
        }
        #endregion

        // ------------------------------

        #region 取り込み処理:
        // Capture: run the graph and wait until the conversion completes.
        {
            var mediaControl = (IMediaControl)graph;
            var mediaEvent = (IMediaEvent)graph;
            var mediaSeeking = (IMediaSeeking)graph;

            var watch = new Stopwatch();
            watch.Start();

            // Start the graph.
            Console.WriteLine("Run ...");
            {
                HRESULT hr;
                int state;
                hr = (HRESULT)mediaControl.Run();
                hr = (HRESULT)mediaControl.GetState(1000, out state);
            }
            Console.WriteLine("Running ... {0:F3} msec", watch.Elapsed.TotalMilliseconds);

            // Wait until playback/conversion completes, then stop.
            {
                HRESULT hr;
                int code;
                hr = (HRESULT)mediaEvent.WaitForCompletion(-1, out code);
                hr = (HRESULT)mediaControl.Stop();
            }

            Console.WriteLine("Completed. {0:F3} msec", watch.Elapsed.TotalMilliseconds);
        }
        #endregion
    }
    catch (System.Exception ex)
    {
        Console.WriteLine("{0}", ex.StackTrace);
    }
    finally
    {
        #region 解放:
        // Release every COM object we created.
        if (videoSource != null)
        {
            Marshal.ReleaseComObject(videoSource);
        }
        videoSource = null;
        if (aviSplitter != null)
        {
            Marshal.ReleaseComObject(aviSplitter);
        }
        aviSplitter = null;
        if (videoRenderer != null)
        {
            Marshal.ReleaseComObject(videoRenderer);
        }
        videoRenderer = null;
        if (fileSink != null)
        {
            Marshal.ReleaseComObject(fileSink);
        }
        fileSink = null;
        if (builder != null)
        {
            Marshal.ReleaseComObject(builder);
        }
        builder = null;
        if (graph != null)
        {
            Marshal.ReleaseComObject(graph);
        }
        graph = null;
        #endregion
    }
}
/// <summary>
/// Builds the playback/grabbing filter graph for the current file: a source
/// filter (WMV ASF reader, auto-selected, or async-reader fallback), a video
/// SampleGrabber, and optionally an audio SampleGrabber inserted between the
/// audio decoder and the renderer. On any failure the graph is torn down and
/// VideoSourceError is raised.
/// </summary>
private void CreateFilters()
{
    isValid = true;

    // Grabber callback objects (receive video frames / audio buffers).
    grabberVideo = new GrabberVideo(this);
    grabberAudio = new GrabberAudio(this);

    // COM object references kept so they can be released in DestroyFilters.
    graphObject = null;
    grabberObjectVideo = null;
    grabberObjectAudio = null;
    int sourceBaseVideoPinIndex = 0;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // Create the source filter. .wmv files get an explicit ASF reader
        // (whose video pin is index 1); everything else is auto-selected,
        // with an Async Reader fallback when auto-selection yields nothing.
        if (fileName.ToLower().EndsWith(".wmv"))
        {
            type = Type.GetTypeFromCLSID(Clsid.WMASFReader);
            if (type == null)
            {
                throw new ApplicationException("Failed creating ASF Reader filter");
            }
            sourceBase = (IBaseFilter)Activator.CreateInstance(type);
            IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
            sourceFile.Load(fileName, null);
            graph.AddFilter(sourceBase, "source");
            // NOTE(review): pin 1 is assumed to be the ASF reader's video pin.
            sourceBaseVideoPinIndex = 1;
        }
        else
        {
            graph.AddSourceFilter(fileName, "source", out sourceBase);
            if (sourceBase == null)
            {
                // Fallback: load the file through the Async Reader.
                try
                {
                    type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
                    if (type == null)
                    {
                        throw new ApplicationException("Failed creating Async Reader filter");
                    }
                    sourceBase = (IBaseFilter)Activator.CreateInstance(type);
                    IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
                    sourceFile.Load(fileName, null);
                    graph.AddFilter(sourceBase, "source");
                }
                catch
                {
                    throw new ApplicationException("Failed creating source filter");
                }
            }
            sourceBaseVideoPinIndex = 0;
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObjectVideo = Activator.CreateInstance(type);
        sampleGrabberVideo = (ISampleGrabber)grabberObjectVideo;
        grabberBaseVideo = (IBaseFilter)grabberObjectVideo;

        // add grabber filters to graph
        graph.AddFilter(grabberBaseVideo, "grabberVideo");

        // set media type (ARGB32 frames requested from the grabber)
        AMMediaType mediaType = new AMMediaType
        {
            MajorType = MediaType.Video,
            SubType = MediaSubType.ARGB32 /* MediaSubType.RGB24 */
        };
        ;
        sampleGrabberVideo.SetMediaType(mediaType);

        // connect pins: source video pin -> video grabber input
        IPin outPin = Tools.GetOutPin(sourceBase, sourceBaseVideoPinIndex);
        IPin inPin = Tools.GetInPin(grabberBaseVideo, 0);
        if (graph.Connect(outPin, inPin) < 0)
        {
            throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo");
        }
        Marshal.ReleaseComObject(outPin);
        Marshal.ReleaseComObject(inPin);

        // get media type: read the negotiated frame size for the video grabber
        if (sampleGrabberVideo.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabberVideo.Width = vih.BmiHeader.Width;
            grabberVideo.Height = vih.BmiHeader.Height;
            mediaType.Dispose();
        }

        if (useAudioGrabber)
        {
            // *****************************************************************
            // ******** Add the audio grabber to monitor audio peaks ***********

            bool audioGrabberIsConnected = false;

            // Strategy 1: render each unconnected source output pin; if it turns
            // out to be audio, splice the audio grabber between the decoder and
            // whatever got rendered downstream of it.
            Tools.FilterInfo2 filterInfo2 = Tools.GetNextFilter(sourceBase, PinDirection.Output, 0);
            foreach (Tools.PinInfo2 pinInfo2 in filterInfo2.Pins)
            {
                if (pinInfo2.PinInfo.Direction == PinDirection.Output)
                {
                    if (!Tools.IsPinConnected(pinInfo2.Pin))
                    {
                        // Best-effort: failures on one pin must not abort the others.
                        try
                        {
                            graph.Render(pinInfo2.Pin);

                            AMMediaType mt = new AMMediaType();
                            pinInfo2.Pin.ConnectionMediaType(mt);
                            if (mt.MajorType == MediaType.Audio)
                            {
                                // Obtain a reference to the filter connected to the audio output of the video splitter (usually, this is the audio decoder)
                                Tools.FilterInfo2 decoderFilterInfo2 = Tools.GetNextFilter(pinInfo2.PinInfo.Filter, PinDirection.Output, 0);

                                // Remove all the filters connected to the audio decoder filter
                                System.Collections.Generic.List<Tools.FilterInfo2> filtersInfo2 = new System.Collections.Generic.List<Tools.FilterInfo2>();
                                Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(decoderFilterInfo2.Filter, PinDirection.Output, 0);
                                while (true)
                                {
                                    filtersInfo2.Add(testFilterInfo2);
                                    testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                                    if (testFilterInfo2.Filter == null)
                                    {
                                        break;
                                    }
                                }
                                foreach (Tools.FilterInfo2 fi2 in filtersInfo2)
                                {
                                    graph.RemoveFilter(fi2.Filter);
                                    fi2.Release();
                                }

                                // get type for sample grabber
                                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                                if (type == null)
                                {
                                    throw new ApplicationException("Failed creating audio sample grabber");
                                }

                                // create sample grabber
                                grabberObjectAudio = Activator.CreateInstance(type);
                                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                grabberBaseAudio = (IBaseFilter)grabberObjectAudio;

                                // add grabber filters to graph
                                graph.AddFilter(grabberBaseAudio, "grabberAudio");

                                // set media type (PCM audio requested)
                                AMMediaType mediaTypeAudio = new AMMediaType
                                {
                                    MajorType = MediaType.Audio,
                                    SubType = MediaSubType.PCM,
                                    FormatType = FormatType.WaveEx
                                };
                                sampleGrabberAudio.SetMediaType(mediaTypeAudio);

                                // decoder output -> audio grabber input
                                outPin = Tools.GetOutPin(decoderFilterInfo2.Filter, 0);
                                inPin = Tools.GetInPin(grabberBaseAudio, 0);
                                if (graph.Connect(outPin, inPin) < 0)
                                {
                                    throw new ApplicationException("Failed connecting filter to grabberBaseAudio");
                                }
                                Marshal.ReleaseComObject(outPin);
                                Marshal.ReleaseComObject(inPin);

                                // Finally, connect the grabber to the audio renderer
                                outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                graph.Render(outPin);

                                mt = new AMMediaType();
                                outPin.ConnectionMediaType(mt);
                                if (!Tools.IsPinConnected(outPin))
                                {
                                    throw new ApplicationException("Failed obtaining media audio information");
                                }
                                wavFormat = new WaveFormatEx();
                                Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
                                Marshal.ReleaseComObject(outPin);

                                // configure sample grabber
                                sampleGrabberAudio.SetBufferSamples(false);
                                sampleGrabberAudio.SetOneShot(false);
                                sampleGrabberAudio.SetCallback(grabberAudio, 1);

                                audioGrabberIsConnected = true;

                                break;
                            }
                        }
                        catch
                        {
                            // Deliberate best-effort: ignore this pin and try the next.
                        }
                    }
                }
            }
            filterInfo2.Release();

            // Strategy 2: if no rendered pin turned out to be audio, look for an
            // unconnected source pin that advertises an audio media type and
            // connect the grabber to it directly.
            if (!audioGrabberIsConnected)
            {
                foreach (Tools.PinInfo2 pinInfo2 in Tools.GetPins(sourceBase))
                {
                    if (!Tools.IsPinConnected(pinInfo2.Pin))
                    {
                        foreach (AMMediaType mt in Tools.GetMediaTypes(pinInfo2.Pin))
                        {
                            if (mt.MajorType == MediaType.Audio)
                            {
                                // create sample grabber
                                // NOTE(review): relies on `type` still holding the
                                // SampleGrabber type from earlier — confirm.
                                grabberObjectAudio = Activator.CreateInstance(type);
                                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                                grabberBaseAudio = (IBaseFilter)grabberObjectAudio;

                                // add grabber filters to graph
                                graph.AddFilter(grabberBaseAudio, "grabberAudio");

                                // set media type
                                AMMediaType mediaTypeAudio = new AMMediaType
                                {
                                    MajorType = MediaType.Audio,
                                    SubType = MediaSubType.PCM,
                                    FormatType = FormatType.WaveEx
                                };
                                sampleGrabberAudio.SetMediaType(mediaTypeAudio);

                                // source audio pin -> audio grabber input
                                inPin = Tools.GetInPin(grabberBaseAudio, 0);
                                if (graph.Connect(pinInfo2.Pin, inPin) < 0)
                                {
                                    throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo");
                                }
                                Marshal.ReleaseComObject(inPin);

                                // Finally, connect the grabber to the audio renderer
                                outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                                graph.Render(outPin);
                                AMMediaType amt = new AMMediaType();
                                outPin.ConnectionMediaType(amt);
                                if (!Tools.IsPinConnected(outPin))
                                {
                                    throw new ApplicationException("Failed obtaining media audio information");
                                }
                                wavFormat = new WaveFormatEx();
                                Marshal.PtrToStructure(amt.FormatPtr, wavFormat);
                                Marshal.ReleaseComObject(outPin);

                                // configure sample grabber
                                sampleGrabberAudio.SetBufferSamples(false);
                                sampleGrabberAudio.SetOneShot(false);
                                sampleGrabberAudio.SetCallback(grabberAudio, 1);

                                audioGrabberIsConnected = true;

                                break;
                            }
                        }
                    }
                }
            }
            // *****************************************************************
        }

        // let's do the rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBaseVideo, 0));

            // configure video window (keep it hidden)
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }

        // configure sample grabber
        sampleGrabberVideo.SetBufferSamples(false);
        sampleGrabberVideo.SetOneShot(false);
        sampleGrabberVideo.SetCallback(grabberVideo, 1);

        // disable clock, if someone requested it (frames delivered as fast as possible)
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            mediaFilter.SetSyncSource(null);
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // get media seek control
        mediaSeekControl = (IMediaSeeking)graphObject;

        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;

        // get media audio control
        basicAudio = (IBasicAudio)graphObject;
    }
    catch (Exception exception)
    {
        DestroyFilters();

        // provide information to clients
        VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
    }
}
/// <summary>
/// Background playback thread: builds a graph for the current file with a
/// RGB24 SampleGrabber tapped into the first connectable video pin, runs the
/// graph, and pumps graph events until completion or until stopEvent is set.
/// Raises VideoSourceError on failure and PlayingFinished when done.
/// </summary>
private void WorkerThread()
{
    // Assume a user-requested stop unless we observe end-of-stream.
    ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser;
    Grabber grabber = new Grabber(this);
    object obj = null;     // filter graph COM object
    object obj2 = null;    // sample grabber COM object
    IGraphBuilder graphBuilder = null;
    IBaseFilter filter = null;      // source filter
    IBaseFilter baseFilter = null;  // grabber as IBaseFilter
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEventEx = null;
    try
    {
        // Create the filter graph.
        Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        obj = Activator.CreateInstance(typeFromCLSID);
        graphBuilder = (IGraphBuilder)obj;

        // Auto-select a source filter for the file.
        graphBuilder.AddSourceFilter(fileName, "source", out filter);
        if (filter == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // Create the sample grabber and add it to the graph.
        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        obj2 = Activator.CreateInstance(typeFromCLSID);
        sampleGrabber = (ISampleGrabber)obj2;
        baseFilter = (IBaseFilter)obj2;
        graphBuilder.AddFilter(baseFilter, "grabber");

        // Request RGB24 video from the grabber.
        AMMediaType aMMediaType = new AMMediaType();
        aMMediaType.MajorType = MediaType.Video;
        aMMediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(aMMediaType);

        // Probe the source's output pins in order until one connects to the
        // grabber's input; throw once we run out of pins.
        int num = 0;
        IPin inPin = Tools.GetInPin(baseFilter, 0);
        IPin pin = null;
        while (true)
        {
            pin = Tools.GetOutPin(filter, num);
            if (pin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }
            if (graphBuilder.Connect(pin, inPin) >= 0)
            {
                break;
            }
            Marshal.ReleaseComObject(pin);
            pin = null;
            num++;
        }
        Marshal.ReleaseComObject(pin);
        Marshal.ReleaseComObject(inPin);

        // Read the negotiated frame dimensions back from the grabber.
        if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0)
        {
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = videoInfoHeader.BmiHeader.Width;
            grabber.Height = videoInfoHeader.BmiHeader.Height;
            aMMediaType.Dispose();
        }

        // Render the grabber's output (keeping the video window hidden),
        // unless the caller asked to prevent freezing by skipping it.
        if (!preventFreezing)
        {
            graphBuilder.Render(Tools.GetOutPin(baseFilter, 0));
            IVideoWindow videoWindow = (IVideoWindow)obj;
            videoWindow.put_AutoShow(autoShow: false);
            videoWindow = null;
        }

        // Grabber delivers frames via BufferCB (mode 1) without buffering.
        sampleGrabber.SetBufferSamples(bufferThem: false);
        sampleGrabber.SetOneShot(oneShot: false);
        sampleGrabber.SetCallback(grabber, 1);

        // Without a reference clock the graph runs as fast as possible.
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)obj;
            mediaFilter.SetSyncSource(null);
        }

        mediaControl = (IMediaControl)obj;
        mediaEventEx = (IMediaEventEx)obj;
        mediaControl.Run();

        // Event loop: poll graph events; stop on completion or stop request.
        do
        {
            if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0)
            {
                // Event parameters must always be freed after GetEvent.
                mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2);
                if (lEventCode == DsEvCode.Complete)
                {
                    reason = ReasonToFinishPlaying.EndOfStreamReached;
                    break;
                }
            }
        }
        while (!stopEvent.WaitOne(100, exitContext: false));
        mediaControl.Stop();
    }
    catch (Exception ex)
    {
        // Report the failure to subscribers, if any.
        if (this.VideoSourceError != null)
        {
            this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message));
        }
    }
    finally
    {
        // Clear interface references, then release the underlying COM objects.
        graphBuilder = null;
        baseFilter = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEventEx = null;
        if (obj != null)
        {
            Marshal.ReleaseComObject(obj);
            obj = null;
        }
        if (filter != null)
        {
            Marshal.ReleaseComObject(filter);
            filter = null;
        }
        if (obj2 != null)
        {
            Marshal.ReleaseComObject(obj2);
            obj2 = null;
        }
    }

    // Always notify listeners why playback ended.
    if (this.PlayingFinished != null)
    {
        this.PlayingFinished(this, reason);
    }
}
// Form load: builds a playback graph for `Filename`
// (source -> sample grabber -> default video renderer), hosts the video in
// panel1, sizes the picture boxes to the movie, allocates the frame buffer,
// and starts playback plus the UI timer.
private void Form1_Load(object sender, EventArgs e)
{
    m_PictureReady = new ManualResetEvent(false);
    // PlayMovieInWindow( "c:\\temp\\beckscen2.avi" );
    int hr;
    this.graphBuilder = (IGraphBuilder)new FilterGraph();
    // this.graphBuilder.AddFilter(
    // Get the SampleGrabber interface
    ISampleGrabber sampGrabber = new SampleGrabber() as ISampleGrabber;
    IBaseFilter ibfRenderer = null;
    IBaseFilter capFilter = null;
    IPin iPinInFilter = null;
    IPin iPinOutFilter = null;
    IPin iPinInDest = null;
    // Add the video source
    hr = graphBuilder.AddSourceFilter(Filename, "Ds.NET FileFilter", out capFilter);
    DsError.ThrowExceptionForHR(hr);
    // Hopefully this will be the video pin
    // NOTE(review): assumes output pin 0 of the source is the video stream —
    // not guaranteed for every container/source filter; verify for target media.
    IPin iPinOutSource = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
    IBaseFilter baseGrabFlt = sampGrabber as IBaseFilter;
    // Set the grabber's media type before connecting (see ConfigureSampleGrabber).
    ConfigureSampleGrabber(sampGrabber);
    iPinInFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
    iPinOutFilter = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
    // Add the frame grabber to the graph
    hr = graphBuilder.AddFilter(baseGrabFlt, "Ds.NET Grabber");
    DsError.ThrowExceptionForHR(hr);
    // Source -> grabber (intermediate decoders inserted automatically).
    hr = graphBuilder.Connect(iPinOutSource, iPinInFilter);
    DsError.ThrowExceptionForHR(hr);
    // Get the default video renderer
    ibfRenderer = (IBaseFilter)new VideoRendererDefault();
    // Add it to the graph
    hr = graphBuilder.AddFilter(ibfRenderer, "Ds.NET VideoRendererDefault");
    DsError.ThrowExceptionForHR(hr);
    iPinInDest = DsFindPin.ByDirection(ibfRenderer, PinDirection.Input, 0);
    // Connect the graph.  Many other filters automatically get added here
    hr = graphBuilder.Connect(iPinOutFilter, iPinInDest);
    DsError.ThrowExceptionForHR(hr);
    // The filter graph object also implements all of the control interfaces below.
    this.mediaControl = (IMediaControl)this.graphBuilder;
    this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
    this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
    this.mediaPosition = (IMediaPosition)this.graphBuilder;
    this.videoWindow = this.graphBuilder as IVideoWindow;
    this.basicVideo = this.graphBuilder as IBasicVideo;
    this.basicAudio = this.graphBuilder as IBasicAudio;
    // Host the video window inside panel1 as a child window.
    hr = this.videoWindow.put_Owner(panel1.Handle);
    DsError.ThrowExceptionForHR(hr);
    hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
    DsError.ThrowExceptionForHR(hr);
    if (this.basicVideo == null)
        return;
    int lHeight, lWidth;
    hr = this.basicVideo.GetVideoSize(out lWidth, out lHeight);
    // NOTE(review): C-style "%d" placeholders are not substituted by
    // Console.WriteLine in C# — this prints the literal "%d x %d". Use
    // "{0} x {1}" if this log line matters.
    Console.WriteLine("video: %d x %d\n", lWidth, lHeight);
    m_videoWidth = lWidth;
    m_videoHeight = lHeight;
    // Cache stride/size info from the grabber's connected media type.
    SaveSizeInfo(sampGrabber);
    newbitmap = new Bitmap(lWidth, lHeight, PixelFormat.Format24bppRgb);
    origbitmap = new Bitmap(lWidth, lHeight, PixelFormat.Format24bppRgb);
    m_ImageChanged = true;
    // Size the two preview boxes to the native video size, stacked vertically.
    pictureBox1.Width = lWidth;
    pictureBox1.Height = lHeight;
    pictureBox2.Width = lWidth;
    pictureBox2.Height = lHeight;
    pictureBox2.Top = pictureBox1.Top + lHeight + 4;
    duration = 0.0f;
    this.mediaPosition.get_Duration(out duration);
    // Frame buffer sized for one RGB24 frame (stride may be negative for
    // bottom-up bitmaps, hence Math.Abs).
    m_ipBuffer = Marshal.AllocCoTaskMem(Math.Abs(m_stride) * m_videoHeight);
    // this.ClientSize = new Size(lWidth, lHeight);
    Application.DoEvents();
    hr = this.videoWindow.SetWindowPosition(0, 0, panel1.Width, panel1.Height);
    this.mediaControl.Run();
    timer1.Enabled = true;
    // buildCaptureGRaph( this.de ( (capDevices[iDeviceNum], iWidth, iHeight, iBPP, hControl);
    // buildCaptureaph();
}
/// <summary>
/// Builds the filter graph for the current <c>SourceFile</c>:
/// file source -> sample grabber -> null renderer. Stores the filters, pins,
/// frame size, and the graph's IMediaSeeking filter in instance fields.
/// Any previous graph is torn down first via <see cref="Dispose"/>.
/// </summary>
/// <exception cref="DSLab.CxDSException">Wraps any failure during graph construction.</exception>
public virtual void Setup()
{
    this.Dispose();
    try
    {
        // Graph.
        // CoCreateInstance
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

        #region フィルタ追加.
        // File input: let DirectShow choose a source filter for SourceFile.
        IBaseFilter capture = null;
        GraphBuilder.AddSourceFilter(SourceFile, "CaptureFilter", ref capture);
        if (capture == null)
            throw new System.IO.IOException();

#if false
        // DMO wrapper filter (disabled).
        // https://msdn.microsoft.com/ja-jp/library/cc371140.aspx
        IBaseFilter dmo = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_DMOWrapperFilter)));
        if (dmo != null)
        {
            //// Mpeg4 Decoder DMO
            //// F371728A-6052-4D47-827C-D039335DFE0A
            //// 4A69B442-28BE-4991-969C-B500ADF5D8A8
            //// mpg4decd.dll [C:\Windows\System32, C:\Windows\SysWOW64]
            var idmo = (IDMOWrapperFilter)dmo;
            idmo.Init(new Guid("F371728A-6052-4D47-827C-D039335DFE0A"), new Guid("4A69B442-28BE-4991-969C-B500ADF5D8A8"));
            idmo = null;
            this.GraphBuilder.AddFilter(dmo, "Mpeg4 Decoder DMO");
        }
#endif

#if false
        // Avi Splitter (disabled).
        IBaseFilter splitter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVISplitter)));
        if (splitter == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(splitter, "Avi Splitter");

        // Avi Decompressor (disabled).
        IBaseFilter decompressor = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVIDec)));
        if (decompressor == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(decompressor, "Avi Decompressor");
#endif

        // Sample grabber.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");

        // Renderer (null renderer: frames are consumed, nothing is displayed).
        IBaseFilter renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
        if (renderer == null)
            throw new System.IO.IOException();
        this.GraphBuilder.AddFilter(renderer, "Renderer");
        #endregion

        #region ピンの取得.
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
        IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
        #endregion

        #region ピンの接続.
        // Intelligent connect: decoders are inserted automatically as needed.
        GraphBuilder.Connect(capture_out, grabber_in);
        GraphBuilder.Connect(grabber_out, renderer_in);
        #endregion

        #region 保管: インターフェース.
        // Keep filters and pins for later teardown / access.
        CaptureFilter = capture;
        CaptureOutPin = capture_out;
        SampleGrabber = (ISampleGrabber)grabber;
        SampleGrabberInPin = grabber_in;
        SampleGrabberOutPin = grabber_out;
        Renderer = renderer;
        RendererInPin = renderer_in;
        #endregion

        #region 保管: フレームサイズ.
        // Record the negotiated frame size (Abs: height may be negative for
        // top-down bitmaps).
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
            );
        #endregion

        #region インタフェースの抽出:
        // Walk the graph's filters and keep the one exposing IMediaSeeking
        // (used for seek operations); release every other enumerated filter.
        {
            DSLab.IGraphBuilder graph = this.GraphBuilder;
            DSLab.IEnumFilters filters = null;
            DSLab.IBaseFilter filter = null;
            int fetched = 0;
            int hr = graph.EnumFilters(ref filters);
            while (filters.Next(1, ref filter, ref fetched) == (int)DSLab.HRESULT.S_OK)
            {
                if (fetched == 0)
                    break;
                if (filter is DSLab.IMediaSeeking)
                {
                    // For seek operations.
                    Seeking = (DSLab.IMediaSeeking)filter;
                }
                else
                {
                    // Release the filter.
                    Marshal.ReleaseComObject(filter);
                    filter = null;
                }
            }
            // Release the enumerator.
            Marshal.ReleaseComObject(filters);
        }
        #endregion

        // DEBUG
#if DEBUG
        DebugPrint(this.GraphBuilder);
#endif
    }
    catch (Exception ex)
    {
        throw new DSLab.CxDSException(ex);
    }
}
/// <summary>
/// Connects the player to a media file: builds a graph with a file source,
/// video and audio sample grabbers (RGB24 / PCM), and null renderers, then
/// wires it up per container type (.avi via an AVI splitter; .asf/.wmv
/// directly) and hooks the video-grabber notification callback.
/// </summary>
/// <param name="filename">Path of the media file to open (.avi, .asf, or .wmv).</param>
/// <exception cref="System.IO.IOException">A required COM object could not be created.</exception>
/// <exception cref="CxDSException">A RenderStream connection failed.</exception>
private void Player_Connect(string filename)
{
    #region グラフビルダーの生成:
    {
        Graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
        if (Graph == null)
            throw new System.IO.IOException("Failed to create a GraphBuilder.");

        Builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
        if (Builder == null)
            throw new System.IO.IOException("Failed to create a GraphBuilder.");
        Builder.SetFiltergraph(Graph);
    }
    #endregion

    #region 映像入力用: ソースフィルタを生成します.
    {
        // Video input: create the source filter for the file.
        Graph.AddSourceFilter(filename, "VideoSource", ref VideoSource);
        if (VideoSource == null)
            throw new System.IO.IOException("Failed to create a VideoSource.");
    }
    #endregion

    #region 映像捕獲用: サンプルグラバーを生成します.
    {
        // Video capture: create a sample grabber and force RGB24 input.
        VideoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (VideoGrabber == null)
            throw new System.IO.IOException("Failed to create a VideoGrabber.");
        Graph.AddFilter(VideoGrabber, "VideoGrabber");

        // Configure the grabber's input format with SetMediaType.
        // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
        // * Not every member of AM_MEDIA_TYPE needs to be set.
        // * By default the sample grabber has no preferred media type.
        // * Call this before building the graph so the grabber connects to
        //   the correct upstream filter.
        // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
        // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
        {
            var grabber = (ISampleGrabber)VideoGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Video);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
            mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);            // Sample copies: disabled.
            grabber.SetOneShot(false);                  // One shot: disabled.
            //grabber.SetCallback(VideoGrabberCB, 0);   // 0: call the SampleCB method.
            grabber.SetCallback(VideoGrabberCB, 1);     // 1: call the BufferCB method.
        }
    }
    #endregion

    #region 音声捕獲用: サンプルグラバーを生成します.
    {
        // Audio capture: create a sample grabber and force PCM input.
        AudioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (AudioGrabber == null)
            throw new System.IO.IOException("Failed to create a AudioGrabber.");
        Graph.AddFilter(AudioGrabber, "AudioGrabber");

        // Configure the grabber's input format with SetMediaType.
        // (Same notes as the video grabber above.)
        {
            var grabber = (ISampleGrabber)AudioGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
            mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);            // Sample copies: disabled.
            grabber.SetOneShot(false);                  // One shot: disabled.
            //grabber.SetCallback(AudioGrabberCB, 0);   // 0: call the SampleCB method.
            grabber.SetCallback(AudioGrabberCB, 1);     // 1: call the BufferCB method.
        }
    }
    #endregion

    #region 映像出力用: レンダラーを生成します.
    {
        // Video output: null renderer (frames are consumed, not displayed).
        VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (VideoRenderer == null)
            throw new System.IO.IOException("Failed to create a VideoRenderer.");
        Graph.AddFilter(VideoRenderer, "VideoRenderer");
    }
    #endregion

    #region 音声出力用: レンダラーを生成します.
    {
        // Audio output: null renderer.
        AudioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (AudioRenderer == null)
            throw new System.IO.IOException("Failed to create a AudioRenderer.");
        Graph.AddFilter(AudioRenderer, "AudioRenderer");
    }
    #endregion

    #region フィルタの接続:
    if (filename.EndsWith(".avi", StringComparison.InvariantCultureIgnoreCase))
    {
        #region AVI 形式ファイル用の初期化:
        // AVI files need an explicit AVI splitter between the source and
        // the grabbers.
        unsafe
        {
            HRESULT hr;

            // Add the AVI splitter.
            Splitter = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_AviSplitter);
            if (Splitter == null)
                throw new System.IO.IOException("Failed to create a Splitter.");
            Graph.AddFilter(Splitter, "Splitter");

            // Connect: source -> splitter.
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, IntPtr.Zero, VideoSource, null, Splitter);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);

            // Connect: video stream (splitter -> grabber -> renderer).
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), Splitter, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);

            // Connect: audio stream — the file may have no audio track, so
            // a failure here is logged and ignored.
            try
            {
                var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), Splitter, AudioGrabber, AudioRenderer);
            }
            catch (System.Exception ex)
            {
                Debug.WriteLine(ex.StackTrace);
            }
        }
        #endregion
    }
    else if (
        filename.EndsWith(".asf", StringComparison.InvariantCultureIgnoreCase) ||
        filename.EndsWith(".wmv", StringComparison.InvariantCultureIgnoreCase))
    {
        #region WMV 形式ファイル用の初期化:
        // ASF/WMV sources demultiplex internally; connect grabbers directly.
        unsafe
        {
            HRESULT hr;

            // Connect: video stream.
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), VideoSource, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);

            // Connect: audio stream.
            var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), VideoSource, AudioGrabber, AudioRenderer);
            if (hr < HRESULT.S_OK)
                throw new CxDSException(hr);
        }
        #endregion
    }
    #endregion

    // Synchronization: register the video grabber's frame notification event.
    VideoGrabberCB.Enable = true;
    VideoGrabberCB.Notify += VideoGrabberCB_Notify;
    VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);
}
/// <summary>
/// Connects the player to a media file: builds a graph with a file source,
/// video and audio sample grabbers (RGB24 / PCM), and null renderers, then
/// wires it up per container type (.avi via an AVI splitter; .asf/.wmv
/// directly) and hooks the video-grabber notification callback.
/// </summary>
/// <param name="filename">Path of the media file to open (.avi, .asf, or .wmv).</param>
/// <exception cref="System.IO.IOException">A required COM object could not be created.</exception>
/// <exception cref="CxDSException">A RenderStream connection failed.</exception>
private void Player_Connect(string filename)
{
    #region グラフビルダーの生成:
    {
        Graph = (IGraphBuilder)Axi.CoCreateInstance(GUID.CLSID_FilterGraph);
        if (Graph == null)
        {
            throw new System.IO.IOException("Failed to create a GraphBuilder.");
        }

        Builder = (ICaptureGraphBuilder2)Axi.CoCreateInstance(GUID.CLSID_CaptureGraphBuilder2);
        if (Builder == null)
        {
            throw new System.IO.IOException("Failed to create a GraphBuilder.");
        }
        Builder.SetFiltergraph(Graph);
    }
    #endregion

    #region 像入力用: ソースフィルタを生成します.
    {
        // Video input: create the source filter for the file.
        Graph.AddSourceFilter(filename, "VideoSource", ref VideoSource);
        if (VideoSource == null)
        {
            throw new System.IO.IOException("Failed to create a VideoSource.");
        }
    }
    #endregion

    #region 像捕獲用: サンプルグラバーを生成します.
    {
        // Video capture: create a sample grabber and force RGB24 input.
        VideoGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (VideoGrabber == null)
        {
            throw new System.IO.IOException("Failed to create a VideoGrabber.");
        }
        Graph.AddFilter(VideoGrabber, "VideoGrabber");

        // Configure the grabber's input format with SetMediaType.
        // http://msdn.microsoft.com/ja-jp/library/cc369546.aspx
        // * Not every member of AM_MEDIA_TYPE needs to be set.
        // * By default the sample grabber has no preferred media type.
        // * Call this before building the graph so the grabber connects to
        //   the correct upstream filter.
        // majortype: http://msdn.microsoft.com/ja-jp/library/cc370108.aspx
        // subtype  : http://msdn.microsoft.com/ja-jp/library/cc371040.aspx
        {
            var grabber = (ISampleGrabber)VideoGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Video);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_RGB24);
            mt.formattype = new Guid(GUID.FORMAT_VideoInfo);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);            // Sample copies: disabled.
            grabber.SetOneShot(false);                  // One shot: disabled.
            //grabber.SetCallback(VideoGrabberCB, 0);   // 0: call the SampleCB method.
            grabber.SetCallback(VideoGrabberCB, 1);     // 1: call the BufferCB method.
        }
    }
    #endregion

    #region 音声捕獲用: サンプルグラバーを生成します.
    {
        // Audio capture: create a sample grabber and force PCM input.
        AudioGrabber = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_SampleGrabber);
        if (AudioGrabber == null)
        {
            throw new System.IO.IOException("Failed to create a AudioGrabber.");
        }
        Graph.AddFilter(AudioGrabber, "AudioGrabber");

        // Configure the grabber's input format with SetMediaType.
        // (Same notes as the video grabber above.)
        {
            var grabber = (ISampleGrabber)AudioGrabber;

            var mt = new AM_MEDIA_TYPE();
            mt.majortype = new Guid(GUID.MEDIATYPE_Audio);
            mt.subtype = new Guid(GUID.MEDIASUBTYPE_PCM);
            mt.formattype = new Guid(GUID.FORMAT_WaveFormatEx);
            grabber.SetMediaType(mt);
            grabber.SetBufferSamples(false);            // Sample copies: disabled.
            grabber.SetOneShot(false);                  // One shot: disabled.
            //grabber.SetCallback(AudioGrabberCB, 0);   // 0: call the SampleCB method.
            grabber.SetCallback(AudioGrabberCB, 1);     // 1: call the BufferCB method.
        }
    }
    #endregion

    #region 像出力用: レンダラーを生成します.
    {
        // Video output: null renderer (frames are consumed, not displayed).
        VideoRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (VideoRenderer == null)
        {
            throw new System.IO.IOException("Failed to create a VideoRenderer.");
        }
        Graph.AddFilter(VideoRenderer, "VideoRenderer");
    }
    #endregion

    #region 音声出力用: レンダラーを生成します.
    {
        // Audio output: null renderer.
        AudioRenderer = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_NullRenderer);
        if (AudioRenderer == null)
        {
            throw new System.IO.IOException("Failed to create a AudioRenderer.");
        }
        Graph.AddFilter(AudioRenderer, "AudioRenderer");
    }
    #endregion

    #region フィルタの接続:
    if (filename.EndsWith(".avi", StringComparison.InvariantCultureIgnoreCase))
    {
        #region AVI 形式ファイル用の初期化:
        // AVI files need an explicit AVI splitter between the source and
        // the grabbers.
        unsafe
        {
            HRESULT hr;

            // Add the AVI splitter.
            Splitter = (IBaseFilter)Axi.CoCreateInstance(GUID.CLSID_AviSplitter);
            if (Splitter == null)
            {
                throw new System.IO.IOException("Failed to create a Splitter.");
            }
            Graph.AddFilter(Splitter, "Splitter");

            // Connect: source -> splitter.
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, IntPtr.Zero, VideoSource, null, Splitter);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            // Connect: video stream (splitter -> grabber -> renderer).
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), Splitter, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            // Connect: audio stream — the file may have no audio track, so
            // a failure here is logged and ignored.
            try
            {
                var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
                hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), Splitter, AudioGrabber, AudioRenderer);
            }
            catch (System.Exception ex)
            {
                Debug.WriteLine(ex.StackTrace);
            }
        }
        #endregion
    }
    else if (
        filename.EndsWith(".asf", StringComparison.InvariantCultureIgnoreCase) ||
        filename.EndsWith(".wmv", StringComparison.InvariantCultureIgnoreCase))
    {
        #region WMV 形式ファイル用の初期化:
        // ASF/WMV sources demultiplex internally; connect grabbers directly.
        unsafe
        {
            HRESULT hr;

            // Connect: video stream.
            var mediatype_video = new Guid(GUID.MEDIATYPE_Video);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_video), VideoSource, VideoGrabber, VideoRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }

            // Connect: audio stream.
            var mediatype_audio = new Guid(GUID.MEDIATYPE_Audio);
            hr = (HRESULT)Builder.RenderStream(IntPtr.Zero, new IntPtr(&mediatype_audio), VideoSource, AudioGrabber, AudioRenderer);
            if (hr < HRESULT.S_OK)
            {
                throw new CxDSException(hr);
            }
        }
        #endregion
    }
    #endregion

    // Synchronization: register the video grabber's frame notification event.
    VideoGrabberCB.Enable = true;
    VideoGrabberCB.Notify += VideoGrabberCB_Notify;
    VideoInfoHeader = Axi.GetVideoInfo((ISampleGrabber)VideoGrabber);
}
// Builds the transcoding graph for a .wtv / .dvr-ms recording:
//   SBE source -> audio/video decrypt filters -> audio/video decoders
//   -> WM ASF writer (output: <input>.wmv).
// A throw-away test run against a null renderer is used to discover the
// source frame size before the writer is configured.
// Returns a DSStreamResultCodes status; exceptions are caught and mapped to
// ErrorExceptionOccurred.
DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
{
    // Init variables
    //IPin[] pin = new IPin[1];
    IBaseFilter DecFilterAudio = null;
    IBaseFilter DecFilterVideo = null;
    IBaseFilter MainAudioDecoder = null;
    IBaseFilter MainVideoDecoder = null;
    string dPin = string.Empty;
    string sName = string.Empty;
    string dName = string.Empty;
    string sPin = string.Empty;
    FileInfo fiInputFile = new FileInfo(strq.FileName);
    string txtOutputFNPath = fiInputFile.FullName + ".wmv";
    // Only stream-buffer recordings are supported.
    if (
        (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) &&
        (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
        )
    {
        return(DSStreamResultCodes.ErrorInvalidFileType);
    }

    int hr = 0;
    try
    {
        // Get the graphbuilder interface
        SendDebugMessage("Creating Graph Object", 0);
        IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

        // Add the DVRMS/WTV file / filter to the graph
        SendDebugMessage("Add SBE Source Filter", 0);
        hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
        DsError.ThrowExceptionForHR(hr);
        dc.Add(currentSBEfilter);

        // Get the SBE audio and video out pins
        IPin SBEVidOutPin, SBEAudOutPin;
        SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
        SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

        // Set up two decrypt filters according to file extension (assume audio and video both present )
        if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
        {
            // Add DVR-MS decrypt filters
            SendDebugMessage("Add DVRMS (bda) decryption", 0);
            DecFilterAudio = (IBaseFilter) new DTFilter();  // THESE ARE FOR DVR-MS (BDA DTFilters)
            DecFilterVideo = (IBaseFilter) new DTFilter();
            graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
            graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
        }
        else  // Add WTV decrypt filters
        {
            SendDebugMessage("Add WTV (pbda) decryption", 0);
            DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
            DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");
        }
        dc.Add(DecFilterAudio);
        dc.Add(DecFilterVideo);

        // Make the first link in the graph: SBE => Decrypts
        SendDebugMessage("Connect SBE => Decrypt filters", 0);
        IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
        FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
        IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
        if (DecAudioInPin == null)
        {
            SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
        }
        else
        {
            FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);
        }

        // Get Dec Audio Out pin
        IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

        // Examine Dec Audio out for audio format
        SendDebugMessage("Examining source audio", 0);
        AMMediaType AudioMediaType = null;
        getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
        SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
        SendDebugMessage("Examining Audio StreamInfo");
        StreamInfo si = FileInformation.GetStreamInfo(AudioMediaType);
        bool AudioIsAC3 = (si.SimpleType == "AC-3");
        if (AudioIsAC3)
        {
            SendDebugMessage("Audio type is AC3");
        }
        else
        {
            SendDebugMessage("Audio type is not AC3");
        }
        si = null;
        DsUtils.FreeAMMediaType(AudioMediaType);

        // Add an appropriate audio decoder (AC3 needs the MPC-HC decoder,
        // which may not be installed; anything else uses the MS DTV decoder).
        if (AudioIsAC3)
        {
            if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
            {
                SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                return(DSStreamResultCodes.ErrorAC3CodecNotFound);
            }
            else
            {
                MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);
                //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                Guid tmpGuid;
                MainAudioDecoder.GetClassID(out tmpGuid);
                SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
            }
        }
        else
        {
            MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);
        }

        // Add a video decoder
        SendDebugMessage("Add DTV decoder", 0);
        MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
        dc.Add(MainAudioDecoder);
        dc.Add(MainVideoDecoder);

        //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

        // Add a null renderer (temporary sink for the discovery test run).
        SendDebugMessage("Add null renderer", 0);
        NullRenderer MyNullRenderer = new NullRenderer();
        dc.Add(MyNullRenderer);
        hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
        DsError.ThrowExceptionForHR(hr);

        // Link up video through to null renderer
        SendDebugMessage("Connect video to null renderer", 0);
        // Make the second link:  Decrypts => DTV
        IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
        IPin DTVVideoInPin = DsFindPin.ByName(MainVideoDecoder, @"Video Input");
        // IPin DTVVideoInPin = DsFindPin.ByDirection(DTVVideoDecoder, PinDirection.Input, 0);  // first one should be video input?
        // NOTE(review): the decrypt->decoder video connect below is commented
        // out; presumably Render/intelligent connect completes this link during
        // the test run — confirm.
        // FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
        // 3. DTV => Null renderer
        IPin NullRInPin = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
        IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
        FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
        Marshal.ReleaseComObject(NullRInPin);
        NullRInPin = null;

        // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
        SendDebugMessage("Run graph for testing purposes", 0);
        IMediaControl tempControl = (IMediaControl)graphbuilder;
        IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
        DsError.ThrowExceptionForHR(tempControl.Pause());
        DsError.ThrowExceptionForHR(tempControl.Run());
        EventCode pEventCode;
        hr = tempEvent.WaitForCompletion(1000, out pEventCode);
        //DsError.ThrowExceptionForHR(hr);  // DO *NOT* DO THIS HERE!  THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH

        // Stop graph if necessary
        FilterState pFS;
        hr = tempControl.GetState(1000, out pFS);
        if (pFS == FilterState.Running)
        {
            DsError.ThrowExceptionForHR(tempControl.Stop());
        }

        // Remove null renderer
        hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

        // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
        AMMediaType pmt = null;
        getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
        FrameSize SourceFrameSize;
        if (pmt.formatType == FormatType.VideoInfo2)
        {
            VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
            Marshal.PtrToStructure(pmt.formatPtr, pvih2);
            int VideoWidth = pvih2.BmiHeader.Width;
            int VideoHeight = pvih2.BmiHeader.Height;
            SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
        }
        else
        {
            // Fall back to a safe default when the format is not VideoInfo2.
            SourceFrameSize = new FrameSize(320, 240);
        }

        // Free up
        DsUtils.FreeAMMediaType(pmt);
        pmt = null;

        // Link up audio
        // 2. Audio Decrypt -> Audio decoder
        IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
        FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

        // Add ASF Writer
        // Create an ASF writer filter
        SendDebugMessage("Creating ASF Writer", 0);
        WMAsfWriter asf_filter = new WMAsfWriter();
        dc.Add(asf_filter);  // CHECK FOR ERRORS
        currentOutputFilter = (IBaseFilter)asf_filter;  // class variable
        // Add the ASF filter to the graph
        hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
        DsError.ThrowExceptionForHR(hr);

        // Set the filename
        IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
        string destPathFN = fiInputFile.FullName + ".wmv";
        hr = sinkFilter.SetFileName(destPathFN, null);
        DsError.ThrowExceptionForHR(hr);

        // Make the final links:  DTV => writer
        SendDebugMessage("Linking audio/video through to decoder and writer", 0);
        IPin DTVAudioOutPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
        IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
        IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
        FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
        if (ASFVideoInputPin != null)
        {
            FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);
        }

        // Configure ASFWriter (bitrate/profile derived from the request and
        // the discovered source frame size).
        ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

        // Release pins
        SendDebugMessage("Releasing COM objects (pins)", 0);
        // dec
        Marshal.ReleaseComObject(DecAudioInPin);
        DecAudioInPin = null;
        Marshal.ReleaseComObject(DecVideoInPin);
        DecVideoInPin = null;
        Marshal.ReleaseComObject(DecVideoOutPin);
        DecVideoOutPin = null;
        Marshal.ReleaseComObject(DecAudioOutPin);
        DecAudioOutPin = null;
        // dtv
        Marshal.ReleaseComObject(MainAudioInPin);
        MainAudioInPin = null;
        Marshal.ReleaseComObject(DTVVideoInPin);
        DTVVideoInPin = null;
        Marshal.ReleaseComObject(DTVVideoOutPin);
        DTVVideoOutPin = null;
        Marshal.ReleaseComObject(DTVAudioOutPin);
        DTVAudioOutPin = null;
        // asf
        Marshal.ReleaseComObject(ASFAudioInputPin);
        ASFAudioInputPin = null;
        Marshal.ReleaseComObject(ASFVideoInputPin);
        ASFVideoInputPin = null;
    }
    catch (Exception ex)
    {
        SendDebugMessageWithException(ex.Message, ex);
        return(DSStreamResultCodes.ErrorExceptionOccurred);
    }

    return(DSStreamResultCodes.OK);
}
/// <summary> /// Create our RTSP source filter and load the /// RTSP source url /// </summary> /// <param name="pGraph">The graph the filter will live in</param> /// <param name="url">The URL to load into the filter</param> /// <returns></returns> private IBaseFilter CreateSourceFilter(IGraphBuilder pGraph, string url) { //var clsidRTSPFilter = new Guid("{B3F5D418-CDB1-441C-9D6D-2063D5538962}"); //RTSPSource.ax //var pRTSPFilter2 = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(clsidRTSPFilter)); //int hr = pGraph.AddFilter(pRTSPFilter2, "RTSP Filter"); //CheckHr(hr, "Can't add RTSP Filter to graph"); ////set source filename //var pRTSPFilter2Src = pRTSPFilter2 as IFileSourceFilter; //if (pRTSPFilter2Src == null) // CheckHr(unchecked((int)0x80004002), "Can't get IFileSourceFilter"); //if (pRTSPFilter2Src != null) // hr = pRTSPFilter2Src.Load(url, null); //CheckHr(hr, "Can't load file"); //Filter = true; IBaseFilter pSourceFilter2; int hr = pGraph.AddSourceFilter(url, "SourceFilter", out pSourceFilter2); CheckHr(hr, "Can't add source Filter to graph for url="+url); return pSourceFilter2; }
void Init(string inputFile, IBaseFilter userSourceFilter) { Reset(); if (!string.IsNullOrEmpty(inputFile) && (userSourceFilter != null)) { throw new ArgumentException("Specify only one kind of input"); } graph = new FilterGraph() as IFilterGraph2; if (null == graph) { throw new COMException("Cannot create FilterGraph"); } mediaControl = graph as IMediaControl; if (null == mediaControl) { throw new COMException("Cannot obtain IMediaControl"); } mediaEvent = graph as IMediaEventEx; if (null == mediaEvent) { throw new COMException("Cannot obtain IMediaEventEx"); } // remove reference clock IMediaFilter mf = graph as IMediaFilter; mf.SetSyncSource(null); int hr = 0; string sourceFilterInfoDumpPath = Path.Combine(Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location), "source_filter_info_dump.txt"); if (!string.IsNullOrEmpty(inputFile)) { IBaseFilter sourceFilter = null; try { hr = graph.AddSourceFilter(inputFile, "Source", out sourceFilter); DsError.ThrowExceptionForHR(hr); System.IO.File.WriteAllText(sourceFilterInfoDumpPath, Util.DumpFilterInfo(sourceFilter)); InitVideoGrabber(sourceFilter); InitAudioGrabber(sourceFilter); } finally { Util.ReleaseComObject(ref sourceFilter); } } else { hr = graph.AddFilter(userSourceFilter, "Source"); DsError.ThrowExceptionForHR(hr); System.IO.File.WriteAllText(sourceFilterInfoDumpPath, Util.DumpFilterInfo(userSourceFilter)); InitVideoGrabber(userSourceFilter); InitAudioGrabber(userSourceFilter); } }
/// <summary>
/// Builds a full playback graph for <paramref name="filename"/> around the Enhanced
/// Video Renderer (EVR): adds preferred decoders, a (possibly custom) audio renderer,
/// the source filter (falling back to the WM ASF Reader), renders all source pins,
/// applies optional WMA multichannel / WMV DXVA tweaks, wires up notification and
/// commercial-skip watching, then runs the graph.
/// NOTE(review): HRESULT sequencing and the exact ReleaseComObject ordering below are
/// load-bearing for COM lifetimes — do not reorder casually.
/// </summary>
private void PlayMovieInWindow(string filename)
{
    WindowsMediaLib.IWMReaderAdvanced2 wmReader = null;
    IBaseFilter sourceFilter = null;
    try
    {
        FileLogger.Log("PlayMovieInWindow: {0}", filename);
        lastJump = 0;
        int hr = 0;
        if (filename == string.Empty)
            return;

        this.graphBuilder = (IGraphBuilder)new FilterGraph();
        FileLogger.Log("PlayMovieInWindow: Create Graph");
        // CLSID below is the Enhanced Video Renderer.
        this.evrRenderer = FilterGraphTools.AddFilterFromClsid(this.graphBuilder, new Guid("{FA10746C-9B63-4B6C-BC49-FC300EA5F256}"), "EVR");
        if (evrRenderer != null)
        {
            FileLogger.Log("PlayMovieInWindow: Add EVR");
            SetupEvrDisplay();
            //#if DEBUG
            // Optionally register the graph in the Running Object Table for GraphEdit debugging.
            if (ps.PublishGraph)
                rot = new DsROTEntry(this.graphBuilder);
            //#endif

            // Install a site that can veto unwanted filters during graph building.
            IObjectWithSite grfSite = graphBuilder as IObjectWithSite;
            if (grfSite != null)
                grfSite.SetSite(new FilterBlocker(filename));

            string fileExt = Path.GetExtension(filename).ToLower();
            // Pre-load the user's preferred decoders for this extension.
            // Each entry is "ext;filter1;filter2;..." — a filter is either a CLSID or a friendly name.
            if (ps.PreferredDecoders != null)
            {
                foreach (string pa in ps.PreferredDecoders)
                {
                    string[] pvA = pa.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
                    if (pvA[0].ToLower() == fileExt)
                    {
                        for (int i = 1; i < pvA.Length; i++)
                        {
                            string strFilter = pvA[i].Trim();
                            IBaseFilter filter = null;
                            try
                            {
                                // GUID-shaped entries are added by CLSID, anything else by name.
                                if (Regex.IsMatch(strFilter, @"{?\w{8}-\w{4}-\w{4}-\w{4}-\w{12}}?"))
                                    filter = FilterGraphTools.AddFilterFromClsid(graphBuilder, new Guid(strFilter), strFilter);
                                else
                                    filter = FilterGraphTools.AddFilterByName(graphBuilder, FilterCategory.LegacyAmFilterCategory, strFilter);
                                if (filter != null)
                                {
                                    FileLogger.Log("Added {0} to the graph", strFilter);
                                }
                                else
                                    FileLogger.Log("{0} not added to the graph", strFilter);
                            }
                            finally
                            {
                                // The graph keeps its own reference once added.
                                if (filter != null)
                                    Marshal.ReleaseComObject(filter);
                                filter = null;
                            }
                        }
                    }
                }
            }

            // Have the graph builder construct its the appropriate graph automatically
            //hr = this.graphBuilder.RenderFile(filename, null);

            if (ps.UseCustomAudioRenderer)
            {
                m_audioRendererClsid = new Guid(ps.CustomAudioRender);
            }
            audioRenderer = FilterGraphTools.AddFilterFromClsid(graphBuilder, m_audioRendererClsid, "Audio Renderer");

            //IAVSyncClock wtf = audioRenderer as IAVSyncClock;
            //double cap;
            //hr = wtf.GetBias(out cap);
            //IMPAudioSettings arSett = audioRenderer as IMPAudioSettings;
            //if (arSett != null)
            //{
            //    AC3Encoding ac3Mode;
            //    hr = arSett.GetAC3EncodingMode(out ac3Mode);
            //    SpeakerConfig sc;
            //    hr = arSett.GetSpeakerConfig(out sc);
            //    AUDCLNT_SHAREMODE sm;
            //    hr = arSett.GetWASAPIMode(out sm);
            //    bool em;
            //    hr = arSett.GetUseWASAPIEventMode(out em);
            //    /*DeviceDefinition[] */IntPtr dc;
            //    //int count;
            //    //hr = arSett.GetAvailableAudioDevices(out dc, out count);
            //    //DsError.ThrowExceptionForHR(hr);
            //    ////DeviceDefinition[] dd = new DeviceDefinition[count];
            //    //AudioDeviceDefinition dd = (AudioDeviceDefinition)Marshal.PtrToStructure(dc, typeof(AudioDeviceDefinition));
            //    //if (dc != null)
            //    //    Marshal.ReleaseComObject(dc);
            //    hr = arSett.SetAudioDeviceById(null);
            //    //arSett.SetSpeakerMatchOutput(true);
            //    arSett.SetUseWASAPIEventMode(true);
            //    arSett.SetUseFilters((int)MPARUseFilters.ALL);
            //    arSett.SetAllowBitStreaming(true);
            //    arSett.SetAC3EncodingMode(AC3Encoding.DISABLED);
            //    arSett.SetUseTimeStretching(false);
            //}

            // Configure the MediaPortal audio renderer, when present.
            // NOTE(review): the Get* results (ac3Mode/sc/sm/em) are read but never used;
            // presumably kept for debugging — confirm before removing.
            IMPAudioRendererConfig arSett = audioRenderer as IMPAudioRendererConfig;
            if (arSett != null)
            {
                int ac3Mode;
                hr = arSett.GetInt(MPARSetting.AC3_ENCODING, out ac3Mode);
                int sc;
                hr = arSett.GetInt(MPARSetting.SPEAKER_CONFIG, out sc);
                int sm;
                hr = arSett.GetInt(MPARSetting.WASAPI_MODE, out sm);
                bool em;
                hr = arSett.GetBool(MPARSetting.WASAPI_EVENT_DRIVEN, out em);
                /*DeviceDefinition[] */
                IntPtr dc;
                //int count;
                //hr = arSett.GetAvailableAudioDevices(out dc, out count);
                //DsError.ThrowExceptionForHR(hr);
                ////DeviceDefinition[] dd = new DeviceDefinition[count];
                //AudioDeviceDefinition dd = (AudioDeviceDefinition)Marshal.PtrToStructure(dc, typeof(AudioDeviceDefinition));
                //if (dc != null)
                //    Marshal.ReleaseComObject(dc);
                hr = arSett.SetString(MPARSetting.SETTING_AUDIO_DEVICE, ps.AudioPlaybackDevice);
                //arSett.SetSpeakerMatchOutput(true);
                arSett.SetBool(MPARSetting.WASAPI_EVENT_DRIVEN, true);
                arSett.SetInt(MPARSetting.USE_FILTERS, (int)MPARUseFilters.ALL);
                arSett.SetBool(MPARSetting.ALLOW_BITSTREAMING, true);
                arSett.SetInt(MPARSetting.AC3_ENCODING, (int)AC3Encoding.DISABLED);
                arSett.SetBool(MPARSetting.ENABLE_TIME_STRETCHING, false);
            }

            //try
            //{
            // Add the source filter; on failure fall back to the WM ASF Reader
            // (hr checked manually here instead of ThrowExceptionForHR, by design).
            hr = graphBuilder.AddSourceFilter(filename, "Source", out sourceFilter);
            if (hr < 0)
            {
                //if it doesn't work before failing try to load it with the WMV reader
                sourceFilter = (IBaseFilter)new WMAsfReader();
                hr = graphBuilder.AddFilter(sourceFilter, "WM/ASF Reader");
                DsError.ThrowExceptionForHR(hr);
                hr = ((IFileSourceFilter)sourceFilter).Load(filename, null);
                DsError.ThrowExceptionForHR(hr);
                // Keep the reader interface so we can wait for streaming buffering below.
                wmReader = sourceFilter as WindowsMediaLib.IWMReaderAdvanced2;
            }

            // Render every unconnected output pin of the source filter, one at a time.
            IPin outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            while (outPin != null)
            {
                try
                {
                    hr = graphBuilder.Render(outPin);
                    DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    if (outPin != null)
                        Marshal.ReleaseComObject(outPin);
                    outPin = null;
                }
                outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            }

            // Optional: force the WMA decoder into multichannel (hi-res) output.
            // This requires disconnecting the decoder's output, removing the downstream
            // chain (except the DirectSound renderer), toggling _HIRESOUTPUT, then
            // re-adding the removed filters and re-rendering.
            if (ps.MultiChannelWMA)
            {
                FileLogger.Log("Set multichannel mode for WMA");
                IBaseFilter wmaDec = FilterGraphTools.FindFilterByName(graphBuilder, "WMAudio Decoder DMO");
                if (wmaDec != null)
                {
                    try
                    {
                        //http://msdn.microsoft.com/en-us/library/aa390550(VS.85).aspx
                        IPropertyBag bag = wmaDec as IPropertyBag;
                        if (bag != null)
                        {
                            object pVar;
                            hr = bag.Read("_HIRESOUTPUT", out pVar, null);
                            DsError.ThrowExceptionForHR(hr);
                            bool bVar = (bool)pVar;
                            FileLogger.Log("_HIRESOUTPUT = {0}", bVar);
                            if (!bVar)
                            {
                                IPin wmaOut = DsFindPin.ByDirection(wmaDec, PinDirection.Output, 0);
                                IPin cPin = null;
                                try
                                {
                                    hr = wmaOut.ConnectedTo(out cPin);
                                    DsError.ThrowExceptionForHR(hr);
                                    if (cPin != null) //cpin should never be null at this point, but lets be safe
                                    {
                                        hr = wmaOut.Disconnect();
                                        DsError.ThrowExceptionForHR(hr);
                                        // Walk downstream from the decoder, remembering the CLSIDs of
                                        // filters we remove (except the DirectSound renderer) so they
                                        // can be re-added after the property change.
                                        List<Guid> oldFilters = new List<Guid>();
                                        IBaseFilter oFilt = FilterGraphTools.GetFilterFromPin(cPin);
                                        try
                                        {
                                            while (oFilt != null)
                                            {
                                                IBaseFilter cFilter = null;
                                                try
                                                {
                                                    Guid clsid;
                                                    hr = oFilt.GetClassID(out clsid);
                                                    DsError.ThrowExceptionForHR(hr);
                                                    if (clsid != DSOUND_RENDERER)
                                                    {
                                                        oldFilters.Add(clsid);
                                                        cFilter = FilterGraphTools.GetConnectedFilter(oFilt, PinDirection.Output, 0);
                                                    }
                                                    hr = graphBuilder.RemoveFilter(oFilt);
                                                    DsError.ThrowExceptionForHR(hr);
                                                }
                                                finally
                                                {
                                                    if (oFilt != null)
                                                        Marshal.ReleaseComObject(oFilt);
                                                    oFilt = null;
                                                }
                                                oFilt = cFilter;
                                            }
                                        }
                                        finally
                                        {
                                            if (oFilt != null)
                                                Marshal.ReleaseComObject(oFilt);
                                            oFilt = null;
                                        }
                                        // Re-add the filters we removed, by CLSID.
                                        foreach (Guid addFilt in oldFilters)
                                        {
                                            IBaseFilter addMe = FilterGraphTools.AddFilterFromClsid(graphBuilder, addFilt, addFilt.ToString());
                                            if (addMe != null)
                                                Marshal.ReleaseComObject(addMe);
                                        }
                                    }
                                    // Enable hi-res (multichannel) output and rebuild the audio chain.
                                    pVar = true;
                                    hr = bag.Write("_HIRESOUTPUT", ref pVar);
                                    DsError.ThrowExceptionForHR(hr);
                                    hr = graphBuilder.Render(wmaOut);
                                    DsError.ThrowExceptionForHR(hr);
                                }
                                finally
                                {
                                    if (wmaOut != null)
                                        Marshal.ReleaseComObject(wmaOut);
                                    if (cPin != null)
                                        Marshal.ReleaseComObject(cPin);
                                }
                            }
                        }
                    }
                    catch (Exception ex)
                    {
                        FileLogger.Log("Error setting multichannel mode for WMA: {0}", ex.Message);
                    }
                    finally
                    {
                        // Drain every reference FindFilterByName handed us.
                        while (Marshal.ReleaseComObject(wmaDec) > 0) ;
                    }
                }
            }
            //}
            //finally
            //{
            //    if (sourceFilter != null)
            //        Marshal.ReleaseComObject(sourceFilter);
            //}

            // Optional: DXVA for the WMV decoder. NOTE(review): the actual property
            // set is commented out, so this block currently has no effect.
            if (ps.DXVAWMV)
            {
                FileLogger.Log("Set DXVA for WMV");
                IBaseFilter wmvDec = FilterGraphTools.FindFilterByName(graphBuilder, "WMVideo Decoder DMO");
                if (wmvDec != null)
                {
                    try
                    {
                        MediaFoundation.Misc.IPropertyStore config = wmvDec as MediaFoundation.Misc.IPropertyStore;
                        if (config != null)
                        {
                            MediaFoundation.Misc.PropVariant pv = new MediaFoundation.Misc.PropVariant();
                            //config.GetValue(MediaFoundation.Misc.WMVConst.MFPKEY_DXVA_ENABLED, pv);
                        }
                    }
                    catch (Exception ex)
                    {
                        FileLogger.Log("Error setting DXVA mode for WMV: {0}", ex.Message);
                    }
                    finally
                    {
                        while (Marshal.ReleaseComObject(wmvDec) > 0) ;
                    }
                }
            }

            SetEvrVideoMode();

            // QueryInterface for DirectShow interfaces
            this.mediaControl = (IMediaControl)this.graphBuilder;
            this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
            this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
            this.mediaPosition = (IMediaPosition)this.graphBuilder;

            // Query for video interfaces, which may not be relevant for audio files
            //this.videoWindow = this.graphBuilder as IVideoWindow;
            //this.basicVideo = this.graphBuilder as IBasicVideo;

            // Query for audio interfaces, which may not be relevant for video-only files
            this.basicAudio = this.graphBuilder as IBasicAudio;

            // Is this an audio-only file (no video component)?
            CheckVisibility();

            // Have the graph signal event via window callbacks for performance
            hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WM.GRAPH_NOTIFY, IntPtr.Zero);
            DsError.ThrowExceptionForHR(hr);

            if (!this.isAudioOnly)
            {
                // Setup the video window
                //hr = this.videoWindow.put_Owner(this.Handle);
                //DsError.ThrowExceptionForHR(hr);
                //this.evrDisplay.SetVideoWindow(this.Handle);
                //hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
                //DsError.ThrowExceptionForHR(hr);
                hr = InitVideoWindow();//1, 1);
                DsError.ThrowExceptionForHR(hr);
                GetFrameStepInterface();
            }
            else
            {
                // Initialize the default player size and enable playback menu items
                hr = InitPlayerWindow();
                DsError.ThrowExceptionForHR(hr);
                EnablePlaybackMenu(true, MediaType.Audio);
            }

            // Complete window initialization
            //CheckSizeMenu(menuFileSizeNormal);
            //this.isFullScreen = false;
            this.currentPlaybackRate = 1.0;
            UpdateMainTitle();
            this.Activate();

            //pre-roll the graph
            hr = this.mediaControl.Pause();
            DsError.ThrowExceptionForHR(hr);

            // For streaming ASF sources, block until buffering reaches 100%.
            // NOTE(review): sleepFor is computed but the loop sleeps a fixed 100 ms — confirm intent.
            if (wmReader != null)
            {
                WindowsMediaLib.PlayMode pMode;
                hr = wmReader.GetPlayMode(out pMode);
                DsError.ThrowExceptionForHR(hr);
                if (pMode == WindowsMediaLib.PlayMode.Streaming)
                {
                    int pdwPercent = 0;
                    long pcnsBuffering;
                    while (pdwPercent < 100)
                    {
                        hr = wmReader.GetBufferProgress(out pdwPercent, out pcnsBuffering);
                        DsError.ThrowExceptionForHR(hr);
                        if (pdwPercent >= 100)
                            break;
                        int sleepFor = Convert.ToInt32(pcnsBuffering / 1000);
                        Thread.Sleep(100);
                    }
                }
            }

            // Run the graph to play the media file
            hr = this.mediaControl.Run();
            DsError.ThrowExceptionForHR(hr);

            // (Re)start watching the commercial-skip sidecar file (XML or EDL flavor).
            if (commWatcher != null)
                commWatcher.Dispose();
            string commPath = string.Empty;
            if (ps.UseDtbXml)
            {
                commWatcher = new FileSystemWatcher(Commercials.XmlDirectory, Commercials.GetXmlFilename(filename));
                commPath = Path.Combine(Commercials.XmlDirectory, Commercials.GetXmlFilename(filename));
            }
            else
            {
                commWatcher = new FileSystemWatcher(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
                commPath = Path.Combine(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
            }
            ReadComm(commPath);
            commWatcher.Changed += new FileSystemEventHandler(commWatcher_Changed);
            commWatcher.Created += new FileSystemEventHandler(commWatcher_Changed);
            //commWatcher.NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.Size;
            commWatcher.EnableRaisingEvents = true;

            MoveToBookmark();
            this.currentState = PlayState.Running;
            if (isFullScreen)
                tmMouseMove.Enabled = true;
        }
        else
        {
            //MessageBox.Show("EVR cannot be loaded on this PC");
            using (EPDialog ed = new EPDialog())
                ed.ShowDialog("Error", "The Enhanced Video Renderer cannot be loaded", 20, this);
        }
    }
    finally
    {
        //if (wmReader != null)
        //    Marshal.ReleaseComObject(wmReader);
        // Drain all references to the source filter; the graph keeps it alive.
        if (sourceFilter != null)
            while (Marshal.ReleaseComObject(sourceFilter) > 0) ;
    }
}
/// <summary>
/// This method sets up the DirectShow filter graph and obtains the interfaces necessary to control playback
/// for VideoTextures created from video files. This method works for .avi, .mpeg, and .wmv files.
/// </summary>
/// <param name="filename">The .avi, .mpeg, or .wmv video file.</param>
private void SetupGraph(string filename)
{
    try
    {
        // Build the graph with FilterGraph + CaptureGraphBuilder2.
        graphBuilder = (IGraphBuilder)new FilterGraph();
        ICaptureGraphBuilder2 captureBuilder = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        int hr = captureBuilder.SetFiltergraph(graphBuilder);
        DsError.ThrowExceptionForHR(hr);

        // Source filter for the video file.
        IBaseFilter sourceFilter;
        hr = graphBuilder.AddSourceFilter(filename, filename, out sourceFilter);
        DsError.ThrowExceptionForHR(hr);

        // Sample grabber: configured by the helper, then inserted into the graph.
        ISampleGrabber grabber = (ISampleGrabber)new SampleGrabber();
        ConfigureSampleGrabber(grabber);
        hr = graphBuilder.AddFilter((IBaseFilter)grabber, "SampleGrabber");
        DsError.ThrowExceptionForHR(hr);

        // Null renderer: frames are grabbed, not shown in a window.
        IBaseFilter sinkFilter = (IBaseFilter)new NullRenderer();
        hr = graphBuilder.AddFilter(sinkFilter, "Null Renderer");
        DsError.ThrowExceptionForHR(hr);

        // Render source -> grabber -> null renderer; WMV needs an explicit
        // video media type, AVI/MPEG render with the default.
        if (vidType == VideoType.AVI || vidType == VideoType.MPEG)
        {
            hr = captureBuilder.RenderStream(null, null, sourceFilter, (IBaseFilter)grabber, sinkFilter);
        }
        else if (vidType == VideoType.WMV)
        {
            hr = captureBuilder.RenderStream(null, MediaType.Video, sourceFilter, (IBaseFilter)grabber, sinkFilter);
        }
        else
        {
            throw new Exception("Unsupported Video type: " + vidType);
        }
        DsError.ThrowExceptionForHR(hr);

        // Record width/height/stride for later frame access.
        SaveSizeInfo(grabber);

        // Playback-control interfaces live on the graph itself.
        mediaControl = graphBuilder as IMediaControl;
        mediaSeeking = graphBuilder as IMediaSeeking;
        mediaSeeking.SetTimeFormat(TimeFormat.MediaTime);

        // Duration arrives in 100-ns units; store it as milliseconds.
        long duration;
        mediaSeeking.GetDuration(out duration);
        length = (int)(duration / 10000);
    }
    catch (Exception ex)
    {
        Console.WriteLine(ex.Message);
        throw;
    }
}
/// <summary>
/// UI handler: prompts for a media file, builds a playback graph with a
/// DirectSound audio renderer and a CaptureManager EVR multi-sink video node,
/// connects the source's video pin to the video renderer, renders the remaining
/// pins, and runs the graph.
/// NOTE(review): HRESULTs (k, h) are captured but never checked, and COM objects
/// (pins, enumerators, filters) are never released — review for leaks/failures.
/// </summary>
private async void Button_Click(object sender, RoutedEventArgs e)
{
    OpenFileDialog lopenFileDialog = new OpenFileDialog();
    lopenFileDialog.AddExtension = true;
    var lresult = lopenFileDialog.ShowDialog();
    if (lresult != true)
    {
        return;
    }

    // Audio path: DirectSound renderer, grab its first (input) pin.
    IBaseFilter lDSoundRender = new DSoundRender() as IBaseFilter;
    m_pGraph.AddFilter(lDSoundRender, "Audio Renderer");
    int k = 0;
    IPin[] lAudioRendererPins = new IPin[1];
    IEnumPins ppEnum;
    k = lDSoundRender.EnumPins(out ppEnum);
    k = ppEnum.Next(1, lAudioRendererPins, IntPtr.Zero);

    // Video path: obtain an EVR output node from the CaptureManager factory.
    var lCaptureManagerEVRMultiSinkFactory = await CaptureManagerVideoRendererMultiSinkFactory.getInstance().getICaptureManagerEVRMultiSinkFactoryAsync();
    uint lMaxVideoRenderStreamCount = await lCaptureManagerEVRMultiSinkFactory.getMaxVideoRenderStreamCountAsync();
    if (lMaxVideoRenderStreamCount == 0)
    {
        return;
    }
    List<object> lOutputNodesList = await lCaptureManagerEVRMultiSinkFactory.createOutputNodesAsync(
        IntPtr.Zero,
        mEVRDisplay.Surface.texture,
        1);
    if (lOutputNodesList.Count == 0)
    {
        return;
    }
    IBaseFilter lVideoMixingRenderer9 = (IBaseFilter)lOutputNodesList[0];
    var h = m_pGraph.AddFilter(lVideoMixingRenderer9, "lVideoMixingRenderer9");
    IPin[] lVideoRendererPin = new IPin[1];
    k = lVideoMixingRenderer9.EnumPins(out ppEnum);
    k = ppEnum.Next(1, lVideoRendererPin, IntPtr.Zero);

    // Source: let the graph pick the appropriate filter for the chosen file.
    IBaseFilter m_SourceFilter = null;
    m_pGraph.AddSourceFilter(lopenFileDialog.FileName, null, out m_SourceFilter);

    // Walk every source output pin: video pins get connected directly to the
    // video renderer pin; every pin is then handed to Render for the rest.
    // NOTE(review): Connect may run for each media type of the pin, and Render
    // is called even for the already-connected pin — confirm this is intended.
    IEnumPins lEnumPins = null;
    m_SourceFilter.EnumPins(out lEnumPins);
    IPin[] lPins = new IPin[1];
    while (lEnumPins.Next(1, lPins, IntPtr.Zero) == 0)
    {
        IEnumMediaTypes lIEnumMediaTypes;
        lPins[0].EnumMediaTypes(out lIEnumMediaTypes);
        AMMediaType[] ppMediaTypes = new AMMediaType[1];
        while (lIEnumMediaTypes.Next(1, ppMediaTypes, IntPtr.Zero) == 0)
        {
            var gh = ppMediaTypes[0].subType;
            if (ppMediaTypes[0].majorType == DirectShowLib.MediaType.Video)
            {
                k = m_pGraph.Connect(lPins[0], lVideoRendererPin[0]);
            }
        }
        foreach (var item in lPins)
        {
            k = m_pGraph.Render(item);
        }
    }

    // Start playback.
    IMediaControl lIMediaControl = m_pGraph as IMediaControl;
    k = lIMediaControl.Run();
}
/// <summary>
/// Builds the audio-grabbing filter graph for the current <c>fileName</c>:
/// source filter -> sample grabber (constrained to the given audio subtype)
/// -> rendered audio chain, optionally replacing the final audio renderer
/// with a null renderer. On any failure the graph is torn down and the
/// <c>AudioSourceError</c> event is raised instead of throwing.
/// </summary>
/// <param name="audioSubType">Audio media subtype the sample grabber should accept.</param>
private void CreateFilters(Guid audioSubType)
{
    isValid = false;
    int r;

    // grabber
    grabberAudio = new GrabberAudio(this);

    // objects
    graphObject = null;
    grabberObjectAudio = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source device's object
        r = graph.AddSourceFilter(fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObjectAudio = Activator.CreateInstance(type);
        sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
        grabberBaseAudio = (IBaseFilter)grabberObjectAudio;

        // add grabber filters to graph
        r = graph.AddFilter(grabberBaseAudio, "grabberAudio");

        // set media type — restricts what the grabber's input pin will accept
        AMMediaType mediaType = new AMMediaType
        {
            MajorType = MediaType.Audio,
            SubType = audioSubType,
            FormatType = FormatType.WaveEx
        };
        r = sampleGrabberAudio.SetMediaType(mediaType);

        // render pin
        // TODO: Improve this! We can't always assume that the second pin will always be the audio pin -- we need to find it.
        IPin sbPin = Tools.GetOutPin(sourceBase, 1);
        if (sbPin == null)
        {
            sbPin = Tools.GetOutPin(sourceBase, 0);
        }
        r = graph.Render(sbPin);

        // The grabber's output must have ended up connected, and its negotiated
        // media type carries the wave format we need.
        IPin outPin = Tools.GetOutPin(grabberBaseAudio, 0);
        AMMediaType mt = new AMMediaType();
        r = outPin.ConnectionMediaType(mt);
        if (!Tools.IsPinConnected(outPin))
        {
            throw new ApplicationException("Failed obtaining media information");
        }

        // disable clock, if someone requested it
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            r = mediaFilter.SetSyncSource(null);
        }

        // Capture the negotiated WAVEFORMATEX for callers.
        wavFormat = new WaveFormatEx();
        Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
        Marshal.ReleaseComObject(outPin);

        // configure sample grabber: streaming callback mode, no buffering
        r = sampleGrabberAudio.SetBufferSamples(false);
        r = sampleGrabberAudio.SetOneShot(false);
        r = sampleGrabberAudio.SetCallback(grabberAudio, 1);

        if (useNullRenderer)
        {
            // Get a list of all the filters connected to the sample grabber
            List<Tools.FilterInfo2> filtersInfo2 = new List<Tools.FilterInfo2>();
            Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(grabberBaseAudio, PinDirection.Output, 0);
            while (true)
            {
                filtersInfo2.Add(testFilterInfo2);
                testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                if (testFilterInfo2.Filter == null)
                {
                    break;
                }
            }

            // Remove the last filter, the audio renderer
            r = graph.RemoveFilter(filtersInfo2[filtersInfo2.Count - 1].Filter);

            // create null renderer
            type = Type.GetTypeFromCLSID(Clsid.NullRenderer);
            if (type == null)
            {
                throw new ApplicationException("Failed creating null renderer");
            }
            nullRendererObjectAudio = Activator.CreateInstance(type);
            IBaseFilter nullRendererAudio = (IBaseFilter)nullRendererObjectAudio;

            // add grabber filters to graph
            r = graph.AddFilter(nullRendererAudio, "nullRenderer");

            // Reconnect: grabber output -> null renderer input.
            //outPin = Tools.GetOutPin(filtersInfo2[filtersInfo2.Count - 2].Filter, 0);
            outPin = Tools.GetOutPin(grabberBaseAudio, 0);
            IPin inPin = Tools.GetInPin(nullRendererAudio, 0);
            if (graph.Connect(outPin, inPin) < 0)
            {
                throw new ApplicationException("Failed obtaining media audio information");
            }
            Marshal.ReleaseComObject(outPin);
            Marshal.ReleaseComObject(inPin);
        }

        // configure video window — keep any video render window hidden
        IVideoWindow window = (IVideoWindow)graphObject;
        if (window != null)
        {
            window.put_AutoShow(false);
            window = null;
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // get media seek control
        mediaSeekControl = (IMediaSeeking)graphObject;
        mediaSeekControl.SetTimeFormat(TimeFormat.MediaTime);

        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;

        // get media audio control
        basicAudio = (IBasicAudio)graphObject;

        isValid = true;
    }
    catch (Exception exception)
    {
        DestroyFilters();

        // provide information to clients
        AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(exception.Message));
    }
}
/*
 * TIVO Files Pin Mapping (pin name between ||) (NOTE: XXXX changes from each machine and AC3 changes if the audio codec changes)
 * Audio -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |AC3 (PID XXXX @ Prog# 1)| -> Dump |Input|
 * Video -> Source Pin |Output| -> MainConcept MPEG DeMultiplexer |Input| |Video (PID XXXX @ Prog# 1)| -> Dump |Input|
 */
/// <summary>
/// Builds the extraction graph: adds the DirectShow source filter for the input
/// file (for .tivo files, drives Render to materialize the TiVO demux chain and
/// then strips the redundant downstream filters), then walks every output pin's
/// media types and connects a dump filter for each video/audio/subtitle stream
/// selected by <c>_extractMediaType</c>.
/// </summary>
public void BuildGraph()
{
    int hr;
    IntPtr fetched = IntPtr.Zero;
    IntPtr fetched2 = IntPtr.Zero;
    IEnumPins FilterPins;
    IPin[] pins = new IPin[1];
    string PinID;

    // TiVO Directshow filters are only accessible through userspace otherwise decryption fails, so if we are running the engine as a service (instead of command line) we should prompt the user
    if ((_Ext == "tivo") && GlobalDefs.IsEngineRunningAsService)
    {
        _jobLog.WriteEntry(this, "You need to start MCEBuddy engine as a Command line program. TiVO Desktop Directshow decryption filters do not work with a Windows Service.", Log.LogEntryType.Error);
    }

    // Create the source filter for dvrms or wtv or TIVO (will automatically connect to TIVODecryptorTag in source itself)
    _jobLog.WriteEntry(this, "Loading file using DirectShow source filter", Log.LogEntryType.Debug);
    hr = _gb.AddSourceFilter(_SourceFile, "Source Filter", out _SourceF);
    checkHR(hr);

    // If this is a TIVO file: while the source filter automatically decrypts the input, we need to connect the MPEG demultiplexer to get the audio and video output pins
    if (_Ext == "tivo")
    {
        IPin PinOut, PinIn;
        IntPtr ptr;
        PinInfo demuxPinInfo;
        List<IBaseFilter> filterList = new List<IBaseFilter>();

        // Check if the source filter is a TiVO source filter (otherwise sometimes it tries to use the normal source filter which will fail since the stream in encrypted)
        string vendorInfo;
        FilterInfo filterInfo;
        _SourceF.QueryFilterInfo(out filterInfo);
        _SourceF.QueryVendorInfo(out vendorInfo);
        _jobLog.WriteEntry(this, "TiVO Source filter loaded by Directshow -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
        if (vendorInfo == null || !vendorInfo.ToLower().Contains("tivo"))
        {
            string exception = "";

            // Check if you are running 64Bit MCEBuddy, TiVO needs 32bit MCEBuddy since TiVO directshow dll are 32bit and can only be loaded by 32bit processes
            if (IntPtr.Size == 8)
            {
                exception += "You need to run 32bit MCEBuddy, TiVO Directshow fiters cannot be accessed by a 64bit program.";
            }
            else
            {
                exception += "TiVO Desktop installation not detected by Windows DirectShow.";
            }

            throw new Exception(exception); // Get out of here and let the parent know something is wrong
        }

        hr = _SourceF.FindPin("Output", out PinOut); // Get the Source filter pinOut |Output|
        checkHR(hr);

        // When TIVO desktop is installed, Render automatically builds the filter graph with the necessary demuxing filters - we cannot manually add the MainConcept demux filter since the class isn't registered but somehow Render is able to find it and load it (along with other redundant filters like DTV, audio etc which we need to remove)
        _jobLog.WriteEntry(this, "DirectShow building TiVO filter chain", Log.LogEntryType.Debug);
        hr = _gb.Render(PinOut);
        checkHR(hr);

        hr = PinOut.ConnectedTo(out ptr); // Find out which input Pin (Mainconcept Demux filter) the output of the Source Filter is connected to
        checkHR(hr);
        PinIn = (IPin)Marshal.GetObjectForIUnknown(ptr);
        hr = PinIn.QueryPinInfo(out demuxPinInfo); // Get the mainconcept demux filter from the pin
        checkHR(hr);
        demuxPinInfo.filter.QueryFilterInfo(out filterInfo);
        demuxPinInfo.filter.QueryVendorInfo(out vendorInfo);
        _jobLog.WriteEntry(this, "Checking downstream TiVO filter chain starting with TiVO Demux filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);

        // Get the list of all downstream (redundant) filters (like DTV, Audio, video render etc) from the demux filter that were added by the automatic Render function above (if there are no downstream filters, then TIVO desktop is not installed)
        if (!GetFilterChain(demuxPinInfo.filter, PinDirection.Output, filterList))
        {
            throw new Exception("Unable to get TIVO filter chain");
        }

        // Now remove all the filters in the chain downstream after the demux filter from the graph builder (we dont' need them, we will add out own filters later)
        _jobLog.WriteEntry(this, "Removing redundant filters from TiVO filter chain", Log.LogEntryType.Debug);
        foreach (IBaseFilter filter in filterList)
        {
            filter.QueryFilterInfo(out filterInfo);
            filter.QueryVendorInfo(out vendorInfo);
            _jobLog.WriteEntry(this, "Removing filter -> " + filterInfo.achName + " (" + vendorInfo + ")", Log.LogEntryType.Debug);
            _gb.RemoveFilter(filter);
            Marshal.FinalReleaseComObject(filter); // Release the COM object
        }

        // Now the TIVO MainConcept Demux Filter is our new "Source" filter
        _SourceF = demuxPinInfo.filter;
    }

    // TODO: We need to find a way to insert a filter which can allow us to select audio streams (e.g. LAV filter, currently it only allows us access to the default audio stream and not multiple audio streams)
    // Cycle through pins, connecting as appropriate
    hr = _SourceF.EnumPins(out FilterPins);
    checkHR(hr);
    while (FilterPins.Next(pins.Length, pins, fetched) == 0)
    {
        IntPtr ptypes = Marshal.AllocCoTaskMem(Marshal.SizeOf(typeof(IntPtr)));
        AMMediaType mtypes;
        IEnumMediaTypes enummtypes;
        IntPtr ptrEnum;
        pins[0].EnumMediaTypes(out ptrEnum);
        enummtypes = (IEnumMediaTypes)Marshal.GetObjectForIUnknown(ptrEnum);
        while (enummtypes.Next(1, ptypes, fetched2) == 0)
        {
            /* Extract Audio, Video or Subtitle streams -> References:
             * http://nate.deepcreek.org.au/svn/DigitalWatch/trunk/bin/MediaTypes.txt
             * http://msdn.microsoft.com/en-us/library/ms932033.aspx
             * https://sourceforge.net/p/tsubget/home/Dumping%20a%20Stream/
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd695343(v=vs.85).aspx
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd390660(v=vs.85).aspx
             * http://msdn.microsoft.com/en-us/library/windows/desktop/dd407354(v=vs.85).aspx
             * http://whrl.pl/RcRv5p (extracting Teletext from WTV/DVRMS)
             */
            // Next() wrote a pointer to an AMMediaType into ptypes; marshal it out.
            IntPtr ptrStructure = Marshal.ReadIntPtr(ptypes);
            mtypes = (AMMediaType)Marshal.PtrToStructure(ptrStructure, typeof(AMMediaType));
            if ((mtypes.majorType == MediaType.Video) ||
                (mtypes.majorType == MediaType.Audio) ||
                (mtypes.majorType == MediaType.Mpeg2PES) ||
                (mtypes.majorType == MediaType.Stream) ||
                (mtypes.majorType == MediaType.AuxLine21Data) ||
                (mtypes.majorType == MediaType.VBI) ||
                (mtypes.majorType == MediaType.MSTVCaption) ||
                (mtypes.majorType == MediaType.DTVCCData) ||
                (mtypes.majorType == MediaType.Mpeg2Sections && mtypes.subType == MediaSubType.None && mtypes.formatType == FormatType.None))
            {
                // Empty DumpFileName doubles as the "nothing selected for this pin" flag.
                string DumpFileName = "";
                if ((mtypes.majorType == MediaType.Video) && ((_extractMediaType & ExtractMediaType.Video) != 0)) // Video
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_VIDEO");
                    _VideoPart = DumpFileName;
                    _jobLog.WriteEntry(this, "Found Video stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }
                else if (((mtypes.majorType == MediaType.Audio) || // Audio types https://msdn.microsoft.com/en-us/library/windows/desktop/dd390676(v=vs.85).aspx
                    ((mtypes.majorType == MediaType.Mpeg2PES) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.DTS) || (mtypes.subType == MediaSubType.DvdLPCMAudio) || (mtypes.subType == MediaSubType.Mpeg2Audio))) ||
                    ((mtypes.majorType == MediaType.Stream) && ((mtypes.subType == MediaSubType.DolbyAC3) || (mtypes.subType == MediaSubType.MPEG1Audio) || (mtypes.subType == MediaSubType.Mpeg2Audio) || (mtypes.subType == MediaSubType.DolbyDDPlus) || (mtypes.subType == MediaSubType.MpegADTS_AAC) || (mtypes.subType == MediaSubType.MpegLOAS)))
                    ) && ((_extractMediaType & ExtractMediaType.Audio) != 0))
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_AUDIO" + AudioParts.Count.ToString());
                    _AudioParts.Add(DumpFileName);
                    _jobLog.WriteEntry(this, "Found Audio stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }
                else if ((_extractMediaType & ExtractMediaType.Subtitle) != 0) // Subtitles
                {
                    DumpFileName = Path.Combine(_workPath, Path.GetFileNameWithoutExtension(_SourceFile) + "_SUBTITLE" + SubtitleParts.Count.ToString());
                    SubtitleParts.Add(DumpFileName);
                    _jobLog.WriteEntry(this, "Found Subtitle stream, extracting -> " + DumpFileName, Log.LogEntryType.Debug);
                }

                if (!String.IsNullOrWhiteSpace(DumpFileName)) // If we are asked to extract something
                {
                    hr = pins[0].QueryId(out PinID);
                    ConnectDecryptedDump(PinID, DumpFileName);
                }
            }
            else
            {
                // Debug - looking for more subtitle types (very poorly documented by Microsoft)
                Guid type = mtypes.majorType;
                Guid subtype = mtypes.subType;
                Guid formattyype = mtypes.formatType;
            }
        }
        Marshal.FreeCoTaskMem(ptypes); // Free up the memory
    }
}
/// <summary>
/// Builds and runs an EVR-based playback graph for <paramref name="filename"/>:
/// adds the EVR and any user-preferred decoders, lets the graph render every
/// source output pin, wires DirectShow control interfaces and window
/// notifications, pre-rolls, starts playback, and begins watching the EDL
/// commercial file for changes.
/// </summary>
internal void PlayMovieInWindow(string filename)
{
    FileLogger.Log("PlayMovieInWindow: {0}", filename);
    lastJump = 0;
    int hr = 0;
    if (filename == string.Empty)
        return;

    this.graphBuilder = (IGraphBuilder)new FilterGraph();
    FileLogger.Log("PlayMovieInWindow: Create Graph");
    // CLSID below is the Enhanced Video Renderer.
    this.evrRenderer = FilterGraphTools.AddFilterFromClsid(this.graphBuilder, new Guid("{FA10746C-9B63-4B6C-BC49-FC300EA5F256}"), "EVR");
    if (evrRenderer != null)
    {
        FileLogger.Log("PlayMovieInWindow: Add EVR");
        SetupEvrDisplay();
        //#if DEBUG
        // Optionally publish the graph to the ROT for GraphEdit inspection.
        if (ps.PublishGraph)
            rot = new DsROTEntry(this.graphBuilder);
        //#endif

        // Install a site that can veto unwanted filters during graph building.
        IObjectWithSite grfSite = graphBuilder as IObjectWithSite;
        if (grfSite != null)
            grfSite.SetSite(new FilterBlocker(filename));

        string fileExt = Path.GetExtension(filename).ToLower();
        // Pre-load the user's preferred decoders for this extension.
        // Each entry is "ext;filter1;filter2;..." — a filter is either a CLSID or a friendly name.
        if (ps.PreferredDecoders != null)
        {
            foreach (string pa in ps.PreferredDecoders)
            {
                string[] pvA = pa.Split(new char[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
                if (pvA[0].ToLower() == fileExt)
                {
                    for (int i = 1; i < pvA.Length; i++)
                    {
                        string strFilter = pvA[i].Trim();
                        IBaseFilter filter = null;
                        try
                        {
                            // GUID-shaped entries are added by CLSID, anything else by name.
                            if (Regex.IsMatch(strFilter, @"{?\w{8}-\w{4}-\w{4}-\w{4}-\w{12}}?"))
                                filter = FilterGraphTools.AddFilterFromClsid(graphBuilder, new Guid(strFilter), strFilter);
                            else
                                filter = FilterGraphTools.AddFilterByName(graphBuilder, FilterCategory.LegacyAmFilterCategory, strFilter);
                            if (filter != null)
                            {
                                FileLogger.Log("Added {0} to the graph", strFilter);
                            }
                            else
                                FileLogger.Log("{0} not added to the graph", strFilter);
                        }
                        finally
                        {
                            // The graph keeps its own reference once added.
                            if (filter != null)
                                Marshal.ReleaseComObject(filter);
                            filter = null;
                        }
                    }
                }
            }
        }

        // Have the graph builder construct its the appropriate graph automatically
        //hr = this.graphBuilder.RenderFile(filename, null);

        // Add the source filter and render each unconnected output pin in turn.
        IBaseFilter sourceFilter = null;
        try
        {
            hr = graphBuilder.AddSourceFilter(filename, "Source", out sourceFilter);
            DsError.ThrowExceptionForHR(hr);
            IPin outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            while (outPin != null)
            {
                try
                {
                    hr = graphBuilder.Render(outPin);
                    DsError.ThrowExceptionForHR(hr);
                }
                finally
                {
                    if (outPin != null)
                        Marshal.ReleaseComObject(outPin);
                    outPin = null;
                }
                outPin = DsFindPin.ByConnectionStatus(sourceFilter, PinConnectedStatus.Unconnected, 0);
            }
        }
        finally
        {
            if (sourceFilter != null)
                Marshal.ReleaseComObject(sourceFilter);
        }

        SetEvrVideoMode();

        // QueryInterface for DirectShow interfaces
        this.mediaControl = (IMediaControl)this.graphBuilder;
        this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
        this.mediaSeeking = (IMediaSeeking)this.graphBuilder;
        this.mediaPosition = (IMediaPosition)this.graphBuilder;

        // Query for video interfaces, which may not be relevant for audio files
        //this.videoWindow = this.graphBuilder as IVideoWindow;
        //this.basicVideo = this.graphBuilder as IBasicVideo;

        // Query for audio interfaces, which may not be relevant for video-only files
        this.basicAudio = this.graphBuilder as IBasicAudio;

        // Is this an audio-only file (no video component)?
        CheckVisibility();

        // Have the graph signal event via window callbacks for performance
        hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WM.GRAPH_NOTIFY, IntPtr.Zero);
        DsError.ThrowExceptionForHR(hr);

        // Setup the video window
        //hr = this.videoWindow.put_Owner(this.Handle);
        //DsError.ThrowExceptionForHR(hr);
        this.evrDisplay.SetVideoWindow(container.Handle);
        //hr = this.videoWindow.put_WindowStyle(WindowStyle.Child | WindowStyle.ClipSiblings | WindowStyle.ClipChildren);
        //DsError.ThrowExceptionForHR(hr);
        hr = InitVideoWindow(1, 1);
        DsError.ThrowExceptionForHR(hr);
        GetFrameStepInterface();

        // Complete window initialization
        //CheckSizeMenu(menuFileSizeNormal);
        //this.isFullScreen = false;
        this.currentPlaybackRate = 1.0;
        //UpdateMainTitle();
        container.Focus();

        //pre-roll the graph
        hr = this.mediaControl.Pause();
        DsError.ThrowExceptionForHR(hr);

        // Run the graph to play the media file
        hr = this.mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);

        // Watch the EDL commercial file next to the media and react to edits.
        if (commWatcher != null)
            commWatcher.Dispose();
        string commPath = Path.Combine(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
        ReadComm(commPath);
        commWatcher = new FileSystemWatcher(Path.GetDirectoryName(filename), Commercials.GetEdlFilename(filename));
        commWatcher.Changed += new FileSystemEventHandler(commWatcher_Changed);
        commWatcher.Created += new FileSystemEventHandler(commWatcher_Changed);
        //commWatcher.NotifyFilter = NotifyFilters.LastWrite | NotifyFilters.Size;
        commWatcher.EnableRaisingEvents = true;

        MoveToBookmark();
        this.currentState = PlayState.Running;
        //if (isFullScreen)
        //    tmMouseMove.Enabled = true;
    }
    else
    {
        //MessageBox.Show("EVR cannot be loaded on this PC");
        using (EPDialog ed = new EPDialog())
            ed.ShowDialog("EVR Error", "The Enhanced Video Renderer cannot be loaded on this PC", 30);
    }
}
/// <summary>
/// Builds the DirectShow filter graph:
/// file source -> sample grabber -> null renderer.
/// Pin connections use IGraphBuilder.Connect, so intermediate transform
/// filters may be inserted by intelligent connect. On success the filters,
/// pins, frame size and an IMediaSeeking reference are stored in fields.
/// (Comments translated from Japanese.)
/// </summary>
/// <exception cref="DSLab.CxDSException">Wraps any failure during graph construction.</exception>
public virtual void Setup()
{
    // Tear down any previously built graph before rebuilding.
    this.Dispose();
    try
    {
        // Filter graph.
        // CoCreateInstance
        GraphBuilder = (IGraphBuilder)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_FilterGraph)));

        #region Add filters.
        // File source filter.
        IBaseFilter capture = null;
        GraphBuilder.AddSourceFilter(SourceFile, "CaptureFilter", ref capture);
        if (capture == null)
        {
            throw new System.IO.IOException();
        }

#if false
        // DMO wrapper filter.
        // https://msdn.microsoft.com/ja-jp/library/cc371140.aspx
        IBaseFilter dmo = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_DMOWrapperFilter)));
        if (dmo != null)
        {
            //// Mpeg4 Decoder DMO
            //// F371728A-6052-4D47-827C-D039335DFE0A
            //// 4A69B442-28BE-4991-969C-B500ADF5D8A8
            //// mpg4decd.dll [C:\Windows\System32, C:\Windows\SysWOW64]
            var idmo = (IDMOWrapperFilter)dmo;
            idmo.Init(new Guid("F371728A-6052-4D47-827C-D039335DFE0A"), new Guid("4A69B442-28BE-4991-969C-B500ADF5D8A8"));
            idmo = null;
            this.GraphBuilder.AddFilter(dmo, "Mpeg4 Decoder DMO");
        }
#endif

#if false
        // Avi Splitter
        IBaseFilter splitter = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVISplitter)));
        if (splitter == null)
        {
            throw new System.IO.IOException();
        }
        this.GraphBuilder.AddFilter(splitter, "Avi Splitter");

        // Avi Decompressor
        IBaseFilter decompressor = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_AVIDec)));
        if (decompressor == null)
        {
            throw new System.IO.IOException();
        }
        this.GraphBuilder.AddFilter(decompressor, "Avi Decompressor");
#endif

        // Sample grabber.
        IBaseFilter grabber = (IBaseFilter)CreateSampleGrabber();
        if (grabber == null)
        {
            throw new System.IO.IOException();
        }
        this.GraphBuilder.AddFilter(grabber, "SampleGrabber");

        // Renderer (null renderer — frames are consumed via the grabber callback).
        IBaseFilter renderer = (IBaseFilter)Activator.CreateInstance(Type.GetTypeFromCLSID(new Guid(GUID.CLSID_NullRenderer)));
        if (renderer == null)
        {
            throw new System.IO.IOException();
        }
        this.GraphBuilder.AddFilter(renderer, "Renderer");
        #endregion

        #region Find pins.
        IPin capture_out = DSLab.Axi.FindPin(capture, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin grabber_in = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_INPUT);
        IPin grabber_out = DSLab.Axi.FindPin(grabber, 0, PIN_DIRECTION.PINDIR_OUTPUT);
        IPin renderer_in = DSLab.Axi.FindPin(renderer, 0, PIN_DIRECTION.PINDIR_INPUT);
        #endregion

        #region Connect pins.
        // IGraphBuilder.Connect may insert intermediate filters automatically.
        GraphBuilder.Connect(capture_out, grabber_in);
        GraphBuilder.Connect(grabber_out, renderer_in);
        #endregion

        #region Store: interfaces.
        CaptureFilter = capture;
        CaptureOutPin = capture_out;
        SampleGrabber = (ISampleGrabber)grabber;
        SampleGrabberInPin = grabber_in;
        SampleGrabberOutPin = grabber_out;
        Renderer = renderer;
        RendererInPin = renderer_in;
        #endregion

        #region Store: frame size.
        // Width/height taken from the grabber's connected media type;
        // Abs() because biHeight can be negative for top-down bitmaps.
        VIDEOINFOHEADER vinfo = DSLab.Axi.GetVideoInfo(SampleGrabber);
        this.SampleGrabberCB.BitmapInfo = vinfo.bmiHeader;
        this.SampleGrabberCB.FrameSize = new Size(
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biWidth),
            System.Math.Abs(this.SampleGrabberCB.BitmapInfo.biHeight)
            );
        #endregion

        #region Extract interfaces:
        {
            // Walk all filters in the graph and keep the one exposing IMediaSeeking.
            DSLab.IGraphBuilder graph = this.GraphBuilder;
            DSLab.IEnumFilters filters = null;
            DSLab.IBaseFilter filter = null;
            int fetched = 0;
            int hr = graph.EnumFilters(ref filters);
            while (filters.Next(1, ref filter, ref fetched) == (int)DSLab.HRESULT.S_OK)
            {
                if (fetched == 0)
                {
                    break;
                }
                if (filter is DSLab.IMediaSeeking)
                {
                    // For seek operations.
                    // NOTE(review): if several filters expose IMediaSeeking, each match
                    // overwrites Seeking without releasing the previous reference — confirm intended.
                    Seeking = (DSLab.IMediaSeeking)filter;
                }
                else
                {
                    // Release filters we do not keep.
                    Marshal.ReleaseComObject(filter);
                    filter = null;
                }
            }
            // Release the enumerator.
            Marshal.ReleaseComObject(filters);
        }
        #endregion

        // DEBUG
#if DEBUG
        DebugPrint(this.GraphBuilder);
#endif
    }
    catch (Exception ex)
    {
        throw new DSLab.CxDSException(ex);
    }
}
/// <summary>
/// Determines the video frame format of a stream-buffer/recorded-TV file.
/// Builds a graph (source -> decrypt -> MPEG decoder -> null renderer),
/// briefly runs it (Pause/Run/WaitForCompletion(1000)/Pause/Stop), then
/// re-renders the decoder output into a fresh null renderer and reads the
/// connection's media type.
/// </summary>
/// <param name="pathToFile">Path of the media file to open.</param>
/// <returns>
/// The <see cref="VideoInfoHeader2"/> of the rendered video connection when its
/// format type is VideoInfo2; otherwise null.
/// </returns>
public static VideoInfoHeader2 GetSBEFrameSize(string pathToFile)
{
    int hr = 0;
    IGraphBuilder graph = null;
    IBaseFilter capFilter = null;
    IBaseFilter nRender = null;
    try
    {
        graph = (IGraphBuilder) new FilterGraph();
        hr = graph.AddSourceFilter(pathToFile, "Source", out capFilter);
        DsError.ThrowExceptionForHR(hr);
#if DEBUG
        // Register the graph in the Running Object Table so GraphEdit can attach (debug builds only).
        using (DsROTEntry rot = new DsROTEntry(graph))
        {
#endif
        IPin vPin = null;
        IBaseFilter dec = null;
        IPin sgIn = null;
        IBaseFilter mpegDec = null;
        try
        {
            // Decrypt filter for protected recordings.
            dec = (IBaseFilter) new DTFilter();
            hr = graph.AddFilter(dec, "Decrypt");
            DsError.ThrowExceptionForHR(hr);

            nRender = (IBaseFilter) new NullRenderer();
            hr = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
            DsError.ThrowExceptionForHR(hr);

            // PBDA decrypt filter: only needs to be present in the graph, so the
            // local reference is released immediately after adding it.
            IBaseFilter dec1 = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Decrypt.DTFilterPBDA, ref graph, "Decrypt1");
            if (dec1 != null)
            {
                Marshal.ReleaseComObject(dec1);
            }
            dec1 = null;

            mpegDec = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Video.VideoDecoderMpeg, ref graph, "MS MPEG Decoder");
            sgIn = DsFindPin.ByDirection(mpegDec, PinDirection.Input, 0);

            // Scan the source filter's output pins for the first video pin that
            // connects to the MPEG decoder's input.
            IEnumPins ppEnum;
            IPin[] pPins = new IPin[1];
            hr = capFilter.EnumPins(out ppEnum);
            DsError.ThrowExceptionForHR(hr);
            try
            {
                while (ppEnum.Next(1, pPins, IntPtr.Zero) == 0)
                {
                    IEnumMediaTypes emtDvr = null;
                    AMMediaType[] amtDvr = new AMMediaType[1];
                    try
                    {
                        // Only the pin's first advertised media type is examined.
                        pPins[0].EnumMediaTypes(out emtDvr);
                        hr = emtDvr.Next(1, amtDvr, IntPtr.Zero);
                        DsError.ThrowExceptionForHR(hr);
                        if (amtDvr[0].majorType == MediaType.Video)
                        {
                            if (graph.Connect(pPins[0], sgIn) >= 0)
                            {
                                // Keep the connected pin; released in the outer finally.
                                vPin = pPins[0];
                                break;
                            }
                        }
                        // Not the pin we want — release it.
                        if (pPins[0] != null)
                        {
                            Marshal.ReleaseComObject(pPins[0]);
                        }
                    }
                    finally
                    {
                        if (emtDvr != null)
                        {
                            Marshal.ReleaseComObject(emtDvr);
                        }
                        DsUtils.FreeAMMediaType(amtDvr[0]);
                    }
                }
            }
            finally
            {
                if (ppEnum != null)
                {
                    Marshal.ReleaseComObject(ppEnum);
                }
            }
            FilterGraphTools.RenderPin(graph, mpegDec, "Video Output 1");
        }
        finally
        {
            // Release local COM references; the filters remain in the graph.
            if (vPin != null)
            {
                Marshal.ReleaseComObject(vPin);
            }
            if (dec != null)
            {
                Marshal.ReleaseComObject(dec);
            }
            if (sgIn != null)
            {
                Marshal.ReleaseComObject(sgIn);
            }
            if (mpegDec != null)
            {
                Marshal.ReleaseComObject(mpegDec);
            }
        }

        // Briefly run the graph so the connections negotiate their final formats.
        EventCode ec;
        IMediaControl mControl = graph as IMediaControl;
        IMediaEvent mEvent = graph as IMediaEvent;
        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Run();
        DsError.ThrowExceptionForHR(hr);
        // Wait up to 1 second; a timeout here is not treated as an error.
        hr = mEvent.WaitForCompletion(1000, out ec);
        //DsError.ThrowExceptionForHR(hr);
        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Stop();
        DsError.ThrowExceptionForHR(hr);

        // Swap in a fresh null renderer, re-render the decoder output,
        // and read the negotiated media type from the connection.
        IPin mpgOut = null;
        sgIn = null;
        AMMediaType mt = new AMMediaType();
        try
        {
            sgIn = DsFindPin.ByDirection(nRender, PinDirection.Input, 0);
            if (sgIn != null)
            {
                hr = sgIn.ConnectedTo(out mpgOut);
                DsError.ThrowExceptionForHR(hr);
                hr = graph.RemoveFilter(nRender);
                DsError.ThrowExceptionForHR(hr);
                Marshal.ReleaseComObject(nRender);
                nRender = null;
                nRender = (IBaseFilter) new NullRenderer();
                hr = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
                DsError.ThrowExceptionForHR(hr);
                hr = graph.Render(mpgOut);
                DsError.ThrowExceptionForHR(hr);
                hr = mpgOut.ConnectionMediaType(mt);
                DsError.ThrowExceptionForHR(hr);
                if (mt.formatType == FormatType.VideoInfo2)
                {
                    VideoInfoHeader2 vih = (VideoInfoHeader2)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader2));
                    return(vih);
                }
            }
        }
        finally
        {
            DsUtils.FreeAMMediaType(mt);
            if (mpgOut != null)
            {
                Marshal.ReleaseComObject(mpgOut);
            }
            if (sgIn != null)
            {
                Marshal.ReleaseComObject(sgIn);
            }
        }
#if DEBUG
        }
#endif
    }
    finally
    {
        if (nRender != null)
        {
            Marshal.ReleaseComObject(nRender);
        }
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (graph != null)
        {
            // Force the graph's RCW reference count to zero.
            while (Marshal.ReleaseComObject(graph) > 0)
            {
                ;
            }
        }
    }
    return(null);
}
/// <summary>
/// Worker thread: builds a DirectShow graph that opens <c>fileName</c>,
/// forces RGB24 through a sample grabber whose callback feeds frames to
/// a <c>Grabber</c>, optionally renders the grabber output, runs the graph,
/// and pumps graph events until end-of-stream or <c>stopEvent</c> is set.
/// Raises <c>VideoSourceError</c> on failure and always raises
/// <c>PlayingFinished</c> with the reason the playback ended.
/// </summary>
private void WorkerThread( )
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;

    // grabber that receives decoded frames via ISampleGrabber callback
    Grabber grabber = new Grabber(this);

    // raw COM objects (released in finally)
    object graphObject = null;
    object grabberObject = null;

    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;
    IBaseFilter grabberBase = null;
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEvent = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source filter for the file
        graph.AddSourceFilter(fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;

        // add grabber filter to graph
        graph.AddFilter(grabberBase, "grabber");

        // restrict the grabber to RGB24 video so the callback sees 24bpp frames
        AMMediaType mediaType = new AMMediaType( );
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(mediaType);

        // connect pins: try each source output pin until one connects to the grabber
        int pinToTry = 0;
        IPin inPin = Tools.GetInPin(grabberBase, 0);
        IPin outPin = null;

        // find output pin acceptable by sample grabber
        while (true)
        {
            outPin = Tools.GetOutPin(sourceBase, pinToTry);
            if (outPin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }
            if (graph.Connect(outPin, inPin) < 0)
            {
                Marshal.ReleaseComObject(outPin);
                outPin = null;
                pinToTry++;
            }
            else
            {
                break;
            }
        }
        Marshal.ReleaseComObject(outPin);
        Marshal.ReleaseComObject(inPin);

        // read the negotiated media type to learn the frame dimensions
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
        }
        // FIX: dispose unconditionally — the original only disposed on success,
        // leaking the media type's format block when GetConnectedMediaType failed
        mediaType.Dispose( );

        // let's do rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render grabber output pin
            // FIX: hold the pin in a local and release it — the original passed
            // Tools.GetOutPin() directly to Render() and leaked the COM reference
            IPin grabberOutPin = Tools.GetOutPin(grabberBase, 0);
            graph.Render(grabberOutPin);
            Marshal.ReleaseComObject(grabberOutPin);

            // configure video window: don't pop it up automatically
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }

        // configure sample grabber: callback-only, no buffering, continuous
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        sampleGrabber.SetCallback(grabber, 1);

        // disable reference clock, if someone requested it (runs as fast as possible)
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            mediaFilter.SetSyncSource(null);
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;
        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;
        IntPtr p1, p2;
        DsEvCode code;

        // run
        mediaControl.Run( );

        // pump graph events until completion or an external stop request
        do
        {
            if (mediaEvent != null)
            {
                if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                {
                    mediaEvent.FreeEventParams(code, p1, p2);
                    if (code == DsEvCode.Complete)
                    {
                        reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                        break;
                    }
                }
            }
        }
        while (!stopEvent.WaitOne(100, false));

        mediaControl.Stop( );
    }
    catch (Exception exception)
    {
        // provide information to clients
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // drop interface references first, then release the underlying COM objects
        graph = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEvent = null;

        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceBase != null)
        {
            Marshal.ReleaseComObject(sourceBase);
            sourceBase = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }

    if (PlayingFinished != null)
    {
        PlayingFinished(this, reasonToStop);
    }
}
/// <summary>
/// Builds the capture graph for the grabber: adds the filters appropriate to
/// the configured video source (ATI TV card, file, or webcam), connects their
/// pins, logs every filter and connected pin for diagnostics, then sets up the
/// video and vector grabbers.
/// </summary>
/// <returns>true when the graph was assembled; false when any step threw.</returns>
bool SetupGraph()
{
    int hr;
    try
    {
        hr = capGraph.SetFiltergraph(graphBuilder);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }

        // Add filters according to the source type.
        if (atiTVCardFound)
        {
            SetupVideoCrossbar();
            AddFilter(atiCrossbar, "ATI Crossbar");
            AddFilter(capFilter, "Analog Capture Device");
            AddFilter(wmVideoDecoder, "AVI Decompressor");
            AddFilter(stretchVideo, "Stretch Video");
            AddFilter(colorConverter, "Color Space Converter");
        }
        else if (videoSource.Equals("File"))
        {
            graphBuilder.AddSourceFilter(filePath, "WM ASF Reader", out capFilter);
            AddFilter(modFrameRate, "Modify Frame Rate");
            AddFilter(stretchVideo, "Stretch Video");
            AddFilter(colorConverter, "Color Space Converter");
        }
        else
        {
            // Webcam: only add the capture filter if it responds to GetState.
            int state;
            if (capFilter.GetState(100, out state) == 0)
            {
                AddFilter(capFilter, "Capture Filter");
            }
        }

        AddFilter(sampleGrabber, "Sample Grabber");
        // make sure samples grabbed have 32 bits per pixel to work with Ge Force 7900
        AddFilter(baseGrabFlt, "Vector Grabber");
        AddFilter(motionVector, "Motion Flow Vector Filter");
        if (videoPreview)
        {
            AddFilter(teeSplitter, "Smart Tee Splitter");
            AddFilter(colorConverter, "Color Space Converter");
            AddFilter(videoRenderer, "Video Renderer");
        }

#if false
        // Attempt to use VMR9 abandoned for now
        IVMRFilterConfig9 vmrConfig = videoRenderer as IVMRFilterConfig9;
        hr = vmrConfig.SetRenderingMode(VMR9Mode.Renderless);
        if (hr < 0)
        {
            Marshal.ThrowExceptionForHR(hr);
        }
        IVMRSurfaceAllocatorNotify9 vmrAllocNotify = videoRenderer as IVMRSurfaceAllocatorNotify9;
        vmrAllocNotify.AdviseSurfaceAllocator(userID, vmrAllocator);
        vmrAllocator.AdviseNotify(vmrAllocNotify);
#endif

        // Connect the pins for the chosen source.
        if (videoSource.Equals("File"))
        {
            ConnectPins(capFilter, "Raw Video 1", modFrameRate, "In");
            ConnectPins(modFrameRate, "Out", stretchVideo, "In");
            //ConnectPins(wmVideoDecoder, "out0", stretchVideo, "In");
            ConnectPins(stretchVideo, "Out", colorConverter, "In");
            ConnectPins(colorConverter, "Out", sampleGrabber, "In");
        }
        else
        {
            if (atiTVCardFound)
            {
                ConnectPins(atiCrossbar, "0: Video Decoder Out", capFilter, "0");
                ConnectPins(capFilter, "2", wmVideoDecoder, "In");
                ConnectPins(wmVideoDecoder, "Out", stretchVideo, "In");
                ConnectPins(stretchVideo, "Out", colorConverter, "In");
                ConnectPins(colorConverter, "Out", sampleGrabber, "In");
            }
            else // webcam case
            {
                //ConnectPins(capFilter, "CapturePin", stretchVideo, "In");
                ConnectPins(capFilter, "CapturePin", sampleGrabber, "In");
            }
        }
        if (videoPreview)
        {
            ConnectPins(sampleGrabber, "Out", teeSplitter, "Input");
            //ConnectPins(teeSplitter, "0", videoRenderer, "In");
            ConnectPins(teeSplitter, "Preview", colorConverter, "In");
            ConnectPins(colorConverter, "Out", videoRenderer, "VMR Input0");
            ConnectPins(teeSplitter, "Capture", motionVector, "In");
        }
        else
        {
            ConnectPins(sampleGrabber, "Out", motionVector, "In");
        }
        ConnectPins(motionVector, "Out", baseGrabFlt, "In");

        // check that all filters are accounted for
        // there must be a total of 7 filters if source is "File"
        IEnumFilters enumFilters;
        graphBuilder.EnumFilters(out enumFilters);
        enumFilters.Reset();
        IBaseFilter[] filters = new IBaseFilter[1];
        int count = 0;
        int total = 0;
        while (0 == (hr = enumFilters.Next(1, filters, out count)))
        {
            FilterInfo info = new FilterInfo();
            hr = filters[0].QueryFilterInfo(info);
            if (hr < 0)
            {
                Marshal.ThrowExceptionForHR(hr);
            }
            LogInfo(LogGroups.Console, info.achName);
            IPin[] pins = new IPin[1];
            IEnumPins enumPins;
            filters[0].EnumPins(out enumPins);
            while (0 == (hr = enumPins.Next(1, pins, out count)))
            {
                IPin pin;
                hr = pins[0].ConnectedTo(out pin);
                if (pin != null)
                {
                    string pinID;
                    hr = pin.QueryId(out pinID);
                    LogInfo(LogGroups.Console, pinID);
                    // FIX: release the connected pin returned by ConnectedTo
                    // (the original leaked this COM reference)
                    Marshal.ReleaseComObject(pin);
                }
                // FIX: release each enumerated pin (previously leaked)
                Marshal.ReleaseComObject(pins[0]);
            }
            // FIX: release the pin enumerator (previously leaked)
            Marshal.ReleaseComObject(enumPins);
            Marshal.ReleaseComObject(filters[0]);
            total++;
        }
        Marshal.ReleaseComObject(enumFilters);

        SetupVideoGrabber();
        SetupVectorGrabber();
        return true;
    }
    catch (Exception ee)
    {
        LogInfo(LogGroups.Console, "Could not setup graph\r\n" + ee.Message);
        return false;
    }
}