/// <summary>
/// Builds a render graph from m_pTimeline via the DES render engine, runs it to
/// completion, and asserts that the error-logging callback fired (m_Called).
/// </summary>
private void TestLogError()
{
    int hr;
    IGraphBuilder fg;
    IRenderEngine ire = new RenderEngine() as IRenderEngine;

    hr = ire.SetTimelineObject(m_pTimeline);
    DESError.ThrowExceptionForHR(hr);
    hr = ire.ConnectFrontEnd();
    DESError.ThrowExceptionForHR(hr);
    hr = ire.RenderOutputPins();
    DESError.ThrowExceptionForHR(hr);
    hr = ire.GetFilterGraph(out fg);
    DESError.ThrowExceptionForHR(hr);

    hr = ((IMediaControl)fg).Run();
    DESError.ThrowExceptionForHR(hr);

    IMediaEvent ime = fg as IMediaEvent;
    EventCode evCode;
    // IMediaEvent::WaitForCompletion returns E_ABORT as its HRESULT (and sets
    // evCode to 0) whenever the timeout elapses before the graph completes.
    const int E_Abort = unchecked((int)0x80004004);
    do
    {
        // Keep the UI pumping while we poll for completion.
        System.Windows.Forms.Application.DoEvents();
        System.Threading.Thread.Sleep(100);
        hr = ime.WaitForCompletion(1000, out evCode);
    } while (hr == E_Abort); // FIX: was `evCode == (EventCode)E_Abort` — evCode is 0 on
                             // timeout, so the old condition exited after one iteration
                             // even though the graph had not finished.

    Debug.Assert(m_Called == true, "LogError");
}
/// <summary>
/// Blocks (while keeping the message pump alive) until the filter graph signals
/// completion, then throws if the final HRESULT indicates an error.
/// </summary>
public void WaitUntilDone()
{
    // E_ABORT is what WaitForCompletion returns while the graph is still running
    // and the (100ms) timeout expires.
    const int E_Abort = unchecked((int)0x80004004);
    EventCode completionCode;
    int result;
    do
    {
        // Pump pending window messages so the UI stays responsive while polling.
        Application.DoEvents();
        result = m_MediaEvent.WaitForCompletion(100, out completionCode);
    } while (result == E_Abort);
    DsError.ThrowExceptionForHR(result);
}
/// <summary>
/// Renders and plays the given audio file (fileName is object so this can be used
/// as a thread-start argument), waiting up to 6 seconds for playback to finish.
/// Failures are deliberately swallowed: playback is best-effort.
/// </summary>
private static void playAudioFile(object fileName)
{
    IFilterGraph2 filterGraph = null;
    try
    {
        filterGraph = (IFilterGraph2)new FilterGraph();
        // RenderFile returns 0 (S_OK) on success; anything else means the file
        // could not be rendered and we silently give up.
        if (0 == filterGraph.RenderFile(fileName as string, IntPtr.Zero))
        {
            IMediaControl mediaControl = (IMediaControl)filterGraph;
            mediaControl.Run();
            IMediaEvent mediaEvent = (IMediaEvent)filterGraph;
            int eventCode;
            mediaEvent.WaitForCompletion(6000, out eventCode);
            // FIX: stop the graph so the source filter releases the file promptly.
            mediaControl.Stop();
        }
    }
    catch (Exception)
    {
        // Deliberate best-effort: a failed sound must never crash the caller.
    }
    finally
    {
        // FIX: release the graph COM object; previously it leaked on every call.
        if (filterGraph != null)
        {
            Marshal.ReleaseComObject(filterGraph);
        }
    }
}
/// <summary>
/// Test all methods of IMediaEvent: GetEventHandle, GetEvent/FreeEventParams,
/// Cancel/RestoreDefaultHandling and WaitForCompletion.
/// </summary>
public void DoTests()
{
    int hr;
    IntPtr hEvent;
    IntPtr p1, p2;
    EventCode ec;

    BuildGraph();

    hr = m_mediaEvent.GetEventHandle(out hEvent);
    DsError.ThrowExceptionForHR(hr);

    ManualResetEvent mre = new ManualResetEvent(false);
    // FIX: ownsHandle must be false. The event handle belongs to the filter graph
    // (IMediaEvent::GetEventHandle: the caller must NOT close it); passing true
    // would close the graph's handle when the SafeWaitHandle is disposed/finalized,
    // breaking all further event signalling.
    mre.SafeWaitHandle = new Microsoft.Win32.SafeHandles.SafeWaitHandle(hEvent, false);

    // Should get an event before this
    bool b = mre.WaitOne(5000, true);
    Debug.Assert(b, "GetEventHandle");

    // I don't know what event I may get, so I don't know how to check it
    hr = m_mediaEvent.GetEvent(out ec, out p1, out p2, 0);
    DsError.ThrowExceptionForHR(hr);
    // Event parameters must always be freed after a successful GetEvent.
    hr = m_mediaEvent.FreeEventParams(ec, p1, p2);
    DsError.ThrowExceptionForHR(hr);

    hr = m_mediaEvent.CancelDefaultHandling(EventCode.Repaint);
    DsError.ThrowExceptionForHR(hr);
    hr = m_mediaEvent.RestoreDefaultHandling(EventCode.Repaint);
    DsError.ThrowExceptionForHR(hr);

    // The clip is 4 seconds long, so timeout in 5
    hr = m_mediaEvent.WaitForCompletion(5000, out ec);
    DsError.ThrowExceptionForHR(hr);

    // The video should have successfully played
    Debug.Assert(ec == EventCode.Complete, "WaitForCompletion");
}
/// <summary>
/// Polls the running conversion graph once.
/// Returns true when the graph reports EventCode.Complete, false while the
/// conversion is still in progress (raising ConversionProgressChanged with the
/// current percentage); any other event code is treated as fatal and thrown.
/// </summary>
private bool CheckGraphConversion(ref IMediaSeeking mediaSeeking)
{
    int hr;
    IMediaEvent mediaEvent = (IMediaEvent)currentFilterGraph;

    // Check the graph / conversion is going ok, and raise any progress events
    EventCode statusCode;
    hr = mediaEvent.WaitForCompletion(100, out statusCode);
    switch (statusCode)
    {
        case EventCode.Complete:
            return (true);

        case 0: // Timed out — still going; fire event with an update on where we are
            if (mediaSeeking != null)
            {
                long curPos;
                mediaSeeking.GetCurrentPosition(out curPos);
                long length;
                mediaSeeking.GetDuration(out length);
                double progress = curPos * 100.0 / (double)length;
                if (ConversionProgressChanged != null)
                {
                    ConversionProgressChanged(new object(), new ProgressChangedEventArgs(progress));
                }
            }
            return (false);

        default: // Error
            EventCode tryCode;
            IntPtr lp1, lp2;
            hr = mediaEvent.GetEvent(out tryCode, out lp1, out lp2, 200);
            DsError.ThrowExceptionForHR(hr);
            // FIX: parameters returned by GetEvent must be released with
            // FreeEventParams, otherwise the BSTRs / interface references they
            // may hold are leaked on every error.
            mediaEvent.FreeEventParams(tryCode, lp1, lp2);
            throw new Exception(statusCode.ToString());
    }
}
/// <summary>
/// <para>Encodes only the audio stream of the given video file and returns it
/// as a complete in-memory WAV file image.</para>
/// </summary>
/// <param name="fileName">Path of the source media file.</param>
/// <param name="wavFileImage">Receives the full WAV file (RIFF header + PCM data).</param>
public static void t変換(string fileName, out byte[] wavFileImage)
{
    int hr = 0;
    IGraphBuilder graphBuilder = null;
    try
    {
        graphBuilder = (IGraphBuilder)new FilterGraph();

        #region [ Create and add the audio sample grabber. ]
        //-----------------
        ISampleGrabber sampleGrabber = null;
        try
        {
            sampleGrabber = (ISampleGrabber)new SampleGrabber();

            // Set the grabber's media type: uncompressed PCM audio.
            var mediaType = new AMMediaType()
            {
                majorType = MediaType.Audio,
                subType = MediaSubType.PCM,
                formatType = FormatType.WaveEx,
            };
            try
            {
                hr = sampleGrabber.SetMediaType(mediaType);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (mediaType != null)
                {
                    DsUtils.FreeAMMediaType(mediaType);
                }
            }

            // Enable buffering on the sample grabber.
            hr = sampleGrabber.SetBufferSamples(true);
            DsError.ThrowExceptionForHR(hr);

            // Attach the callback to the grabber.
            sampleGrabberProc = new CSampleGrabberCallBack();
            hr = sampleGrabber.SetCallback(sampleGrabberProc, 1); // 1: invoke the callback's BufferCB() method.
            DsError.ThrowExceptionForHR(hr); // FIX: this HRESULT was previously unchecked.

            // Add the grabber to the graph.
            hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber for Audio/PCM");
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            C共通.tCOMオブジェクトを解放する(ref sampleGrabber);
        }
        //-----------------
        #endregion

        // FIX: the ROT entry was previously never disposed, leaking a Running
        // Object Table registration per call; a using block guarantees removal.
        using (var rotEntry = new DirectShowLib.DsROTEntry(graphBuilder))
        {
            // Auto-build the rest of the graph from fileName.
            hr = graphBuilder.RenderFile(fileName, null); // IMediaControl.RenderFile() is deprecated.
            DsError.ThrowExceptionForHR(hr);

            // Suppress the video renderer window. Must be done BEFORE swapping the
            // audio renderer to Null (the swap helper briefly runs the graph, which
            // would otherwise flash an Active window).
            IVideoWindow vw = graphBuilder as IVideoWindow;
            vw.put_AutoShow(OABool.False);

            // Replace the audio renderer with a NullRenderer and capture the format.
            WaveFormat wfx;
            byte[] wfx拡張領域;
            CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する(graphBuilder, out wfx, out wfx拡張領域);

            // Set the reference clock to NULL so the graph runs at maximum speed.
            IMediaFilter mediaFilter = graphBuilder as IMediaFilter;
            mediaFilter.SetSyncSource(null);
            mediaFilter = null;

            // Write decoded data into a fresh MemoryStream. (The format-probe above
            // already played once, so the stream must be reset here.)
            sampleGrabberProc.MemoryStream = new MemoryStream();
            var ms = sampleGrabberProc.MemoryStream;
            var bw = new BinaryWriter(ms);
            bw.Write(new byte[] { 0x52, 0x49, 0x46, 0x46 }); // 'RIFF'
            bw.Write((UInt32)0);                             // file size - 8 [bytes]; unknown now, overwritten later.
            bw.Write(new byte[] { 0x57, 0x41, 0x56, 0x45 }); // 'WAVE'
            bw.Write(new byte[] { 0x66, 0x6D, 0x74, 0x20 }); // 'fmt '
            bw.Write((UInt32)(16 + ((wfx拡張領域.Length > 0) ? (2 /*sizeof(WAVEFORMATEX.cbSize)*/ + wfx拡張領域.Length) : 0))); // fmt chunk size [bytes]
            bw.Write((UInt16)wfx.Encoding);              // format tag (1 for linear PCM)
            bw.Write((UInt16)wfx.Channels);              // channel count
            bw.Write((UInt32)wfx.SampleRate);            // sample rate
            bw.Write((UInt32)wfx.AverageBytesPerSecond); // byte rate
            bw.Write((UInt16)wfx.BlockAlign);            // block align
            bw.Write((UInt16)wfx.BitsPerSample);         // bits per sample
            if (wfx拡張領域.Length > 0)
            {
                bw.Write((UInt16)wfx拡張領域.Length); // extension size [bytes]
                bw.Write(wfx拡張領域);                // extension data
            }
            bw.Write(new byte[] { 0x64, 0x61, 0x74, 0x61 }); // 'data'
            int nDATAチャンクサイズ位置 = (int)ms.Position;
            bw.Write((UInt32)0); // data chunk size [bytes]; unknown now, overwritten later.

            #region [ Start playback and wait for completion — PCM accumulates into sampleGrabberProc.MemoryStream. ]
            //-----------------
            IMediaControl mediaControl = graphBuilder as IMediaControl;
            mediaControl.Run(); // start playback
            IMediaEvent mediaEvent = graphBuilder as IMediaEvent;
            EventCode eventCode;
            hr = mediaEvent.WaitForCompletion(-1, out eventCode); // -1: wait indefinitely
            DsError.ThrowExceptionForHR(hr);
            if (eventCode != EventCode.Complete)
            {
                throw new Exception("再生待ちに失敗しました。");
            }
            mediaControl.Stop();
            mediaEvent = null;
            mediaControl = null;
            //-----------------
            #endregion

            // Back-patch the two placeholder size fields.
            bw.Seek(4, SeekOrigin.Begin);
            bw.Write((UInt32)(ms.Length - 8)); // file size - 8 [bytes]
            bw.Seek(nDATAチャンクサイズ位置, SeekOrigin.Begin);
            // FIX: the original wrote `(UInt32)ms.Length - (nDATAチャンクサイズ位置 + 4)`,
            // which is uint - int and therefore promotes to long, so Write(long)
            // emitted EIGHT bytes and overwrote the first 4 bytes of PCM data.
            // Casting the whole expression keeps it a 4-byte write.
            bw.Write((UInt32)(ms.Length - (nDATAチャンクサイズ位置 + 4))); // data chunk size [bytes]

            // Produce the output.
            wavFileImage = ms.ToArray();

            // Clean up.
            bw.Close();
            sampleGrabberProc.Dispose(); // ms.Close()
        }
    }
    finally
    {
        C共通.tCOMオブジェクトを解放する(ref graphBuilder);
    }
}
/// <summary>
/// Prepares the current filter graph to transcode a plain (non-stream-buffer) video
/// file to WMV: adds a WM ASF Writer sink pointed at "&lt;input&gt;.wmv", renders the
/// source file, test-runs the graph to discover the source frame size, then
/// configures the ASF writer. WTV/DVR-MS inputs are rejected (handled elsewhere).
/// </summary>
/// <param name="strq">Request describing the input file and streaming parameters.</param>
/// <returns>OK, ErrorInvalidFileType, or ErrorExceptionOccurred.</returns>
DSStreamResultCodes InitWithVideoFile(WTVStreamingVideoRequest strq)
{
    UsingSBEFilter = false; // Not using stream buffer

    // Init variables
    IPin[] pin = new IPin[1];
    string dPin = string.Empty;
    string sName = string.Empty;
    string dName = string.Empty;
    string sPin = string.Empty;
    FileInfo fiInputFile = new FileInfo(strq.FileName);
    string txtOutputFNPath = fiInputFile.FullName + ".wmv";
    // Stream-buffer formats must go through InitWithStreamBufferFile instead.
    if (
        (fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) ||
        (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
        )
    {
        return(DSStreamResultCodes.ErrorInvalidFileType);
    }

    int hr = 0;
    try
    {
        // Get the graphbuilder interface
        SendDebugMessage("Creating Graph Object", 0);
        IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

        // Create an ASF writer filter
        SendDebugMessage("Creating ASF Writer", 0);
        WMAsfWriter asf_filter = new WMAsfWriter();
        dc.Add(asf_filter);                            // CHECK FOR ERRORS
        currentOutputFilter = (IBaseFilter)asf_filter; // class variable

        // Add the ASF filter to the graph
        hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
        DsError.ThrowExceptionForHR(hr);

        // Set the filename
        SendDebugMessage("Setting filename", 0);
        IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
        string destPathFN = fiInputFile.FullName + ".wmv";
        hr = sinkFilter.SetFileName(destPathFN, null);
        DsError.ThrowExceptionForHR(hr);

        // Handy to have an ACM Wrapper filter hanging around for AVI files with MP3 audio
        SendDebugMessage("Adding ACM Wrapper", 0);
        IBaseFilter ACMFilter = FilterDefinition.AddToFilterGraph(FilterDefinitions.Other.ACMWrapperFilter, ref graphbuilder);
        dc.Add(ACMFilter);

        // Render file - then build graph
        // NOTE(review): the RenderFile HRESULT is not checked here — confirm whether
        // partial-render codes (VFW_S_*) are deliberately tolerated.
        SendDebugMessage("Rendering file", 0);
        graphbuilder.RenderFile(fiInputFile.FullName, null);
        SendDebugMessage("Saving graph", 0);
        FilterGraphTools.SaveGraphFile(graphbuilder, "C:\\ProgramData\\RemotePotato\\lastfiltergraph.grf");

        // Are both our ASF pins connected?
        IPin ASFVidInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
        IPin ASFAudInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);

        // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
        SendDebugMessage("Run graph for testing purposes", 0);
        IMediaControl tempControl = (IMediaControl)graphbuilder;
        IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
        DsError.ThrowExceptionForHR(tempControl.Pause());
        EventCode pEventCode;
        // Short wait only; completion is not expected — this just lets the graph settle.
        hr = tempEvent.WaitForCompletion(1000, out pEventCode);

        // Get media type from vid input pin for ASF writer
        // NOTE(review): ASFVidInputPin may be null if no video pin connected —
        // the resulting NullReferenceException is swallowed by the catch below.
        AMMediaType pmt = new AMMediaType();
        hr = ASFVidInputPin.ConnectionMediaType(pmt);

        FrameSize SourceFrameSize = null;
        if (pmt.formatType == FormatType.VideoInfo2)
        {
            // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
            VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
            Marshal.PtrToStructure(pmt.formatPtr, pvih2);
            SourceFrameSize = new FrameSize(pvih2.BmiHeader.Width, pvih2.BmiHeader.Height);
        }
        else if (pmt.formatType == FormatType.VideoInfo) //{05589f80-c356-11ce-bf01-00aa0055595a}
        {
            VideoInfoHeader pvih = new VideoInfoHeader();
            Marshal.PtrToStructure(pmt.formatPtr, pvih);
            SourceFrameSize = new FrameSize(pvih.BmiHeader.Width, pvih.BmiHeader.Height);
        }
        else
        {
            // Unknown format block: fall back to an arbitrary square frame.
            SourceFrameSize = new FrameSize(200, 200); // SQUARE
        }

        // Stop graph if necessary
        FilterState pFS;
        hr = tempControl.GetState(1000, out pFS);
        if (pFS != FilterState.Stopped)
        {
            DsError.ThrowExceptionForHR(tempControl.Stop());
        }

        // Free up media type
        DsUtils.FreeAMMediaType(pmt);
        pmt = null;

        // (re)Configure the ASF writer with the selected WM Profile
        ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

        // Release pins
        SendDebugMessage("Releasing COM objects (pins)", 0);
        // source
        Marshal.ReleaseComObject(ASFVidInputPin);
        ASFVidInputPin = null;
        // NOTE(review): ASFAudInputPin may be null (audio-less source) —
        // ReleaseComObject would then throw into the catch below; verify.
        Marshal.ReleaseComObject(ASFAudInputPin);
        ASFAudInputPin = null;
    }
    catch (Exception ex)
    {
        SendDebugMessageWithException(ex.Message, ex);
        return(DSStreamResultCodes.ErrorExceptionOccurred);
    }

    return(DSStreamResultCodes.OK);
}
/// <summary>
/// Prepares the current filter graph to transcode a stream-buffer recording
/// (.wtv / .dvr-ms) to WMV: SBE source → decrypt filters → audio/video decoders
/// → WM ASF Writer. A NullRenderer is temporarily attached so the graph can be
/// test-run to discover the source frame size before the writer is configured.
/// </summary>
/// <param name="strq">Request describing the input file and streaming parameters.</param>
/// <returns>OK, ErrorInvalidFileType, ErrorAC3CodecNotFound, or ErrorExceptionOccurred.</returns>
DSStreamResultCodes InitWithStreamBufferFile(WTVStreamingVideoRequest strq)
{
    // Init variables
    //IPin[] pin = new IPin[1];
    IBaseFilter DecFilterAudio = null;
    IBaseFilter DecFilterVideo = null;
    IBaseFilter MainAudioDecoder = null;
    IBaseFilter MainVideoDecoder = null;
    string dPin = string.Empty;
    string sName = string.Empty;
    string dName = string.Empty;
    string sPin = string.Empty;
    FileInfo fiInputFile = new FileInfo(strq.FileName);
    string txtOutputFNPath = fiInputFile.FullName + ".wmv";
    // Only stream-buffer formats are handled here; everything else goes via InitWithVideoFile.
    if (
        (!fiInputFile.Extension.ToLowerInvariant().Equals(".wtv")) &&
        (!fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
        )
    {
        return(DSStreamResultCodes.ErrorInvalidFileType);
    }

    int hr = 0;
    try
    {
        // Get the graphbuilder interface
        SendDebugMessage("Creating Graph Object", 0);
        IGraphBuilder graphbuilder = (IGraphBuilder)currentFilterGraph;

        // Add the DVRMS/WTV file / filter to the graph
        SendDebugMessage("Add SBE Source Filter", 0);
        hr = graphbuilder.AddSourceFilter(fiInputFile.FullName, "SBE Filter", out currentSBEfilter); // class variable
        DsError.ThrowExceptionForHR(hr);
        dc.Add(currentSBEfilter);

        // Get the SBE audio and video out pins
        IPin SBEVidOutPin, SBEAudOutPin;
        SBEAudOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Audio, MediaSubType.Null);
        SBEVidOutPin = FilterGraphTools.FindPinByMediaType(currentSBEfilter, PinDirection.Output, MediaType.Video, MediaSubType.Null);

        // Set up two decrypt filters according to file extension (assume audio and video both present )
        if (fiInputFile.Extension.ToLowerInvariant().Equals(".dvr-ms"))
        {
            // Add DVR-MS decrypt filters
            SendDebugMessage("Add DVRMS (bda) decryption", 0);
            DecFilterAudio = (IBaseFilter)new DTFilter(); // THESE ARE FOR DVR-MS (BDA DTFilters)
            DecFilterVideo = (IBaseFilter)new DTFilter();
            graphbuilder.AddFilter(DecFilterAudio, "Decrypt / Tag");
            graphbuilder.AddFilter(DecFilterVideo, "Decrypt / Tag 0001");
        }
        else // Add WTV decrypt filters
        {
            SendDebugMessage("Add WTV (pbda) decryption", 0);
            DecFilterAudio = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder);
            DecFilterVideo = FilterDefinition.AddToFilterGraph(FilterDefinitions.Decrypt.DTFilterPBDA, ref graphbuilder, "PBDA DTFilter 0001");
        }
        dc.Add(DecFilterAudio);
        dc.Add(DecFilterVideo);

        // Make the first link in the graph: SBE => Decrypts
        SendDebugMessage("Connect SBE => Decrypt filters", 0);
        IPin DecVideoInPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Input, 0);
        FilterGraphTools.ConnectFilters(graphbuilder, SBEVidOutPin, DecVideoInPin, false);
        IPin DecAudioInPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Input, 0);
        if (DecAudioInPin == null)
        {
            SendDebugMessage("WARNING: No Audio Input to decrypt filter.");
        }
        else
        {
            FilterGraphTools.ConnectFilters(graphbuilder, SBEAudOutPin, DecAudioInPin, false);
        }

        // Get Dec Audio Out pin
        IPin DecAudioOutPin = DsFindPin.ByDirection(DecFilterAudio, PinDirection.Output, 0);

        // Examine Dec Audio out for audio format
        SendDebugMessage("Examining source audio", 0);
        AMMediaType AudioMediaType = null;
        getPinMediaType(DecAudioOutPin, MediaType.Audio, Guid.Empty, Guid.Empty, ref AudioMediaType);
        SendDebugMessage("Audio media subtype: " + AudioMediaType.subType.ToString());
        SendDebugMessage("Examining Audio StreamInfo");
        StreamInfo si = FileInformation.GetStreamInfo(AudioMediaType);
        bool AudioIsAC3 = (si.SimpleType == "AC-3");
        if (AudioIsAC3)
        {
            SendDebugMessage("Audio type is AC3");
        }
        else
        {
            SendDebugMessage("Audio type is not AC3");
        }
        si = null;
        DsUtils.FreeAMMediaType(AudioMediaType);

        // Add an appropriate audio decoder
        if (AudioIsAC3)
        {
            if (!FilterGraphTools.IsThisComObjectInstalled(FilterDefinitions.Audio.AudioDecoderMPCHC.CLSID))
            {
                SendDebugMessage("Missing AC3 Audio Decoder, and AC3 audio detected.");
                return(DSStreamResultCodes.ErrorAC3CodecNotFound);
            }
            else
            {
                MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMPCHC, ref graphbuilder);
                //MainAudioDecoder = FatAttitude.WTVTranscoder.FilterDefinitions.Audio.AudioDecoderFFDShow.AddToFilterGraph(ref graph);
                Guid tmpGuid;
                MainAudioDecoder.GetClassID(out tmpGuid);
                SendDebugMessage("Main Audio decoder CLSID is " + tmpGuid.ToString());
            }
        }
        else
        {
            MainAudioDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Audio.AudioDecoderMSDTV, ref graphbuilder);
        }

        // Add a video decoder
        SendDebugMessage("Add DTV decoder", 0);
        MainVideoDecoder = FilterDefinition.AddToFilterGraph(FilterDefinitions.Video.VideoDecoderMSDTV, ref graphbuilder);
        dc.Add(MainAudioDecoder);
        dc.Add(MainVideoDecoder);

        //SetAudioDecoderOutputToPCMStereo(MainAudioDecoder);

        // Add a null renderer
        SendDebugMessage("Add null renderer", 0);
        NullRenderer MyNullRenderer = new NullRenderer();
        dc.Add(MyNullRenderer);
        hr = graphbuilder.AddFilter((IBaseFilter)MyNullRenderer, @"Null Renderer");
        DsError.ThrowExceptionForHR(hr);

        // Link up video through to null renderer
        SendDebugMessage("Connect video to null renderer", 0);
        // Make the second link: Decrypts => DTV
        IPin DecVideoOutPin = DsFindPin.ByDirection(DecFilterVideo, PinDirection.Output, 0);
        IPin DTVVideoInPin = DsFindPin.ByName(MainVideoDecoder, @"Video Input"); // IPin DTVVideoInPin = DsFindPin.ByDirection(DTVVideoDecoder, PinDirection.Input, 0); // first one should be video input?
        FilterGraphTools.ConnectFilters(graphbuilder, DecVideoOutPin, DTVVideoInPin, false);
        // 3. DTV => Null renderer
        IPin NullRInPin = DsFindPin.ByDirection((IBaseFilter)MyNullRenderer, PinDirection.Input, 0);
        IPin DTVVideoOutPin = FilterGraphTools.FindPinByMediaType(MainVideoDecoder, PinDirection.Output, MediaType.Video, MediaSubType.Null);
        FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, NullRInPin, false);
        Marshal.ReleaseComObject(NullRInPin);
        NullRInPin = null;

        // Run graph [can use this also to get media type => see, e.g. dvrmstowmvhd by Babgvant]
        SendDebugMessage("Run graph for testing purposes", 0);
        IMediaControl tempControl = (IMediaControl)graphbuilder;
        IMediaEvent tempEvent = (IMediaEvent)graphbuilder;
        DsError.ThrowExceptionForHR(tempControl.Pause());
        DsError.ThrowExceptionForHR(tempControl.Run());
        EventCode pEventCode;
        hr = tempEvent.WaitForCompletion(1000, out pEventCode);
        //DsError.ThrowExceptionForHR(hr); // DO *NOT* DO THIS HERE! THERE MAY WELL BE AN ERROR DUE TO EVENTS RAISED BY THE STREAM BUFFER ENGINE, THIS IS A DELIBERATE TEST RUN OF THE GRAPH

        // Stop graph if necessary
        FilterState pFS;
        hr = tempControl.GetState(1000, out pFS);
        if (pFS == FilterState.Running)
        {
            DsError.ThrowExceptionForHR(tempControl.Stop());
        }

        // Remove null renderer
        hr = graphbuilder.RemoveFilter((IBaseFilter)MyNullRenderer);

        // Now graph has been run and stopped we can get the video width and height from the output pin of the main video decoder
        AMMediaType pmt = null;
        getPinMediaType(DTVVideoOutPin, MediaType.Video, MediaSubType.YUY2, Guid.Empty, ref pmt);
        FrameSize SourceFrameSize;
        if (pmt.formatType == FormatType.VideoInfo2)
        {
            VideoInfoHeader2 pvih2 = new VideoInfoHeader2();
            Marshal.PtrToStructure(pmt.formatPtr, pvih2);
            int VideoWidth = pvih2.BmiHeader.Width;
            int VideoHeight = pvih2.BmiHeader.Height;
            SourceFrameSize = new FrameSize(VideoWidth, VideoHeight);
        }
        else
        {
            // Fallback when the decoder did not report a VideoInfo2 format block.
            SourceFrameSize = new FrameSize(320, 240);
        }

        // Free up
        DsUtils.FreeAMMediaType(pmt);
        pmt = null;

        // Link up audio
        // 2. Audio Decrypt -> Audio decoder
        IPin MainAudioInPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Input, 0);
        FilterGraphTools.ConnectFilters(graphbuilder, DecAudioOutPin, MainAudioInPin, false);

        // Add ASF Writer
        // Create an ASF writer filter
        SendDebugMessage("Creating ASF Writer", 0);
        WMAsfWriter asf_filter = new WMAsfWriter();
        dc.Add(asf_filter);                            // CHECK FOR ERRORS
        currentOutputFilter = (IBaseFilter)asf_filter; // class variable

        // Add the ASF filter to the graph
        hr = graphbuilder.AddFilter((IBaseFilter)asf_filter, "WM Asf Writer");
        DsError.ThrowExceptionForHR(hr);

        // Set the filename
        IFileSinkFilter sinkFilter = (IFileSinkFilter)asf_filter;
        string destPathFN = fiInputFile.FullName + ".wmv";
        hr = sinkFilter.SetFileName(destPathFN, null);
        DsError.ThrowExceptionForHR(hr);

        // Make the final links: DTV => writer
        SendDebugMessage("Linking audio/video through to decoder and writer", 0);
        IPin DTVAudioOutPin = DsFindPin.ByDirection(MainAudioDecoder, PinDirection.Output, 0);
        IPin ASFAudioInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Audio, MediaSubType.Null);
        IPin ASFVideoInputPin = FilterGraphTools.FindPinByMediaType((IBaseFilter)asf_filter, PinDirection.Input, MediaType.Video, MediaSubType.Null);
        FilterGraphTools.ConnectFilters(graphbuilder, DTVAudioOutPin, ASFAudioInputPin, false);
        if (ASFVideoInputPin != null)
        {
            FilterGraphTools.ConnectFilters(graphbuilder, DTVVideoOutPin, ASFVideoInputPin, false);
        }

        // Configure ASFWriter
        ConfigureASFWriter(asf_filter, strq, SourceFrameSize);

        // Release pins
        SendDebugMessage("Releasing COM objects (pins)", 0);
        // dec
        // NOTE(review): DecAudioInPin may be null here (see the warning branch above);
        // ReleaseComObject(null) throws and would route to the catch below — verify.
        Marshal.ReleaseComObject(DecAudioInPin);
        DecAudioInPin = null;
        Marshal.ReleaseComObject(DecVideoInPin);
        DecVideoInPin = null;
        Marshal.ReleaseComObject(DecVideoOutPin);
        DecVideoOutPin = null;
        Marshal.ReleaseComObject(DecAudioOutPin);
        DecAudioOutPin = null;
        // dtv
        Marshal.ReleaseComObject(MainAudioInPin);
        MainAudioInPin = null;
        Marshal.ReleaseComObject(DTVVideoInPin);
        DTVVideoInPin = null;
        Marshal.ReleaseComObject(DTVVideoOutPin);
        DTVVideoOutPin = null;
        Marshal.ReleaseComObject(DTVAudioOutPin);
        DTVAudioOutPin = null;
        // asf
        Marshal.ReleaseComObject(ASFAudioInputPin);
        ASFAudioInputPin = null;
        // NOTE(review): ASFVideoInputPin was null-checked before connecting but is
        // released unconditionally here — confirm it can never be null at this point.
        Marshal.ReleaseComObject(ASFVideoInputPin);
        ASFVideoInputPin = null;
    }
    catch (Exception ex)
    {
        SendDebugMessageWithException(ex.Message, ex);
        return(DSStreamResultCodes.ErrorExceptionOccurred);
    }

    return(DSStreamResultCodes.OK);
}
/// <summary>Runs the graph</summary>
/// <param name="graphBuilder">The graph to be run.</param>
/// <param name="seekableFilter">The filter to use for computing percent complete. Must implement IMediaSeeking.</param>
protected void RunGraph(IGraphBuilder graphBuilder, IBaseFilter seekableFilter)
{
    // Get the necessary control and event interfaces
    IMediaControl mediaControl = (IMediaControl)graphBuilder;
    IMediaEvent mediaEvent = (IMediaEvent)graphBuilder;

    // Get the media seeking interface to use for computing status and progress updates;
    // prefer the supplied filter, fall back to the graph, otherwise skip progress.
    IMediaSeeking mediaSeeking = seekableFilter as IMediaSeeking;
    if (!CanGetPositionAndDuration(mediaSeeking))
    {
        mediaSeeking = graphBuilder as IMediaSeeking;
        if (!CanGetPositionAndDuration(mediaSeeking))
        {
            mediaSeeking = null;
        }
    }

    // Publish the graph to the running object table and to a temporary file for examination/debugging purposes
    //using (new GraphPublisher(graphBuilder, "C:\\vidtests\\grf\\" + Guid.NewGuid().ToString("N") + ".grf"))
    {
        // Run the graph
        int hr = 0;
        hr = mediaControl.Pause();
        hr = mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);
        try
        {
            ProgressChanged(new object(), new ProgressChangedEventArgs(0.0)); // initial progress update stating 0% done
            bool done = false;
            while (!CancellationPending && !done) // continue until we're done/cancelled
            {
                // Poll to see how we're doing (statusCode is 0 when the wait merely timed out)
                EventCode statusCode;
                hr = mediaEvent.WaitForCompletion(200, out statusCode);
                switch (statusCode)
                {
                    case EventCode.Complete:
                        done = true;
                        break;
                    case 0:
                        // Get an update on where we are with the conversion
                        if (mediaSeeking != null)
                        {
                            long curPos;
                            mediaSeeking.GetCurrentPosition(out curPos);
                            long length;
                            mediaSeeking.GetDuration(out length);
                            double progress = curPos * 100.0 / (double)length;
                            if (progress > 0)
                            {
                                ProgressChanged(new object(), new ProgressChangedEventArgs(progress));
                            }
                        }
                        break;
                    default: // Error, so throw exception
                        EventCode tryCode;
                        IntPtr lp1, lp2;
                        hr = mediaEvent.GetEvent(out tryCode, out lp1, out lp2, 200);
                        DsError.ThrowExceptionForHR(hr);
                        // FIX: GetEvent parameters must be freed with FreeEventParams
                        // or any BSTR/interface references they carry leak.
                        mediaEvent.FreeEventParams(tryCode, lp1, lp2);
                        throw new Exception(statusCode.ToString());
                }
            }
            ProgressChanged(new object(), new ProgressChangedEventArgs(100)); // final progress update stating 100% done
        }
        finally
        {
            // We're done converting, so stop the graph
            FilterState graphState;
            mediaControl.GetState(100, out graphState);
            if (graphState == FilterState.Running)
            {
                mediaControl.Pause();
            }
            mediaControl.Stop();
            // Return done
            Completed(new object(), new EventArgs());
        }
    }
}
/// <summary>
/// Seeks the graph to <paramref name="grabPosition"/> (when seeking is supported),
/// runs it to completion, and returns the sample grabber's current buffer as a
/// Bitmap. Returns null when the graph does not complete or no frame is available.
/// </summary>
/// <param name="graph">Graph containing the sample grabber.</param>
/// <param name="sg">Sample grabber whose buffered frame is read.</param>
/// <param name="grabPosition">Desired position (same units the graph's IMediaSeeking uses).</param>
/// <param name="ec">Receives the completion event code from WaitForCompletion.</param>
private static Bitmap GetBitmap(IGraphBuilder graph, ISampleGrabber sg, long grabPosition, out EventCode ec)
{
    IntPtr pBuffer = IntPtr.Zero;
    int pBufferSize = 0;
    Bitmap b = null;
    int hr = 0;
    try
    {
        // Discover which seeking operations the graph supports.
        IMediaSeeking ims = graph as IMediaSeeking;
        bool canDuration = false;
        bool canPos = false;
        bool canSeek = false;
        long pDuration = 0;
        long pCurrent = 0;
        if (ims != null)
        {
            AMSeekingSeekingCapabilities caps;
            hr = ims.GetCapabilities(out caps);
            if ((caps & AMSeekingSeekingCapabilities.CanGetDuration) == AMSeekingSeekingCapabilities.CanGetDuration)
            {
                canDuration = true;
            }
            if ((caps & AMSeekingSeekingCapabilities.CanGetCurrentPos) == AMSeekingSeekingCapabilities.CanGetCurrentPos)
            {
                canPos = true;
            }
            if ((caps & AMSeekingSeekingCapabilities.CanSeekAbsolute) == AMSeekingSeekingCapabilities.CanSeekAbsolute)
            {
                canSeek = true;
            }
            if (canDuration)
            {
                hr = ims.GetDuration(out pDuration);
            }
            // Clamp the requested grab position inside the clip.
            if (grabPosition > pDuration)
            {
                grabPosition = pDuration - 1;
            }
            if (canSeek)
            {
                hr = ims.SetPositions(new DsLong(grabPosition), AMSeekingSeekingFlags.AbsolutePositioning, 0, AMSeekingSeekingFlags.NoPositioning);
                DsError.ThrowExceptionForHR(hr);
            }
            if (canPos)
            {
                hr = ims.GetCurrentPosition(out pCurrent);
            }
            // (A duplicated second GetCurrentPosition call was removed — pCurrent
            // is only read for its side effect of validating the position.)
        }

        // Run the graph to completion so the grabber buffers the target frame.
        IMediaControl mControl = graph as IMediaControl;
        IMediaEvent mEvent = graph as IMediaEvent;
        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Run();
        DsError.ThrowExceptionForHR(hr);
        hr = mEvent.WaitForCompletion(int.MaxValue, out ec);
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Pause();
        DsError.ThrowExceptionForHR(hr);
        hr = mControl.Stop();
        DsError.ThrowExceptionForHR(hr);
        if (ec != EventCode.Complete)
        {
            return (null);
        }

        // First call with a null pointer queries the required buffer size.
        hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
        DsError.ThrowExceptionForHR(hr);
        pBuffer = Marshal.AllocCoTaskMem(pBufferSize);
        hr = sg.GetCurrentBuffer(ref pBufferSize, pBuffer);
        DsError.ThrowExceptionForHR(hr);

        if (pBuffer != IntPtr.Zero)
        {
            AMMediaType sgMt = new AMMediaType();
            int videoWidth = 0;
            int videoHeight = 0;
            int stride = 0;
            try
            {
                hr = sg.GetConnectedMediaType(sgMt);
                DsError.ThrowExceptionForHR(hr);
                if (sgMt.formatPtr != IntPtr.Zero)
                {
                    if (sgMt.formatType == FormatType.VideoInfo)
                    {
                        VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(sgMt.formatPtr, typeof(VideoInfoHeader));
                        videoWidth = vih.BmiHeader.Width;
                        videoHeight = vih.BmiHeader.Height;
                        stride = videoWidth * (vih.BmiHeader.BitCount / 8);
                    }
                    else
                    {
                        throw new ApplicationException("Unsupported Sample");
                    }
                    // FIX: Bitmap(width, height, stride, format, scan0) does NOT copy
                    // the pixel data — it wraps pBuffer, which is freed in the finally
                    // block below. The bitmap must be deep-copied before returning,
                    // otherwise the caller receives a bitmap over freed memory.
                    using (Bitmap wrapper = new Bitmap(videoWidth, videoHeight, stride, System.Drawing.Imaging.PixelFormat.Format32bppRgb, pBuffer))
                    {
                        // DirectShow frames are bottom-up; flip to top-down.
                        wrapper.RotateFlip(RotateFlipType.RotateNoneFlipY);
                        b = new Bitmap(wrapper); // deep copy into managed memory
                    }
                }
            }
            finally
            {
                DsUtils.FreeAMMediaType(sgMt);
            }
        }
        return (b);
    }
    finally
    {
        if (pBuffer != IntPtr.Zero)
        {
            Marshal.FreeCoTaskMem(pBuffer);
        }
    }
}
/// <summary>
/// Builds a throwaway graph over a stream-buffer file (source → decrypt →
/// MS MPEG decoder → null renderer), briefly runs it, then re-renders the
/// decoder output to read the connection's VideoInfoHeader2 — i.e. the native
/// frame size of the recording. Returns null when no VideoInfo2 format is found.
/// </summary>
/// <param name="pathToFile">Path of the .wtv / .dvr-ms file to probe.</param>
public static VideoInfoHeader2 GetSBEFrameSize(string pathToFile)
{
    int hr = 0;
    IGraphBuilder graph = null;
    IBaseFilter capFilter = null;
    IBaseFilter nRender = null;
    try
    {
        graph = (IGraphBuilder)new FilterGraph();
        hr = graph.AddSourceFilter(pathToFile, "Source", out capFilter);
        DsError.ThrowExceptionForHR(hr);
#if DEBUG
        // Register in the ROT so the graph can be inspected with GraphEdit while debugging.
        using (DsROTEntry rot = new DsROTEntry(graph))
        {
#endif
            IPin vPin = null;
            IBaseFilter dec = null;
            IPin sgIn = null;
            IBaseFilter mpegDec = null;
            try
            {
                dec = (IBaseFilter)new DTFilter();
                hr = graph.AddFilter(dec, "Decrypt");
                DsError.ThrowExceptionForHR(hr);
                nRender = (IBaseFilter)new NullRenderer();
                hr = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
                DsError.ThrowExceptionForHR(hr);
                // The PBDA decrypt filter only needs to exist in the graph; the
                // instance reference itself is released immediately.
                IBaseFilter dec1 = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Decrypt.DTFilterPBDA, ref graph, "Decrypt1");
                if (dec1 != null)
                {
                    Marshal.ReleaseComObject(dec1);
                }
                dec1 = null;
                mpegDec = FilterDefinition.AddToFilterGraph(FatAttitude.WTVTranscoder.FilterDefinitions.Video.VideoDecoderMpeg, ref graph, "MS MPEG Decoder");
                sgIn = DsFindPin.ByDirection(mpegDec, PinDirection.Input, 0);
                // Walk the source filter's output pins looking for the first video
                // pin that intelligent-connect can route into the MPEG decoder.
                IEnumPins ppEnum;
                IPin[] pPins = new IPin[1];
                hr = capFilter.EnumPins(out ppEnum);
                DsError.ThrowExceptionForHR(hr);
                try
                {
                    while (ppEnum.Next(1, pPins, IntPtr.Zero) == 0)
                    {
                        IEnumMediaTypes emtDvr = null;
                        AMMediaType[] amtDvr = new AMMediaType[1];
                        try
                        {
                            pPins[0].EnumMediaTypes(out emtDvr);
                            hr = emtDvr.Next(1, amtDvr, IntPtr.Zero);
                            DsError.ThrowExceptionForHR(hr);
                            if (amtDvr[0].majorType == MediaType.Video)
                            {
                                // Connect returns >= 0 for S_OK and partial-success codes.
                                if (graph.Connect(pPins[0], sgIn) >= 0)
                                {
                                    vPin = pPins[0];
                                    break;
                                }
                            }
                            if (pPins[0] != null)
                            {
                                Marshal.ReleaseComObject(pPins[0]);
                            }
                        }
                        finally
                        {
                            if (emtDvr != null)
                            {
                                Marshal.ReleaseComObject(emtDvr);
                            }
                            DsUtils.FreeAMMediaType(amtDvr[0]);
                        }
                    }
                }
                finally
                {
                    if (ppEnum != null)
                    {
                        Marshal.ReleaseComObject(ppEnum);
                    }
                }
                FilterGraphTools.RenderPin(graph, mpegDec, "Video Output 1");
            }
            finally
            {
                if (vPin != null)
                {
                    Marshal.ReleaseComObject(vPin);
                }
                if (dec != null)
                {
                    Marshal.ReleaseComObject(dec);
                }
                if (sgIn != null)
                {
                    Marshal.ReleaseComObject(sgIn);
                }
                if (mpegDec != null)
                {
                    Marshal.ReleaseComObject(mpegDec);
                }
            }

            // Briefly run the graph so the pins negotiate their actual media types.
            EventCode ec;
            IMediaControl mControl = graph as IMediaControl;
            IMediaEvent mEvent = graph as IMediaEvent;
            hr = mControl.Pause();
            DsError.ThrowExceptionForHR(hr);
            hr = mControl.Run();
            DsError.ThrowExceptionForHR(hr);
            hr = mEvent.WaitForCompletion(1000, out ec);
            //DsError.ThrowExceptionForHR(hr);
            hr = mControl.Pause();
            DsError.ThrowExceptionForHR(hr);
            hr = mControl.Stop();
            DsError.ThrowExceptionForHR(hr);

            // Swap in a fresh null renderer, re-render the decoder's output pin and
            // read the negotiated media type from that connection.
            IPin mpgOut = null;
            sgIn = null;
            AMMediaType mt = new AMMediaType();
            try
            {
                sgIn = DsFindPin.ByDirection(nRender, PinDirection.Input, 0);
                if (sgIn != null)
                {
                    hr = sgIn.ConnectedTo(out mpgOut);
                    DsError.ThrowExceptionForHR(hr);
                    hr = graph.RemoveFilter(nRender);
                    DsError.ThrowExceptionForHR(hr);
                    Marshal.ReleaseComObject(nRender);
                    nRender = null;
                    nRender = (IBaseFilter)new NullRenderer();
                    hr = graph.AddFilter((IBaseFilter)nRender, "Video Null Renderer");
                    DsError.ThrowExceptionForHR(hr);
                    hr = graph.Render(mpgOut);
                    DsError.ThrowExceptionForHR(hr);
                    hr = mpgOut.ConnectionMediaType(mt);
                    DsError.ThrowExceptionForHR(hr);
                    if (mt.formatType == FormatType.VideoInfo2)
                    {
                        VideoInfoHeader2 vih = (VideoInfoHeader2)Marshal.PtrToStructure(mt.formatPtr, typeof(VideoInfoHeader2));
                        return(vih);
                    }
                }
            }
            finally
            {
                DsUtils.FreeAMMediaType(mt);
                if (mpgOut != null)
                {
                    Marshal.ReleaseComObject(mpgOut);
                }
                if (sgIn != null)
                {
                    Marshal.ReleaseComObject(sgIn);
                }
            }
#if DEBUG
        }
#endif
    }
    finally
    {
        if (nRender != null)
        {
            Marshal.ReleaseComObject(nRender);
        }
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
        }
        if (graph != null)
        {
            // Drain the graph's RCW reference count completely.
            while (Marshal.ReleaseComObject(graph) > 0)
            {
                ;
            }
        }
    }
    return(null);
}
/// <summary>
/// Runs the streaming graph until the conversion completes and all clients
/// disconnect (or a timeout/cancel/error occurs), raising progress/completion
/// events along the way, then stops the graph and closes the network sink.
/// </summary>
/// <param name="graphBuilder">The graph to run.</param>
/// <param name="seekableFilter">Unused here; seeking is taken from currentOutputFilter / currentFilterGraph.</param>
[HandleProcessCorruptedStateExceptions] // Some filters cause AccessViolations; NET 4 doesn't catch these without this flag see http://msdn.microsoft.com/en-us/magazine/dd419661.aspx#id0070035
protected void RunGraph(IGraphBuilder graphBuilder, IBaseFilter seekableFilter)
{
    bool shouldTerminateGraphLoop = false;

    // Get the media seeking interface to use for computing status and progress updates;
    // prefer the output filter, fall back to the whole graph, else no progress reporting.
    IMediaSeeking mediaSeeking = (IMediaSeeking)currentOutputFilter;
    if (!CanGetPositionAndDuration(mediaSeeking))
    {
        // Try to seek using the main graph
        mediaSeeking = (IMediaSeeking)currentFilterGraph;
        if (!CanGetPositionAndDuration(mediaSeeking))
        {
            mediaSeeking = null;
        }
    }

    // Run the graph
    int hr = 0;
    IMediaControl mediaControl = (IMediaControl)graphBuilder;
    IMediaEvent mediaEvent = (IMediaEvent)graphBuilder;
    EventCode statusCode;
    DateTime startingTime = DateTime.Now;
    TerminationReason whyDidITerminate = TerminationReason.None;
    try
    {
        hr = mediaControl.Pause();
        hr = mediaControl.Run();
        DsError.ThrowExceptionForHR(hr);

        // Signal (if first time) that graph is running OK
        SignalGraphStartedEvent(true);

        bool anyClientsYet = false;
        bool conversionComplete = false;
        while (!shouldTerminateGraphLoop) // continue until we're done/cancelled
        {
            // Any commands? (e.g. seek / cancel)
            ProcessAnyCommands(ref shouldTerminateGraphLoop, ref whyDidITerminate);

            // Check graph conversion progress
            if (conversionComplete)
            {
                // stall to avoid 100% loop — the 250ms wait is used purely as a sleep here
                hr = mediaEvent.WaitForCompletion(250, out statusCode);
            }
            else
            {
                conversionComplete = CheckGraphConversion(ref mediaSeeking);
                if (conversionComplete)
                {
                    if (ConversionProgressChanged != null)
                    {
                        ConversionProgressChanged(new object(), new ProgressChangedEventArgs(100)); // final progress update stating 100% done
                    }
                    if (ConversionCompleted != null)
                    {
                        ConversionCompleted(new object(), new EventArgs());
                    }
                }
            }

            // Check number of clients
            int numClients = NumberOfConnectedClients();
            if ((numClients > 0) && (!anyClientsYet))
            {
                anyClientsYet = true; // A client connected
            }
            else if ((numClients == 0))
            {
                if (anyClientsYet)
                {
                    // There were clients, but All clients have disconnected
                    shouldTerminateGraphLoop = true;
                    whyDidITerminate = TerminationReason.AllClientsDisconnected;
                }
                else
                {
                    // There aren't any clients and never have been - timeout?
                    TimeSpan timeElapsed = DateTime.Now - startingTime;
                    if (timeElapsed.TotalSeconds > TIMEOUT_SECONDS)
                    {
                        shouldTerminateGraphLoop = true;
                        whyDidITerminate = TerminationReason.NoClientsTimeout;
                    }
                }
            }
        }
    }
    catch (Exception ex)
    {
        SendDebugMessageWithException("Error running graph: ", ex);
        whyDidITerminate = TerminationReason.Error;
        SignalGraphStartedEvent(false);
    }
    finally
    {
        try
        {
            // Raise 'done' event first, before any possible AccessExceptions
            switch (whyDidITerminate)
            {
                case TerminationReason.AllClientsDisconnected:
                    if (Finished != null)
                    {
                        Finished(this, new ConversionEndedEventArgs(false, "All clients disconnected."));
                    }
                    break;
                case TerminationReason.NoClientsTimeout:
                    if (Finished != null)
                    {
                        Finished(this, new ConversionEndedEventArgs(false, "No clients ever connected."));
                    }
                    break;
                case TerminationReason.UserCancelled:
                    if (Finished != null)
                    {
                        Finished(this, new ConversionEndedEventArgs(false, "User cancelled."));
                    }
                    break;
                case TerminationReason.Error:
                    if (Finished != null)
                    {
                        Finished(this, new ConversionEndedEventArgs(true, "Unspecified error."));
                    }
                    break;
                default:
                    if (Finished != null)
                    {
                        Finished(this, new ConversionEndedEventArgs(false, "Finished but no additional info."));
                    }
                    break;
            }

            // Stop graph
            FilterState graphState;
            IMediaControl mediaControl2 = (IMediaControl)graphBuilder;
            mediaControl2.GetState(50, out graphState);
            if (graphState == FilterState.Running)
            {
                mediaControl2.Pause();
                mediaControl2.Stop(); // Throwing AccessViolationException: attempted to read or write protected memory (probably a badly written filter somewhere)
            }
        }
        catch (AccessViolationException)
        {
            SendDebugMessage("Ignoring expected AViolationException", 0);
        }
        catch (Exception ex)
        {
            SendDebugMessageWithException("Ignoring exception when closing graph: ", ex);
        }

        // Close sink - can take a loooong time
        CloseNetworkSink();
        /*//so do in a separate thread...
         * Thread t = new Thread(CloseNetworkSink);
         * t.Start();*/
    }
}
/// <summary>Runs the graph</summary>
/// <param name="graphBuilder">The graph to be run.</param>
/// <param name="seekableFilter">The filter to use for computing percent complete. Must implement IMediaSeeking.</param>
protected void RunGraph(IGraphBuilder graphBuilder, IBaseFilter seekableFilter)
{
    // Control/event interfaces live on the graph itself.
    IMediaControl control = (IMediaControl)graphBuilder;
    IMediaEvent events = (IMediaEvent)graphBuilder;

    // Pick a seeking source for progress reporting: the supplied filter first,
    // then the graph itself, otherwise report no progress at all.
    IMediaSeeking seeking = seekableFilter as IMediaSeeking;
    if (!CanGetPositionAndDuration(seeking))
    {
        seeking = graphBuilder as IMediaSeeking;
        if (!CanGetPositionAndDuration(seeking))
        {
            seeking = null;
        }
    }

    // Publish the graph to the running object table and to a temporary file for examination/debugging purposes
    using (new GraphPublisher(graphBuilder, Path.GetTempPath() + Guid.NewGuid().ToString("N") + ".grf"))
    {
        control.Run();
        try
        {
            OnProgressChanged(0); // initial progress update stating 0% done

            bool finished = false;
            while (!CancellationPending && !finished) // poll until done or cancelled
            {
                EventCode status = EventCode.None;
                int hr = events.WaitForCompletion(PollFrequency, out status);

                if (status == EventCode.Complete)
                {
                    finished = true;
                }
                else if (status == EventCode.None)
                {
                    // Nothing happened within the poll window — report progress.
                    if (seeking != null)
                    {
                        ulong position = seeking.GetCurrentPosition();
                        ulong duration = seeking.GetDuration();
                        double percentDone = position * 100.0 / (double)duration;
                        if (percentDone > 0)
                        {
                            OnProgressChanged(percentDone);
                        }
                    }
                }
                else
                {
                    // Any other event code is fatal.
                    throw new DirectShowException(hr, null);
                }
            }

            OnProgressChanged(100); // final progress update stating 100% done
        }
        finally
        {
            // We're done converting, so stop the graph
            control.Stop();
        }
    }
}