/// <summary>
/// Detaches the reference clock from the current filter graph
/// (SetSyncSource(null)), letting the graph run unthrottled.
/// </summary>
private void RemoveRefClockFromGraph()
{
    var mediaFilter = (IMediaFilter)currentFilterGraph;
    DsError.ThrowExceptionForHR(mediaFilter.SetSyncSource(null));
}
/// <summary>
/// Builds a DirectShow playback graph for the given AVI file. The file is
/// rendered through a SampleGrabber configured for RGB32 frames, the default
/// video renderer is replaced with a null renderer, and the graph is primed
/// (Run then Pause) so the first frame is immediately available.
/// </summary>
/// <param name="filename">Path of the media file to open.</param>
/// <param name="playSpeed">Playback rate value; divided by 20.0 before being passed to IMediaSeeking.SetRate.</param>
public CAviDS(string filename, double playSpeed)
{
    builder = new FilterGraph() as IGraphBuilder;
    grabber = new SampleGrabber() as ISampleGrabber;

    // Request 32-bit RGB video frames from the grabber.
    mediaType = new AMMediaType();
    mediaType.majorType = MediaType.Video;
    mediaType.subType = MediaSubType.RGB32;
    mediaType.formatType = FormatType.VideoInfo;
    DsError.ThrowExceptionForHR(grabber.SetMediaType(mediaType));

    // The grabber must be in the graph before RenderFile so it takes part
    // in the automatic pin rendering.
    DsError.ThrowExceptionForHR(builder.AddFilter(grabber as IBaseFilter, "Sample Grabber(DTXMania)"));
    DsError.ThrowExceptionForHR(builder.RenderFile(filename, null));

    // Re-route the grabber's output into a null renderer (no on-screen video).
    CDirectShow.ConnectNullRendererFromSampleGrabber(builder, grabber as IBaseFilter);

    // Keep the (unused) video window hidden.
    if (builder is IVideoWindow videoWindow)
    {
        videoWindow.put_AutoShow(OABool.False);
    }

    // Read back the negotiated format to learn the frame dimensions.
    DsError.ThrowExceptionForHR(grabber.GetConnectedMediaType(mediaType));
    videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
    nWidth = videoInfo.BmiHeader.Width;
    nHeight = videoInfo.BmiHeader.Height;

    seeker = builder as IMediaSeeking;
    DsError.ThrowExceptionForHR(seeker.GetDuration(out nMediaLength));
    // NOTE(review): playSpeed is divided by 20.0 — presumably callers pass a
    // value pre-scaled by 20; confirm against the call sites.
    DsError.ThrowExceptionForHR(seeker.SetRate(playSpeed / 20.0));

    control = builder as IMediaControl;
    filter = builder as IMediaFilter;
    grabber.SetBufferSamples(BufferThem: true);

    // Prime the graph: start, then immediately pause on the first frame.
    Run();
    Pause();
    bPlaying = false;
    bPause = false;
}
///////////////////////////////////////////////////////////////////////////////
// Small helping Methods                                                     //
///////////////////////////////////////////////////////////////////////////////
#region HELPER
/// <summary>
/// Restores the graph's default sync source and caches the resulting
/// reference clock in <c>referenceClock</c>.
/// </summary>
private void RetreiveGraphReferenceClock()
{
    this.graphBuilder.SetDefaultSyncSource();
    var mediaFilter = this.graphBuilder as IMediaFilter;
    DsError.ThrowExceptionForHR(mediaFilter.GetSyncSource(out referenceClock));
}
/// <summary>
/// Builds the capture/compression graph: a GenericSampleSourceFilter feeds a
/// video compressor which is muxed into an AVI file, then the graph is run.
/// </summary>
/// <param name="destFilename">Path of the AVI file to write.</param>
/// <param name="encoderName">Friendly name of the video compressor to use.</param>
/// <exception cref="InvalidCodecException">Thrown when no compressor matches <paramref name="encoderName"/>.</exception>
private void SetupGraph(string destFilename, string encoderName)
{
    int hr;

    // Get the graphbuilder objects.
    captureGraphBuilder = new DirectShowLib.CaptureGraphBuilder2() as DirectShowLib.ICaptureGraphBuilder2;
    IFilterGraph2 filterGraph = new DirectShowLib.FilterGraph() as DirectShowLib.IFilterGraph2;
    mediaCtrl = filterGraph as DirectShowLib.IMediaControl;
    mediaEvent = filterGraph as IMediaEvent;
    captureGraphBuilder.SetFiltergraph(filterGraph);

    // AVI mux + file writer. fileSink is an out-parameter we don't use further.
    IBaseFilter aviMux;
    IFileSinkFilter fileSink = null;
    hr = captureGraphBuilder.SetOutputFileName(MediaSubType.Avi, destFilename, out aviMux, out fileSink);
    DsError.ThrowExceptionForHR(hr);

    // Look up and add the requested compressor.
    DirectShowLib.IBaseFilter compressor = DirectShowUtils.GetVideoCompressor(encoderName);
    if (compressor == null)
    {
        throw new InvalidCodecException(encoderName);
    }
    hr = filterGraph.AddFilter(compressor, "compressor");
    DsError.ThrowExceptionForHR(hr);

    // Our data source.
    IBaseFilter source = (IBaseFilter)new GenericSampleSourceFilter();

    // Get the pin from the filter so we can configure it.
    IPin ipin = DsFindPin.ByDirection(source, PinDirection.Output, 0);
    try
    {
        // Configure the pin using the provided BitmapInfo.
        ConfigurePusher((IGenericSampleConfig)ipin);
    }
    finally
    {
        Marshal.ReleaseComObject(ipin);
    }

    // BUGFIX: the source filter was previously added to the graph twice
    // (once as "GenericSampleSourceFilter", once as "source") with mixed
    // Marshal/DsError HR checking; add it exactly once.
    hr = filterGraph.AddFilter(source, "GenericSampleSourceFilter");
    DsError.ThrowExceptionForHR(hr);

    // Connect source -> compressor -> AVI mux.
    hr = captureGraphBuilder.RenderStream(null, null, source, compressor, aviMux);
    DsError.ThrowExceptionForHR(hr);

    hr = mediaCtrl.Run();
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Toggle SetSyncClock
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void _clockButton_Click(object sender, EventArgs e)
{
    var editPanel = Parent.Parent as DSGraphEditPanel;

    if ((bool)_clockButton.Tag)
    {
        // unset as reference clock
        _clockButton.MouseOutsideTint = Color.DarkGray;
        _clockButton.ToolTipText = "Set as Reference Clock";
        _clockButton.Tag = false;
    }
    else
    {
        // only one node may own the clock — clear the previous owner first
        DSFilterNodeUI previousOwner = editPanel.GetReferenceClock();
        if (previousOwner != null)
        {
            previousOwner._clockButton.MouseOutsideTint = Color.DarkGray;
            previousOwner._clockButton.ToolTipText = "Set as Reference Clock";
            previousOwner._clockButton.Tag = false;
        }

        // set as reference clock
        _clockButton.MouseOutsideTint = Color.Yellow;
        _clockButton.ToolTipText = "Unset Reference Clock";
        _clockButton.Tag = true;
    }

    // if the graph is set up to use a reference clock, apply the new one
    if (editPanel.UseReferenceClock)
    {
        editPanel.Stop();
        DSFilterNodeUI clockOwner = editPanel.GetReferenceClock();
        IMediaFilter mediaFilter = (Parent as DSDaggerUIGraph)._Graph as IMediaFilter;
        try
        {
            if (clockOwner != null)
            {
                mediaFilter.SetSyncSource(clockOwner._referenceClock);
            }
            else
            {
                (Parent as DSDaggerUIGraph)._Graph.SetDefaultSyncSource();
            }
        }
        catch (Exception ex)
        {
            MessageBox.Show(ex.Message, "Error setting reference clock");
        }
    }
}
/*
 * // This version of FindCaptureDevice is provided for education only.
 * // A second version using the DsDevice helper class is defined later.
 * public IBaseFilter FindCaptureDevice()
 * {
 *   int hr = 0;
 * #if USING_NET11
 *   UCOMIEnumMoniker classEnum = null;
 *   UCOMIMoniker[] moniker = new UCOMIMoniker[1];
 * #else
 *   IEnumMoniker classEnum = null;
 *   IMoniker[] moniker = new IMoniker[1];
 * #endif
 *   object source = null;
 *
 *   // Create the system device enumerator
 *   ICreateDevEnum devEnum = (ICreateDevEnum)new CreateDevEnum();
 *
 *   // Create an enumerator for the video capture devices
 *   hr = devEnum.CreateClassEnumerator( FilterCategory.VideoInputDevice, out classEnum, 0 );
 *   DsError.ThrowExceptionForHR( hr );
 *
 *   // The device enumerator is no longer needed
 *   Marshal.ReleaseComObject( devEnum );
 *
 *   // If there are no enumerators for the requested type, then
 *   // CreateClassEnumerator will succeed, but classEnum will be NULL.
 *   if( classEnum == null )
 *   {
 *     throw new ApplicationException( "No video capture device was detected.\r\n\r\n" +
 *       "This sample requires a video capture device, such as a USB WebCam,\r\n" +
 *       "to be installed and working properly. The sample will now close." );
 *   }
 *
 *   // Use the first video capture device on the device list.
 *   // Note that if the Next() call succeeds but there are no monikers,
 *   // it will return 1 (S_FALSE) (which is not a failure). Therefore, we
 *   // check that the return code is 0 (S_OK).
 * #if USING_NET11
 *   int i;
 *   if (classEnum.Next (moniker.Length, moniker, IntPtr.Zero) == 0)
 * #else
 *   while( classEnum.Next( moniker.Length, moniker, IntPtr.Zero ) == 0 )
 * #endif
 *   {
 *     // Bind Moniker to a filter object
 *     Guid iid = typeof( IBaseFilter ).GUID;
 *     moniker[0].BindToObject( null, null, ref iid, out source );
 *   }
 *   //else
 *   //{
 *   //  throw new ApplicationException( "Unable to access video capture device!" );
 *   //}
 *
 *   // Release COM objects
 *   Marshal.ReleaseComObject( moniker[0] );
 *   Marshal.ReleaseComObject( classEnum );
 *
 *   // An exception is thrown if cast fail
 *   return (IBaseFilter)source;
 * }
 */
// Uncomment this version of FindCaptureDevice to use the DsDevice helper class
// (and comment the first version of course)

/// <summary>
/// Creates the filter graph and capture graph builder and caches the COM
/// interfaces this window uses (control, video window, events, stream and
/// filter interfaces), then registers the window for graph-event messages.
/// </summary>
public void GetInterfaces()
{
    int hr = 0;

    // An exception is thrown if cast fail
    this.graphBuilder = (IGraphBuilder) new FilterGraph();
    this.captureGraphBuilder = (ICaptureGraphBuilder2) new CaptureGraphBuilder2();

    // All of these interfaces live on the same filter-graph object.
    this.mediaControl = (IMediaControl)this.graphBuilder;
    this.videoWindow = (IVideoWindow)this.graphBuilder;
    this.mediaEventEx = (IMediaEventEx)this.graphBuilder;
    this.graph_streams = (IAMGraphStreams)this.graphBuilder;
    this.graph_filter = (IMediaFilter)this.graphBuilder;

    // Route graph events to this window as WM_GRAPHNOTIFY messages.
    hr = this.mediaEventEx.SetNotifyWindow(this.Handle, WM_GRAPHNOTIFY, IntPtr.Zero);
    DsError.ThrowExceptionForHR(hr);
}
/// <summary>
/// Runs the IMediaFilter test suite against a freshly created VMR9 instance,
/// releasing the COM object afterwards.
/// </summary>
public void DoTests()
{
    try
    {
        // The VMR9 is used here because, unlike VideoRendererDefault (which
        // is in fact a VMR7 on Windows XP), it is a well-behaved filter.
        // All DirectShow filters implement IBaseFilter, which inherits
        // from IMediaFilter.
        this.filter = (IMediaFilter)new VideoMixingRenderer9();

        TestRun();
        TestPause();
        TestStop();
        TestGetState();
        TestSyncSource();
    }
    finally
    {
        Marshal.ReleaseComObject(this.filter);
    }
}
/// <summary>
/// Builds a file-playback graph: source file -> sample grabber -> null
/// renderer, then removes the reference clock so frames flow through the
/// graph as fast as possible, and caches the grabbed image size.
/// </summary>
/// <param name="FileName">Path of the media file to open.</param>
private void SetupGraph(string FileName)
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter baseGrabFlt = null;
    IBaseFilter capFilter = null;
    IBaseFilter nullrenderer = null;

    // Get the graphbuilder object.
    m_FilterGraph = new FilterGraph() as IFilterGraph2;
    m_mediaCtrl = m_FilterGraph as IMediaControl;
    m_MediaEvent = m_FilterGraph as IMediaEvent;
    IMediaFilter mediaFilt = m_FilterGraph as IMediaFilter;
    try
    {
        // Add the video source.
        hr = m_FilterGraph.AddSourceFilter(FileName, "Ds.NET FileFilter", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // Get the SampleGrabber interface and configure it.
        sampGrabber = new SampleGrabber() as ISampleGrabber;
        baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph.
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // Connect the file filter to the sample grabber.
        IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
        try
        {
            hr = m_FilterGraph.Connect(iPinOut, iPinIn);
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            // BUGFIX: these pin references were previously leaked.
            Marshal.ReleaseComObject(iPinOut);
            Marshal.ReleaseComObject(iPinIn);
        }

        // Add the null renderer to the graph.
        nullrenderer = new NullRenderer() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
        DsError.ThrowExceptionForHR(hr);

        // Connect the sample grabber to the null renderer.
        iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
        iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);
        try
        {
            hr = m_FilterGraph.Connect(iPinOut, iPinIn);
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            Marshal.ReleaseComObject(iPinOut);
            Marshal.ReleaseComObject(iPinIn);
        }

        // Turn off the clock. This causes the frames to be sent
        // thru the graph as fast as possible.
        hr = mediaFilt.SetSyncSource(null);
        DsError.ThrowExceptionForHR(hr);

        // Read and cache the image sizes.
        SaveSizeInfo(sampGrabber);
    }
    finally
    {
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        if (nullrenderer != null)
        {
            Marshal.ReleaseComObject(nullrenderer);
            nullrenderer = null;
        }
    }
}
/// <summary>
/// Builds the audio-capture graph: audio device -> sample grabber -> null
/// renderer, optionally forcing the sample rate / channel count, then caches
/// the resulting format.
/// </summary>
/// <param name="dev">Audio capture device to use.</param>
/// <param name="iSampleRate">Requested sample rate, or 0 to keep the default.</param>
/// <param name="iChannels">Requested channel count, or 0 to keep the default.</param>
private void SetupGraph(DsDevice dev, int iSampleRate, int iChannels)
{
    int hr;
    ISampleGrabber sampGrabber = null;
    IBaseFilter capFilter = null;
    ICaptureGraphBuilder2 capGraph = null;
    IBaseFilter baseGrabFlt = null;
    IBaseFilter nullrenderer = null;

    // BUGFIX: the original read "m_FilterGraph as IMediaFilter" into an
    // (unused) local BEFORE m_FilterGraph was assigned; that stale/null
    // local has been removed.

    // Get the graphbuilder object.
    m_FilterGraph = (IFilterGraph2)new FilterGraph();
    m_mediaCtrl = m_FilterGraph as IMediaControl;
    try
    {
        // Get the ICaptureGraphBuilder2.
        capGraph = (ICaptureGraphBuilder2)new CaptureGraphBuilder2();

        // BUGFIX: the SampleGrabber was previously instantiated twice,
        // leaking the first COM instance; create it exactly once.
        sampGrabber = (ISampleGrabber)new SampleGrabber();

        // Start building the graph.
        hr = capGraph.SetFiltergraph(m_FilterGraph);
        DsError.ThrowExceptionForHR(hr);

        // Add the audio device.
        hr = m_FilterGraph.AddSourceFilterForMoniker(dev.Mon, null, "Audio input", out capFilter);
        DsError.ThrowExceptionForHR(hr);

        // If any of the default config items are set.
        if (iSampleRate + iChannels > 0)
        {
            SetConfigParms(capGraph, capFilter, iSampleRate, iChannels);
        }

        baseGrabFlt = sampGrabber as IBaseFilter;
        ConfigureSampleGrabber(sampGrabber);

        // Add the frame grabber to the graph.
        hr = m_FilterGraph.AddFilter(baseGrabFlt, "Ds.NET Grabber");
        DsError.ThrowExceptionForHR(hr);

        // Connect the capture filter to the sample grabber.
        IPin iPinOut = DsFindPin.ByDirection(capFilter, PinDirection.Output, 0);
        IPin iPinIn = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Input, 0);
        try
        {
            hr = m_FilterGraph.Connect(iPinOut, iPinIn);
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            // BUGFIX: these pin references were previously leaked.
            Marshal.ReleaseComObject(iPinOut);
            Marshal.ReleaseComObject(iPinIn);
        }

        // Add the null renderer to the graph.
        nullrenderer = new NullRenderer() as IBaseFilter;
        hr = m_FilterGraph.AddFilter(nullrenderer, "Null renderer");
        DsError.ThrowExceptionForHR(hr);

        // Connect the sample grabber to the null renderer.
        iPinOut = DsFindPin.ByDirection(baseGrabFlt, PinDirection.Output, 0);
        iPinIn = DsFindPin.ByDirection(nullrenderer, PinDirection.Input, 0);
        try
        {
            hr = m_FilterGraph.Connect(iPinOut, iPinIn);
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            Marshal.ReleaseComObject(iPinOut);
            Marshal.ReleaseComObject(iPinIn);
        }

        // Read and cache the resulting settings.
        SaveSizeInfo(sampGrabber);
    }
    finally
    {
        if (capFilter != null)
        {
            Marshal.ReleaseComObject(capFilter);
            capFilter = null;
        }
        if (sampGrabber != null)
        {
            Marshal.ReleaseComObject(sampGrabber);
            sampGrabber = null;
        }
        // BUGFIX: the null renderer was previously not released on cleanup.
        if (nullrenderer != null)
        {
            Marshal.ReleaseComObject(nullrenderer);
            nullrenderer = null;
        }
        if (capGraph != null)
        {
            Marshal.ReleaseComObject(capGraph);
            capGraph = null;
        }
    }
}
/// <summary> create the used COM components and get the interfaces. </summary>
/// <remarks>
/// Builds the StreamBufferEngine playback graph: VMR9 renderer, SBE source,
/// preferred codecs and custom filters from configuration, then renders the
/// SBE output pins. Falls back to the overlay-based base player when VMR9
/// does not connect. Returns false on any failure (errors are logged).
/// </remarks>
protected override bool GetInterfaces(string filename)
{
  Speed = 1;
  Log.Info("StreamBufferPlayer9: GetInterfaces()");
  //switch back to directx fullscreen mode
  // Log.Info("StreamBufferPlayer9: switch to fullscreen mode");
  Log.Info("StreamBufferPlayer9: Enabling DX9 exclusive mode");
  GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
  GUIWindowManager.SendMessage(msg);
  //Log.Info("StreamBufferPlayer9: build graph");
  try
  {
    _graphBuilder = (IGraphBuilder) new FilterGraph();
    //Log.Info("StreamBufferPlayer9: add _vmr9");
    _vmr9 = new VMR9Util();
    _vmr9.AddVMR9(_graphBuilder);
    _vmr9.Enable(false);
    int hr;
    m_StreamBufferConfig = new StreamBufferConfig();
    streamConfig2 = m_StreamBufferConfig as IStreamBufferConfigure2;
    if (streamConfig2 != null)
    {
      // setting the StreamBufferEngine registry key
      // 0x80000002 is HKEY_LOCAL_MACHINE; 0x3f is the access-rights mask.
      IntPtr HKEY = (IntPtr) unchecked ((int)0x80000002L);
      IStreamBufferInitialize pTemp = (IStreamBufferInitialize)streamConfig2;
      IntPtr subKey = IntPtr.Zero;
      RegOpenKeyEx(HKEY, "SOFTWARE\\MediaPortal", 0, 0x3f, out subKey);
      hr = pTemp.SetHKEY(subKey);
      hr = streamConfig2.SetFFTransitionRates(8, 32);
      //Log.Info("set FFTransitionRates:{0:X}",hr);
      int max, maxnon;
      hr = streamConfig2.GetFFTransitionRates(out max, out maxnon);
      streamConfig2.GetBackingFileCount(out _minBackingFiles, out _maxBackingFiles);
      streamConfig2.GetBackingFileDuration(out _backingFileDuration);
    }
    //Log.Info("StreamBufferPlayer9: add sbe");
    // create SBE source
    _bufferSource = (IStreamBufferSource) new StreamBufferSource();
    if (_bufferSource == null)
    {
      Log.Error("StreamBufferPlayer9:Failed to create instance of SBE (do you have WinXp SP1?)");
      return(false);
    }
    IBaseFilter filter = (IBaseFilter)_bufferSource;
    hr = _graphBuilder.AddFilter(filter, "SBE SOURCE");
    if (hr != 0)
    {
      Log.Error("StreamBufferPlayer9:Failed to add SBE to graph");
      return(false);
    }
    IFileSourceFilter fileSource = (IFileSourceFilter)_bufferSource;
    if (fileSource == null)
    {
      Log.Error("StreamBufferPlayer9:Failed to get IFileSourceFilter");
      return(false);
    }
    //Log.Info("StreamBufferPlayer9: open file:{0}",filename);
    hr = fileSource.Load(filename, null);
    if (hr != 0)
    {
      Log.Error("StreamBufferPlayer9:Failed to open file:{0} :0x{1:x}", filename, hr);
      return(false);
    }
    //Log.Info("StreamBufferPlayer9: add codecs");
    // add preferred video & audio codecs
    string strVideoCodec = "";
    string strAudioCodec = "";
    string strAudioRenderer = "";
    int intFilters = 0; // FlipGer: count custom filters
    string strFilters = ""; // FlipGer: collect custom filters
    using (Settings xmlreader = new MPSettings())
    {
      // FlipGer: load infos for custom filters
      // Filters are numbered filter0, filter1, ... until "undefined" is hit.
      int intCount = 0;
      while (xmlreader.GetValueAsString("mytv", "filter" + intCount.ToString(), "undefined") != "undefined")
      {
        if (xmlreader.GetValueAsBool("mytv", "usefilter" + intCount.ToString(), false))
        {
          strFilters += xmlreader.GetValueAsString("mytv", "filter" + intCount.ToString(), "undefined") + ";";
          intFilters++;
        }
        intCount++;
      }
      strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
      strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
      strAudioRenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
      string strValue = xmlreader.GetValueAsString("mytv", "defaultar", "Normal");
      GUIGraphicsContext.ARType = Util.Utils.GetAspectRatio(strValue);
    }
    if (strVideoCodec.Length > 0)
    {
      _videoCodecFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, strVideoCodec);
    }
    if (strAudioCodec.Length > 0)
    {
      _audioCodecFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, strAudioCodec);
    }
    if (strAudioRenderer.Length > 0)
    {
      _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudioRenderer, true);
    }
    // FlipGer: add custom filters to graph
    customFilters = new IBaseFilter[intFilters];
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      customFilters[i] = DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
    }
    // render output pins of SBE
    DirectShowUtil.RenderOutputPins(_graphBuilder, (IBaseFilter)fileSource);
    _mediaCtrl = (IMediaControl)_graphBuilder;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = _bufferSource as IStreamBufferMediaSeeking;
    _mediaSeeking2 = _bufferSource as IStreamBufferMediaSeeking2;
    if (_mediaSeeking == null)
    {
      Log.Error("Unable to get IMediaSeeking interface#1");
    }
    if (_mediaSeeking2 == null)
    {
      Log.Error("Unable to get IMediaSeeking interface#2");
    }
    // When an explicit audio renderer was added, use it as the graph's
    // reference clock.
    if (_audioRendererFilter != null)
    {
      IMediaFilter mp = _graphBuilder as IMediaFilter;
      IReferenceClock clock = _audioRendererFilter as IReferenceClock;
      hr = mp.SetSyncSource(clock);
    }
    // Set the IBasicAudioInterface
    _basicAudio = (IBasicAudio)_graphBuilder;
    // Log.Info("StreamBufferPlayer9:SetARMode");
    // DirectShowUtil.SetARMode(_graphBuilder,AspectRatioMode.Stretched);
    //Log.Info("StreamBufferPlayer9: set Deinterlace");
    if (!_vmr9.IsVMR9Connected)
    {
      //_vmr9 is not supported, switch to overlay
      Log.Info("StreamBufferPlayer9: switch to overlay");
      _mediaCtrl = null;
      Cleanup();
      return(base.GetInterfaces(filename));
    }
    _pinVmr9ConnectedTo = _vmr9.PinConnectedTo;
    _vmr9.SetDeinterlaceMode();
    return(true);
  }
  catch (Exception ex)
  {
    Log.Error("StreamBufferPlayer9:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
    return(false);
  }
}
/// <summary>
/// Builds the playback graph for the current file: a source filter (ASF
/// reader for .wmv, otherwise an automatically chosen or async reader),
/// a video sample grabber, optionally an audio sample grabber for peak
/// monitoring, and the media control/seek/event/audio interfaces.
/// On failure, tears the graph down and raises VideoSourceError.
/// </summary>
private void CreateFilters()
{
  isValid = true;
  // grabber
  grabberVideo = new GrabberVideo(this);
  grabberAudio = new GrabberAudio(this);
  // objects
  graphObject = null;
  grabberObjectVideo = null;
  grabberObjectAudio = null;
  int sourceBaseVideoPinIndex = 0;
  try
  {
    // get type for filter graph
    Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
    if (type == null) { throw new ApplicationException("Failed creating filter graph"); }
    // create filter graph
    graphObject = Activator.CreateInstance(type);
    graph = (IGraphBuilder)graphObject;
    // create source device's object
    if (fileName.ToLower().EndsWith(".wmv"))
    {
      // WMV files go through the WM ASF Reader; its video pin is index 1.
      type = Type.GetTypeFromCLSID(Clsid.WMASFReader);
      if (type == null) { throw new ApplicationException("Failed creating ASF Reader filter"); }
      sourceBase = (IBaseFilter)Activator.CreateInstance(type);
      IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
      sourceFile.Load(fileName, null);
      graph.AddFilter(sourceBase, "source");
      sourceBaseVideoPinIndex = 1;
    }
    else
    {
      graph.AddSourceFilter(fileName, "source", out sourceBase);
      if (sourceBase == null)
      {
        // Fall back to the Async Reader when no source filter was found.
        try
        {
          type = Type.GetTypeFromCLSID(Clsid.AsyncReader);
          if (type == null) { throw new ApplicationException("Failed creating Async Reader filter"); }
          sourceBase = (IBaseFilter)Activator.CreateInstance(type);
          IFileSourceFilter sourceFile = (IFileSourceFilter)sourceBase;
          sourceFile.Load(fileName, null);
          graph.AddFilter(sourceBase, "source");
        }
        catch
        {
          throw new ApplicationException("Failed creating source filter");
        }
      }
      sourceBaseVideoPinIndex = 0;
    }
    // get type for sample grabber
    type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
    if (type == null) { throw new ApplicationException("Failed creating sample grabber"); }
    // create sample grabber
    grabberObjectVideo = Activator.CreateInstance(type);
    sampleGrabberVideo = (ISampleGrabber)grabberObjectVideo;
    grabberBaseVideo = (IBaseFilter)grabberObjectVideo;
    // add grabber filters to graph
    graph.AddFilter(grabberBaseVideo, "grabberVideo");
    // set media type
    AMMediaType mediaType = new AMMediaType { MajorType = MediaType.Video, SubType = MediaSubType.ARGB32 /* MediaSubType.RGB24 */ }; ;
    sampleGrabberVideo.SetMediaType(mediaType);
    // connect pins
    IPin outPin = Tools.GetOutPin(sourceBase, sourceBaseVideoPinIndex);
    IPin inPin = Tools.GetInPin(grabberBaseVideo, 0);
    if (graph.Connect(outPin, inPin) < 0) { throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo"); }
    Marshal.ReleaseComObject(outPin);
    Marshal.ReleaseComObject(inPin);
    // get media type
    if (sampleGrabberVideo.GetConnectedMediaType(mediaType) == 0)
    {
      VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
      grabberVideo.Width = vih.BmiHeader.Width;
      grabberVideo.Height = vih.BmiHeader.Height;
      mediaType.Dispose();
    }
    if (useAudioGrabber)
    {
      // *****************************************************************
      // ******** Add the audio grabber to monitor audio peaks ***********
      bool audioGrabberIsConnected = false;
      Tools.FilterInfo2 filterInfo2 = Tools.GetNextFilter(sourceBase, PinDirection.Output, 0);
      foreach (Tools.PinInfo2 pinInfo2 in filterInfo2.Pins)
      {
        if (pinInfo2.PinInfo.Direction == PinDirection.Output)
        {
          if (!Tools.IsPinConnected(pinInfo2.Pin))
          {
            try
            {
              // Render the pin and check whether it turned out to be audio.
              graph.Render(pinInfo2.Pin);
              AMMediaType mt = new AMMediaType();
              pinInfo2.Pin.ConnectionMediaType(mt);
              if (mt.MajorType == MediaType.Audio)
              {
                // Obtain a reference to the filter connected to the audio output of the video splitter (usually, this is the audio decoder)
                Tools.FilterInfo2 decoderFilterInfo2 = Tools.GetNextFilter(pinInfo2.PinInfo.Filter, PinDirection.Output, 0);
                // Remove all the filters connected to the audio decoder filter
                System.Collections.Generic.List<Tools.FilterInfo2> filtersInfo2 = new System.Collections.Generic.List<Tools.FilterInfo2>();
                Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(decoderFilterInfo2.Filter, PinDirection.Output, 0);
                while (true)
                {
                  filtersInfo2.Add(testFilterInfo2);
                  testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                  if (testFilterInfo2.Filter == null) { break; }
                }
                foreach (Tools.FilterInfo2 fi2 in filtersInfo2)
                {
                  graph.RemoveFilter(fi2.Filter);
                  fi2.Release();
                }
                // get type for sample grabber
                type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
                if (type == null) { throw new ApplicationException("Failed creating audio sample grabber"); }
                // create sample grabber
                grabberObjectAudio = Activator.CreateInstance(type);
                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                grabberBaseAudio = (IBaseFilter)grabberObjectAudio;
                // add grabber filters to graph
                graph.AddFilter(grabberBaseAudio, "grabberAudio");
                // set media type
                AMMediaType mediaTypeAudio = new AMMediaType { MajorType = MediaType.Audio, SubType = MediaSubType.PCM, FormatType = FormatType.WaveEx };
                sampleGrabberAudio.SetMediaType(mediaTypeAudio);
                outPin = Tools.GetOutPin(decoderFilterInfo2.Filter, 0);
                inPin = Tools.GetInPin(grabberBaseAudio, 0);
                if (graph.Connect(outPin, inPin) < 0) { throw new ApplicationException("Failed connecting filter to grabberBaseAudio"); }
                Marshal.ReleaseComObject(outPin);
                Marshal.ReleaseComObject(inPin);
                // Finally, connect the grabber to the audio renderer
                outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                graph.Render(outPin);
                mt = new AMMediaType();
                outPin.ConnectionMediaType(mt);
                if (!Tools.IsPinConnected(outPin)) { throw new ApplicationException("Failed obtaining media audio information"); }
                wavFormat = new WaveFormatEx();
                Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
                Marshal.ReleaseComObject(outPin);
                // configure sample grabber
                sampleGrabberAudio.SetBufferSamples(false);
                sampleGrabberAudio.SetOneShot(false);
                sampleGrabberAudio.SetCallback(grabberAudio, 1);
                audioGrabberIsConnected = true;
                break;
              }
            }
            catch { }
          }
        }
      }
      filterInfo2.Release();
      if (!audioGrabberIsConnected)
      {
        // Fallback: probe the source's unconnected pins directly for an
        // audio media type and connect the grabber straight to it.
        foreach (Tools.PinInfo2 pinInfo2 in Tools.GetPins(sourceBase))
        {
          if (!Tools.IsPinConnected(pinInfo2.Pin))
          {
            foreach (AMMediaType mt in Tools.GetMediaTypes(pinInfo2.Pin))
            {
              if (mt.MajorType == MediaType.Audio)
              {
                // create sample grabber
                grabberObjectAudio = Activator.CreateInstance(type);
                sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
                grabberBaseAudio = (IBaseFilter)grabberObjectAudio;
                // add grabber filters to graph
                graph.AddFilter(grabberBaseAudio, "grabberAudio");
                // set media type
                AMMediaType mediaTypeAudio = new AMMediaType { MajorType = MediaType.Audio, SubType = MediaSubType.PCM, FormatType = FormatType.WaveEx };
                sampleGrabberAudio.SetMediaType(mediaTypeAudio);
                inPin = Tools.GetInPin(grabberBaseAudio, 0);
                if (graph.Connect(pinInfo2.Pin, inPin) < 0) { throw new ApplicationException("Failed connecting sourceBase to grabberBaseVideo"); }
                Marshal.ReleaseComObject(inPin);
                // Finally, connect the grabber to the audio renderer
                outPin = Tools.GetOutPin(grabberBaseAudio, 0);
                graph.Render(outPin);
                AMMediaType amt = new AMMediaType();
                outPin.ConnectionMediaType(amt);
                if (!Tools.IsPinConnected(outPin)) { throw new ApplicationException("Failed obtaining media audio information"); }
                wavFormat = new WaveFormatEx();
                Marshal.PtrToStructure(amt.FormatPtr, wavFormat);
                Marshal.ReleaseComObject(outPin);
                // configure sample grabber
                sampleGrabberAudio.SetBufferSamples(false);
                sampleGrabberAudio.SetOneShot(false);
                sampleGrabberAudio.SetCallback(grabberAudio, 1);
                audioGrabberIsConnected = true;
                break;
              }
            }
          }
        }
      }
      // *****************************************************************
    }
    // let's do the rendering, if we don't need to prevent freezing
    if (!preventFreezing)
    {
      // render pin
      graph.Render(Tools.GetOutPin(grabberBaseVideo, 0));
      // configure video window
      IVideoWindow window = (IVideoWindow)graphObject;
      window.put_AutoShow(false);
      window = null;
    }
    // configure sample grabber
    sampleGrabberVideo.SetBufferSamples(false);
    sampleGrabberVideo.SetOneShot(false);
    sampleGrabberVideo.SetCallback(grabberVideo, 1);
    // disable clock, if someone requested it
    if (!referenceClockEnabled)
    {
      IMediaFilter mediaFilter = (IMediaFilter)graphObject;
      mediaFilter.SetSyncSource(null);
    }
    // get media control
    mediaControl = (IMediaControl)graphObject;
    // get media seek control
    mediaSeekControl = (IMediaSeeking)graphObject;
    // get media events' interface
    mediaEvent = (IMediaEventEx)graphObject;
    // get media audio control
    basicAudio = (IBasicAudio)graphObject;
  }
  catch (Exception exception)
  {
    DestroyFilters();
    // provide information to clients
    VideoSourceError?.Invoke(this, new VideoSourceErrorEventArgs(exception.Message));
  }
}
/// <summary>
/// Resets state and builds the base graph from either a file path or a
/// caller-supplied source filter (exactly one of the two must be given),
/// removes the reference clock, dumps the source-filter info to a text file
/// next to the executing assembly, and wires up the video/audio grabbers.
/// </summary>
void Init(string inputFile, IBaseFilter userSourceFilter)
{
    Reset();

    bool haveFile = !string.IsNullOrEmpty(inputFile);
    if (haveFile && userSourceFilter != null)
    {
        throw new ArgumentException("Specify only one kind of input");
    }

    graph = new FilterGraph() as IFilterGraph2;
    if (graph == null)
    {
        throw new COMException("Cannot create FilterGraph");
    }

    mediaControl = graph as IMediaControl;
    if (mediaControl == null)
    {
        throw new COMException("Cannot obtain IMediaControl");
    }

    mediaEvent = graph as IMediaEventEx;
    if (mediaEvent == null)
    {
        throw new COMException("Cannot obtain IMediaEventEx");
    }

    // remove reference clock so the graph is not throttled to real time
    IMediaFilter mediaFilter = graph as IMediaFilter;
    mediaFilter.SetSyncSource(null);

    string sourceFilterInfoDumpPath = Path.Combine(
        Path.GetDirectoryName(System.Reflection.Assembly.GetExecutingAssembly().Location),
        "source_filter_info_dump.txt");

    int hr;
    if (haveFile)
    {
        IBaseFilter sourceFilter = null;
        try
        {
            hr = graph.AddSourceFilter(inputFile, "Source", out sourceFilter);
            DsError.ThrowExceptionForHR(hr);
            System.IO.File.WriteAllText(sourceFilterInfoDumpPath, Util.DumpFilterInfo(sourceFilter));
            InitVideoGrabber(sourceFilter);
            InitAudioGrabber(sourceFilter);
        }
        finally
        {
            Util.ReleaseComObject(ref sourceFilter);
        }
    }
    else
    {
        hr = graph.AddFilter(userSourceFilter, "Source");
        DsError.ThrowExceptionForHR(hr);
        System.IO.File.WriteAllText(sourceFilterInfoDumpPath, Util.DumpFilterInfo(userSourceFilter));
        InitVideoGrabber(userSourceFilter);
        InitAudioGrabber(userSourceFilter);
    }
}
/// <summary>
/// Create the used COM components and get the interfaces.
/// Builds the full RTSP playback graph: VMR9 renderer (video only), MPEG-2
/// demultiplexer, RTSP source filter, the codecs/renderer configured in the user
/// settings, optional DVB subtitle pins, then queries the graph control interfaces
/// and (when an audio renderer was added) makes it the graph's reference clock.
/// </summary>
/// <returns>true when the graph was built and connected; false on any failure
/// (the error is logged and partially built state is cleaned up).</returns>
protected bool GetInterfaces()
{
  VMR9Util.g_vmr9 = null;
  if (IsRadio == false)
  {
    Vmr9 = VMR9Util.g_vmr9 = new VMR9Util();
    // switch back to directx fullscreen mode
    Log.Info("RTSPPlayer: Enabling DX9 exclusive mode");
    GUIMessage msg = new GUIMessage(GUIMessage.MessageType.GUI_MSG_SWITCH_FULL_WINDOWED, 0, 0, 0, 1, 0, null);
    GUIWindowManager.SendMessage(msg);
  }
  //Type comtype = null;
  //object comobj = null;
  // NOTE(review): 'rect' is initialized from the form bounds but never used afterwards.
  DsRect rect = new DsRect();
  rect.top = 0;
  rect.bottom = GUIGraphicsContext.form.Height;
  rect.left = 0;
  rect.right = GUIGraphicsContext.form.Width;
  try
  {
    graphBuilder = (IGraphBuilder) new FilterGraph();
    Log.Info("RTSPPlayer: add source filter");
    if (IsRadio == false)
    {
      bool AddVMR9 = VMR9Util.g_vmr9 != null && VMR9Util.g_vmr9.AddVMR9(graphBuilder);
      if (!AddVMR9)
      {
        Log.Error("RTSPPlayer:Failed to add VMR9 to graph");
        return(false);
      }
      VMR9Util.g_vmr9.Enable(false);
    }
    _mpegDemux = (IBaseFilter) new MPEG2Demultiplexer();
    graphBuilder.AddFilter(_mpegDemux, "MPEG-2 Demultiplexer");
    _rtspSource = (IBaseFilter) new RtpSourceFilter();
    int hr = graphBuilder.AddFilter((IBaseFilter)_rtspSource, "RTSP Source Filter");
    if (hr != 0)
    {
      Log.Error("RTSPPlayer:unable to add RTSP source filter:{0:X}", hr);
      return(false);
    }
    // add preferred video & audio codecs
    Log.Info("RTSPPlayer: add video/audio codecs");
    string strVideoCodec = "";
    string strAudioCodec = "";
    string strAudiorenderer = "";
    int intFilters = 0; // FlipGer: count custom filters
    string strFilters = ""; // FlipGer: collect custom filters
    string postProcessingFilterSection = "mytv";
    using (Settings xmlreader = new MPSettings())
    {
      if (_mediaType == g_Player.MediaType.Video)
      {
        strVideoCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2videocodec", "");
        strAudioCodec = xmlreader.GetValueAsString("movieplayer", "mpeg2audiocodec", "");
        strAudiorenderer = xmlreader.GetValueAsString("movieplayer", "audiorenderer", "Default DirectSound Device");
        postProcessingFilterSection = "movieplayer";
      }
      else
      {
        strVideoCodec = xmlreader.GetValueAsString("mytv", "videocodec", "");
        strAudioCodec = xmlreader.GetValueAsString("mytv", "audiocodec", "");
        strAudiorenderer = xmlreader.GetValueAsString("mytv", "audiorenderer", "Default DirectSound Device");
        postProcessingFilterSection = "mytv";
      }
      enableDvbSubtitles = xmlreader.GetValueAsBool("tvservice", "dvbsubtitles", false);
      // FlipGer: load infos for custom filters
      int intCount = 0;
      while (xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") != "undefined")
      {
        if (xmlreader.GetValueAsBool(postProcessingFilterSection, "usefilter" + intCount.ToString(), false))
        {
          strFilters += xmlreader.GetValueAsString(postProcessingFilterSection, "filter" + intCount.ToString(), "undefined") + ";";
          intFilters++;
        }
        intCount++;
      }
    }
    // NOTE(review): 'extension' is computed but never used in this method.
    string extension = Path.GetExtension(m_strCurrentFile).ToLowerInvariant();
    if (IsRadio == false)
    {
      if (strVideoCodec.Length > 0)
      {
        DirectShowUtil.AddFilterToGraph(graphBuilder, strVideoCodec);
      }
    }
    if (strAudioCodec.Length > 0)
    {
      DirectShowUtil.AddFilterToGraph(graphBuilder, strAudioCodec);
    }
    if (enableDvbSubtitles == true)
    {
      try
      {
        _subtitleFilter = SubtitleRenderer.GetInstance().AddSubtitleFilter(graphBuilder);
        SubtitleRenderer.GetInstance().SetPlayer(this);
        dvbSubRenderer = SubtitleRenderer.GetInstance();
      }
      catch (Exception e)
      {
        Log.Error(e);
      }
    }
    Log.Debug("Is subtitle fitler null? {0}", (_subtitleFilter == null));
    // FlipGer: add custom filters to graph
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      DirectShowUtil.AddFilterToGraph(graphBuilder, arrFilters[i]);
    }
    if (strAudiorenderer.Length > 0)
    {
      audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(graphBuilder, strAudiorenderer, false);
    }
    Log.Info("RTSPPlayer: load:{0}", m_strCurrentFile);
    IFileSourceFilter interfaceFile = (IFileSourceFilter)_rtspSource;
    if (interfaceFile == null)
    {
      Log.Error("RTSPPlayer:Failed to get IFileSourceFilter");
      return(false);
    }
    //Log.Info("RTSPPlayer: open file:{0}",filename);
    hr = interfaceFile.Load(m_strCurrentFile, null);
    if (hr != 0)
    {
      Log.Error("RTSPPlayer:Failed to open file:{0} :0x{1:x}", m_strCurrentFile, hr);
      return(false);
    }

    #region connect rtspsource->demux

    Log.Info("RTSPPlayer:connect rtspsource->mpeg2 demux");
    IPin pinTsOut = DsFindPin.ByDirection((IBaseFilter)_rtspSource, PinDirection.Output, 0);
    if (pinTsOut == null)
    {
      Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
      return(false);
    }
    IPin pinDemuxIn = DsFindPin.ByDirection(_mpegDemux, PinDirection.Input, 0);
    if (pinDemuxIn == null)
    {
      // NOTE(review): the log text below is copy/pasted from the previous check —
      // this branch actually means the demux INPUT pin was not found.
      Log.Info("RTSPPlayer:failed to find output pin of tsfilesource");
      return(false);
    }
    hr = graphBuilder.Connect(pinTsOut, pinDemuxIn);
    if (hr != 0)
    {
      Log.Info("RTSPPlayer:failed to connect rtspsource->mpeg2 demux:{0:X}", hr);
      return(false);
    }
    DirectShowUtil.ReleaseComObject(pinTsOut);
    DirectShowUtil.ReleaseComObject(pinDemuxIn);

    #endregion

    #region render demux output pins

    if (IsRadio)
    {
      // Radio: render only the demux output pins that offer an audio media type.
      Log.Info("RTSPPlayer:render audio demux outputs");
      IEnumPins enumPins;
      _mpegDemux.EnumPins(out enumPins);
      IPin[] pins = new IPin[2];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Input)
        {
          continue;
        }
        IEnumMediaTypes enumMediaTypes;
        pins[0].EnumMediaTypes(out enumMediaTypes);
        AMMediaType[] mediaTypes = new AMMediaType[20];
        int fetchedTypes;
        enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
        for (int i = 0; i < fetchedTypes; ++i)
        {
          if (mediaTypes[i].majorType == MediaType.Audio)
          {
            graphBuilder.Render(pins[0]);
            break;
          }
        }
      }
    }
    else
    {
      // TV: render every demux output pin (audio and video).
      Log.Info("RTSPPlayer:render audio/video demux outputs");
      IEnumPins enumPins;
      _mpegDemux.EnumPins(out enumPins);
      IPin[] pins = new IPin[2];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Input)
        {
          continue;
        }
        graphBuilder.Render(pins[0]);
      }
    }

    #endregion

    // Connect DVB subtitle filter pins in the graph
    if (_mpegDemux != null && enableDvbSubtitles == true)
    {
      IMpeg2Demultiplexer demuxer = _mpegDemux as IMpeg2Demultiplexer;
      hr = demuxer.CreateOutputPin(GetTSMedia(), "Pcr", out _pinPcr);
      if (hr == 0)
      {
        Log.Info("RTSPPlayer:_pinPcr OK");
        IPin pDemuxerPcr = DsFindPin.ByName(_mpegDemux, "Pcr");
        IPin pSubtitlePcr = DsFindPin.ByName(_subtitleFilter, "Pcr");
        hr = graphBuilder.Connect(pDemuxerPcr, pSubtitlePcr);
      }
      else
      {
        Log.Info("RTSPPlayer:Failed to create _pinPcr in demuxer:{0:X}", hr);
      }
      hr = demuxer.CreateOutputPin(GetTSMedia(), "Subtitle", out _pinSubtitle);
      if (hr == 0)
      {
        Log.Info("RTSPPlayer:_pinSubtitle OK");
        IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "Subtitle");
        IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "In");
        hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
      }
      else
      {
        Log.Info("RTSPPlayer:Failed to create _pinSubtitle in demuxer:{0:X}", hr);
      }
      hr = demuxer.CreateOutputPin(GetTSMedia(), "PMT", out _pinPMT);
      if (hr == 0)
      {
        Log.Info("RTSPPlayer:_pinPMT OK");
        IPin pDemuxerSubtitle = DsFindPin.ByName(_mpegDemux, "PMT");
        IPin pSubtitle = DsFindPin.ByName(_subtitleFilter, "PMT");
        hr = graphBuilder.Connect(pDemuxerSubtitle, pSubtitle);
      }
      else
      {
        Log.Info("RTSPPlayer:Failed to create _pinPMT in demuxer:{0:X}", hr);
      }
    }
    if (IsRadio == false)
    {
      if (!VMR9Util.g_vmr9.IsVMR9Connected)
      {
        //VMR9 is not supported, switch to overlay
        Log.Info("RTSPPlayer: vmr9 not connected");
        _mediaCtrl = null;
        Cleanup();
        return(false);
      }
      VMR9Util.g_vmr9.SetDeinterlaceMode();
    }
    _mediaCtrl = (IMediaControl)graphBuilder;
    mediaEvt = (IMediaEventEx)graphBuilder;
    _mediaSeeking = (IMediaSeeking)graphBuilder;
    mediaPos = (IMediaPosition)graphBuilder;
    basicAudio = graphBuilder as IBasicAudio;
    //DirectShowUtil.SetARMode(graphBuilder,AspectRatioMode.Stretched);
    DirectShowUtil.EnableDeInterlace(graphBuilder);
    if (VMR9Util.g_vmr9 != null)
    {
      m_iVideoWidth = VMR9Util.g_vmr9.VideoWidth;
      m_iVideoHeight = VMR9Util.g_vmr9.VideoHeight;
    }
    if (audioRendererFilter != null)
    {
      Log.Info("RTSPPlayer9:set reference clock");
      // Make the audio renderer the graph's reference clock (clear the old one first).
      IMediaFilter mp = graphBuilder as IMediaFilter;
      IReferenceClock clock = audioRendererFilter as IReferenceClock;
      hr = mp.SetSyncSource(null);
      hr = mp.SetSyncSource(clock);
      Log.Info("RTSPPlayer9:set reference clock:{0:X}", hr);
    }
    Log.Info("RTSPPlayer: graph build successfull");
    return(true);
  }
  catch (Exception ex)
  {
    Error.SetError("Unable to play movie", "Unable build graph for VMR9");
    Log.Error("RTSPPlayer:exception while creating DShow graph {0} {1}", ex.Message, ex.StackTrace);
    CloseInterfaces();
    return(false);
  }
}
/// <summary>
/// Decodes only the audio stream of the given video file and returns it as a
/// complete in-memory WAV file image (RIFF header + PCM data).
/// </summary>
/// <param name="fileName">Path of the source media file.</param>
/// <param name="wavFileImage">Receives the generated WAV file image.</param>
public static void t変換(string fileName, out byte[] wavFileImage)
{
    int hr = 0;
    IGraphBuilder graphBuilder = null;
    try
    {
        graphBuilder = (IGraphBuilder) new FilterGraph();

        #region [ Create and add the sample grabber for audio. ]
        //-----------------
        ISampleGrabber sampleGrabber = null;
        try
        {
            sampleGrabber = (ISampleGrabber) new SampleGrabber();

            // Set the sample grabber's media type (PCM audio).
            var mediaType = new AMMediaType()
            {
                majorType = MediaType.Audio,
                subType = MediaSubType.PCM,
                formatType = FormatType.WaveEx,
            };
            try
            {
                hr = sampleGrabber.SetMediaType(mediaType);
                DsError.ThrowExceptionForHR(hr);
            }
            finally
            {
                if (mediaType != null)
                {
                    DsUtils.FreeAMMediaType(mediaType);
                }
            }

            // Enable buffering on the sample grabber.
            hr = sampleGrabber.SetBufferSamples(true);
            DsError.ThrowExceptionForHR(hr);

            // Attach a callback to the sample grabber.
            sampleGrabberProc = new CSampleGrabberCallBack();
            hr = sampleGrabber.SetCallback(sampleGrabberProc, 1); // 1: invoke the callback's BufferCB() method.

            // Add the sample grabber to the graph.
            hr = graphBuilder.AddFilter((IBaseFilter)sampleGrabber, "SampleGrabber for Audio/PCM");
            DsError.ThrowExceptionForHR(hr);
        }
        finally
        {
            C共通.tCOMオブジェクトを解放する(ref sampleGrabber);
        }
        //-----------------
        #endregion

        // NOTE(review): this ROT entry is never disposed in this method — TODO confirm intended.
        var e = new DirectShowLib.DsROTEntry(graphBuilder);

        // Auto-build the graph from fileName.
        hr = graphBuilder.RenderFile(fileName, null); // IMediaControl.RenderFile() is deprecated
        DsError.ThrowExceptionForHR(hr);

        // Remove the video renderer.
        // Must run before the audio renderer is swapped for the Null renderer.
        // (CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する() plays the graph
        // once internally, which would otherwise pop up the Active video window.)
        // chnmr0: to merely hide the window, put_AutoShow via IVideoWindow is sufficient.
        IVideoWindow vw = graphBuilder as IVideoWindow;
        vw.put_AutoShow(OABool.False);

        // Replace the audio renderer with a NullRenderer and capture the audio format.
        WaveFormat wfx;
        byte[] wfx拡張領域;
        CDirectShow.tオーディオレンダラをNullレンダラに変えてフォーマットを取得する(graphBuilder, out wfx, out wfx拡張領域);

        // Set the reference clock to NULL (run at maximum speed).
        IMediaFilter mediaFilter = graphBuilder as IMediaFilter;
        mediaFilter.SetSyncSource(null);
        mediaFilter = null;

        // Write the decoded data to a memory stream.
        // The format probing above already played once, so start from a fresh stream.
        sampleGrabberProc.MemoryStream = new MemoryStream();
        var ms = sampleGrabberProc.MemoryStream;
        var bw = new BinaryWriter(ms);
        bw.Write(new byte[] { 0x52, 0x49, 0x46, 0x46 }); // 'RIFF'
        bw.Write((UInt32)0); // file size - 8 [byte]; unknown yet, overwritten later.
        bw.Write(new byte[] { 0x57, 0x41, 0x56, 0x45 }); // 'WAVE'
        bw.Write(new byte[] { 0x66, 0x6D, 0x74, 0x20 }); // 'fmt '
        bw.Write((UInt32)(16 + ((wfx拡張領域.Length > 0) ? (2 /*sizeof(WAVEFORMATEX.cbSize)*/ + wfx拡張領域.Length) : 0))); // fmt chunk size [byte]
        bw.Write((UInt16)wfx.Encoding); // format ID (1 for linear PCM)
        bw.Write((UInt16)wfx.Channels); // channel count
        bw.Write((UInt32)wfx.SampleRate); // sampling rate
        bw.Write((UInt32)wfx.AverageBytesPerSecond); // data rate
        bw.Write((UInt16)wfx.BlockAlign); // block size
        bw.Write((UInt16)wfx.BitsPerSample); // bits per sample
        if (wfx拡張領域.Length > 0)
        {
            bw.Write((UInt16)wfx拡張領域.Length); // size of the extension area [byte]
            bw.Write(wfx拡張領域); // extension data
        }
        bw.Write(new byte[] { 0x64, 0x61, 0x74, 0x61 }); // 'data'
        int nDATAチャンクサイズ位置 = (int)ms.Position;
        bw.Write((UInt32)0); // data chunk size [byte]; unknown yet, overwritten later.

        #region [ Start playback and wait for completion — PCM data accumulates in sampleGrabberProc.MemoryStream while playing. ]
        //-----------------
        IMediaControl mediaControl = graphBuilder as IMediaControl;
        mediaControl.Run(); // start playback
        IMediaEvent mediaEvent = graphBuilder as IMediaEvent;
        EventCode eventCode;
        hr = mediaEvent.WaitForCompletion(-1, out eventCode);
        DsError.ThrowExceptionForHR(hr);
        if (eventCode != EventCode.Complete)
        {
            throw new Exception("再生待ちに失敗しました。");
        }
        mediaControl.Stop();
        mediaEvent = null;
        mediaControl = null;
        //-----------------
        #endregion

        // Backpatch the two size fields now that the total length is known.
        bw.Seek(4, SeekOrigin.Begin);
        bw.Write((UInt32)ms.Length - 8); // file size - 8 [byte]
        bw.Seek(nDATAチャンクサイズ位置, SeekOrigin.Begin);
        bw.Write((UInt32)ms.Length - (nDATAチャンクサイズ位置 + 4)); // data chunk size [byte]

        // Produce output #2 (the WAV image).
        wavFileImage = ms.ToArray();

        // Cleanup.
        bw.Close();
        sampleGrabberProc.Dispose(); // ms.Close()
    }
    finally
    {
        C共通.tCOMオブジェクトを解放する(ref graphBuilder);
    }
}
/// <summary>
/// Create the used COM components and get the interfaces.
/// Builds the TsReader playback graph: audio renderer, optional audio switcher,
/// TsReader source filter, preferred codecs, optional DVB bitmap/teletext
/// subtitle support; renders TsReader's output pins and queries the graph
/// control interfaces.
/// </summary>
/// <param name="filename">Path of the .ts file to load into TsReader.</param>
/// <returns>true when the graph was built; false on any failure (logged and cleaned up).</returns>
protected override bool GetInterfaces(string filename)
{
  Log.Info("TSReaderPlayer: GetInterfaces()");
  try
  {
    string strAudioRenderer = "";
    int intFilters = 0; // FlipGer: count custom filters
    string strFilters = ""; // FlipGer: collect custom filters
    LoadMyTvFilterSettings(ref intFilters, ref strFilters, ref strVideoCodec, ref strAudioCodec, ref strAACAudioCodec, ref strDDPLUSAudioCodec, ref strH264VideoCodec, ref strAudioRenderer, ref enableDVBBitmapSubtitles, ref enableDVBTtxtSubtitles, ref relaxTsReader);
    _graphBuilder = (IGraphBuilder) new FilterGraph();
    _rotEntry = new DsROTEntry((IFilterGraph)_graphBuilder);
    if (strAudioRenderer.Length > 0) //audio renderer must be in graph before audio switcher
    {
      _audioRendererFilter = DirectShowUtil.AddAudioRendererToGraph(_graphBuilder, strAudioRenderer, true);
    }

    #region add AudioSwitcher

    if (enableMPAudioSwitcher) //audio switcher must be in graph before tsreader audiochangecallback
    {
      _audioSwitcherFilter = DirectShowUtil.AddFilterToGraph(_graphBuilder, "MediaPortal AudioSwitcher");
      if (_audioSwitcherFilter == null)
      {
        Log.Error("TSReaderPlayer: Failed to add AudioSwitcher to graph");
      }
    }

    #endregion

    #region add TsReader

    TsReader reader = new TsReader();
    _fileSource = (IBaseFilter)reader;
    _ireader = (ITSReader)reader;
    _interfaceTSReader = _fileSource;
    _ireader.SetRelaxedMode(relaxTsReader); // enable/disable continousity filtering
    _ireader.SetTsReaderCallback(this);
    _ireader.SetRequestAudioChangeCallback(this);
    Log.Info("TSReaderPlayer: Add TsReader to graph");
    int hr = _graphBuilder.AddFilter((IBaseFilter)_fileSource, "TsReader");
    DsError.ThrowExceptionForHR(hr);

    #endregion

    #region load file in TsReader

    IFileSourceFilter interfaceFile = (IFileSourceFilter)_fileSource;
    if (interfaceFile == null)
    {
      Log.Error("TSReaderPlayer: Failed to get IFileSourceFilter");
      Cleanup();
      return(false);
    }
    Log.Info("TSReaderPlayer: Open file: {0}", filename);
    hr = interfaceFile.Load(filename, null);
    if (hr != 0)
    {
      Log.Error("TSReaderPlayer: Failed to open file:{0} :0x{1:x}", filename, hr);
      Cleanup();
      return(false);
    }

    #endregion

    #region add codecs

    Log.Info("TSReaderPlayer: Add codecs");
    // add preferred video & audio codecs
    MatchFilters("Video");
    MatchFilters("Audio");
    // does .ts file contain video?
    // default is _isRadio=false which prevents recorded radio file playing
    if (!_videoFormat.IsValid)
    {
      _isRadio = true;
    }
    if (!_isRadio)
    {
      _vmr9 = new VMR9Util();
      _vmr9.AddVMR9(_graphBuilder);
      _vmr9.Enable(false);
      DirectShowUtil.AddFilterToGraph(_graphBuilder, videoFilter);
      if (enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().AddSubtitleFilter(_graphBuilder);
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
    }
    DirectShowUtil.AddFilterToGraph(_graphBuilder, audioFilter);
    // FlipGer: add custom filters to graph
    string[] arrFilters = strFilters.Split(';');
    for (int i = 0; i < intFilters; i++)
    {
      DirectShowUtil.AddFilterToGraph(_graphBuilder, arrFilters[i]);
    }

    #endregion

    #region PostProcessingEngine Detection

    IPostProcessingEngine postengine = PostProcessingEngine.GetInstance(true);
    if (!postengine.LoadPostProcessing(_graphBuilder))
    {
      PostProcessingEngine.engine = new PostProcessingEngine.DummyEngine();
    }

    #endregion

    #region render TsReader output pins

    Log.Info("TSReaderPlayer: Render TsReader outputs");
    if (_isRadio)
    {
      // Radio: render only TsReader output pins that offer an audio media type.
      IEnumPins enumPins;
      hr = _fileSource.EnumPins(out enumPins);
      DsError.ThrowExceptionForHR(hr);
      IPin[] pins = new IPin[1];
      int fetched = 0;
      while (enumPins.Next(1, pins, out fetched) == 0)
      {
        if (fetched != 1)
        {
          break;
        }
        PinDirection direction;
        pins[0].QueryDirection(out direction);
        if (direction == PinDirection.Output)
        {
          IEnumMediaTypes enumMediaTypes;
          pins[0].EnumMediaTypes(out enumMediaTypes);
          AMMediaType[] mediaTypes = new AMMediaType[20];
          int fetchedTypes;
          enumMediaTypes.Next(20, mediaTypes, out fetchedTypes);
          for (int i = 0; i < fetchedTypes; ++i)
          {
            if (mediaTypes[i].majorType == MediaType.Audio)
            {
              hr = _graphBuilder.Render(pins[0]);
              DsError.ThrowExceptionForHR(hr);
              break;
            }
          }
        }
        DirectShowUtil.ReleaseComObject(pins[0]);
      }
      DirectShowUtil.ReleaseComObject(enumPins);
    }
    else
    {
      DirectShowUtil.RenderGraphBuilderOutputPins(_graphBuilder, _fileSource);
    }
    DirectShowUtil.RemoveUnusedFiltersFromGraph(_graphBuilder);

    #endregion

    _mediaCtrl = (IMediaControl)_graphBuilder;
    _mediaEvt = (IMediaEventEx)_graphBuilder;
    _mediaSeeking = (IMediaSeeking)_graphBuilder;
    if (_mediaSeeking == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IMediaSeeking interface");
    }
    _audioStream = (IAudioStream)_fileSource;
    if (_audioStream == null)
    {
      Log.Error("TSReaderPlayer: Unable to get IAudioStream interface");
    }
    _audioSelector = new AudioSelector(_audioStream);
    if (!_isRadio)
    {
      if (enableDVBTtxtSubtitles || enableDVBBitmapSubtitles)
      {
        try
        {
          SubtitleRenderer.GetInstance().SetPlayer(this);
          _dvbSubRenderer = SubtitleRenderer.GetInstance();
        }
        catch (Exception e)
        {
          Log.Error(e);
        }
      }
      if (enableDVBBitmapSubtitles)
      {
        _subtitleStream = (ISubtitleStream)_fileSource;
        if (_subtitleStream == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ISubtitleStream interface");
        }
      }
      if (enableDVBTtxtSubtitles)
      {
        //Log.Debug("TSReaderPlayer: Obtaining TeletextSource");
        _teletextSource = (ITeletextSource)_fileSource;
        if (_teletextSource == null)
        {
          Log.Error("TSReaderPlayer: Unable to get ITeletextSource interface");
        }
        Log.Debug("TSReaderPlayer: Creating Teletext Receiver");
        TeletextSubtitleDecoder ttxtDecoder = new TeletextSubtitleDecoder(_dvbSubRenderer);
        _ttxtReceiver = new TeletextReceiver(_teletextSource, ttxtDecoder);
        // regardless of whether dvb subs are enabled, the following call is okay
        // if _subtitleStream is null the subtitle will just not setup for bitmap subs
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, ttxtDecoder);
      }
      else if (enableDVBBitmapSubtitles)
      {
        // if only dvb subs are enabled, pass null for ttxtDecoder
        _subSelector = new SubtitleSelector(_subtitleStream, _dvbSubRenderer, null);
      }
    }
    if (_audioRendererFilter != null)
    {
      //Log.Info("TSReaderPlayer:set reference clock");
      // Make the audio renderer the graph's reference clock (clear the old one first).
      IMediaFilter mp = (IMediaFilter)_graphBuilder;
      IReferenceClock clock = (IReferenceClock)_audioRendererFilter;
      hr = mp.SetSyncSource(null);
      hr = mp.SetSyncSource(clock);
      //Log.Info("TSReaderPlayer:set reference clock:{0:X}", hr);
      _basicAudio = (IBasicAudio)_graphBuilder;
    }
    if (!_isRadio)
    {
      // Look for a Line 21 (closed captions) decoder under its known filter names
      // and switch closed captions off.
      IBaseFilter basefilter;
      _graphBuilder.FindFilterByName("Line 21 Decoder", out basefilter);
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line21 Decoder", out basefilter);
      }
      if (basefilter == null)
      {
        _graphBuilder.FindFilterByName("Line 21 Decoder 2", out basefilter);
      }
      if (basefilter != null)
      {
        Log.Info("TSreaderPlayer: Line21 Decoder (Closed Captions), in use"); //: {0}", showClosedCaptions);
        _line21Decoder = (IAMLine21Decoder)basefilter;
        if (_line21Decoder != null)
        {
          AMLine21CCState state = AMLine21CCState.Off;
          hr = _line21Decoder.SetServiceState(state);
          if (hr == 0)
          {
            Log.Info("TSReaderPlayer: Closed Captions state change successful");
          }
          else
          {
            Log.Info("TSReaderPlayer: Failed to change Closed Captions state");
          }
        }
      }
      if (!_vmr9.IsVMR9Connected)
      {
        Log.Error("TSReaderPlayer: Failed vmr9 not connected");
        Cleanup();
        return(false);
      }
      DirectShowUtil.EnableDeInterlace(_graphBuilder);
      _vmr9.SetDeinterlaceMode();
    }
    using (MPSettings xmlreader = new MPSettings())
    {
      int lastSubIndex = xmlreader.GetValueAsInt("tvservice", "lastsubtitleindex", 0);
      Log.Debug("TSReaderPlayer: Last subtitle index: {0}", lastSubIndex);
      CurrentSubtitleStream = lastSubIndex;
    }
    return(true);
  }
  catch (Exception ex)
  {
    Log.Error("TSReaderPlayer: Exception while creating DShow graph {0}", ex.Message);
    Cleanup();
    return(false);
  }
}
/// <summary>
/// Builds the audio-grabbing filter graph for the current file: source filter,
/// sample grabber configured for the requested audio subtype, rendering
/// (optionally rerouted into a Null renderer), and the media
/// control/seek/event/audio interfaces. On any failure the partially built
/// graph is destroyed and AudioSourceError is raised.
/// </summary>
/// <param name="audioSubType">Media subtype the sample grabber should deliver (e.g. PCM).</param>
private void CreateFilters(Guid audioSubType)
{
    isValid = false;
    // NOTE(review): the HRESULTs assigned to 'r' throughout are never checked.
    int r;

    // grabber
    grabberAudio = new GrabberAudio(this);

    // objects
    graphObject = null;
    grabberObjectAudio = null;

    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }

        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;

        // create source device's object
        r = graph.AddSourceFilter(fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }

        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }

        // create sample grabber
        grabberObjectAudio = Activator.CreateInstance(type);
        sampleGrabberAudio = (ISampleGrabber)grabberObjectAudio;
        grabberBaseAudio = (IBaseFilter)grabberObjectAudio;

        // add grabber filters to graph
        r = graph.AddFilter(grabberBaseAudio, "grabberAudio");

        // set media type
        AMMediaType mediaType = new AMMediaType
        {
            MajorType = MediaType.Audio,
            SubType = audioSubType,
            FormatType = FormatType.WaveEx
        };
        r = sampleGrabberAudio.SetMediaType(mediaType);

        // render pin
        // TODO: Improve this! We can't always assume that the second pin will always be the audio pin -- we need to find it.
        IPin sbPin = Tools.GetOutPin(sourceBase, 1);
        if (sbPin == null)
        {
            sbPin = Tools.GetOutPin(sourceBase, 0);
        }
        r = graph.Render(sbPin);

        IPin outPin = Tools.GetOutPin(grabberBaseAudio, 0);
        AMMediaType mt = new AMMediaType();
        r = outPin.ConnectionMediaType(mt);
        if (!Tools.IsPinConnected(outPin))
        {
            throw new ApplicationException("Failed obtaining media information");
        }

        // disable clock, if someone requested it
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            r = mediaFilter.SetSyncSource(null);
        }

        wavFormat = new WaveFormatEx();
        Marshal.PtrToStructure(mt.FormatPtr, wavFormat);
        Marshal.ReleaseComObject(outPin);

        // configure sample grabber
        r = sampleGrabberAudio.SetBufferSamples(false);
        r = sampleGrabberAudio.SetOneShot(false);
        r = sampleGrabberAudio.SetCallback(grabberAudio, 1);

        if (useNullRenderer)
        {
            // Get a list of all the filters connected to the sample grabber
            List <Tools.FilterInfo2> filtersInfo2 = new List <Tools.FilterInfo2>();
            Tools.FilterInfo2 testFilterInfo2 = Tools.GetNextFilter(grabberBaseAudio, PinDirection.Output, 0);
            while (true)
            {
                filtersInfo2.Add(testFilterInfo2);
                testFilterInfo2 = Tools.GetNextFilter(testFilterInfo2.Filter, PinDirection.Output, 0);
                if (testFilterInfo2.Filter == null)
                {
                    break;
                }
            }

            // Remove the last filter, the audio renderer
            r = graph.RemoveFilter(filtersInfo2[filtersInfo2.Count - 1].Filter);

            // create null renderer
            type = Type.GetTypeFromCLSID(Clsid.NullRenderer);
            if (type == null)
            {
                throw new ApplicationException("Failed creating null renderer");
            }
            nullRendererObjectAudio = Activator.CreateInstance(type);
            IBaseFilter nullRendererAudio = (IBaseFilter)nullRendererObjectAudio;

            // add grabber filters to graph
            r = graph.AddFilter(nullRendererAudio, "nullRenderer");

            // Reconnect the grabber output into the null renderer instead.
            //outPin = Tools.GetOutPin(filtersInfo2[filtersInfo2.Count - 2].Filter, 0);
            outPin = Tools.GetOutPin(grabberBaseAudio, 0);
            IPin inPin = Tools.GetInPin(nullRendererAudio, 0);
            if (graph.Connect(outPin, inPin) < 0)
            {
                throw new ApplicationException("Failed obtaining media audio information");
            }
            Marshal.ReleaseComObject(outPin);
            Marshal.ReleaseComObject(inPin);
        }

        // configure video window
        IVideoWindow window = (IVideoWindow)graphObject;
        if (window != null)
        {
            window.put_AutoShow(false);
            window = null;
        }

        // get media control
        mediaControl = (IMediaControl)graphObject;

        // get media seek control
        mediaSeekControl = (IMediaSeeking)graphObject;
        mediaSeekControl.SetTimeFormat(TimeFormat.MediaTime);

        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;

        // get media audio control
        basicAudio = (IBasicAudio)graphObject;

        isValid = true;
    }
    catch (Exception exception)
    {
        DestroyFilters();

        // provide information to clients
        AudioSourceError?.Invoke(this, new AudioSourceErrorEventArgs(exception.Message));
    }
}
/// <summary>
/// Creates a BDA receiver graph and starts it.
/// </summary>
/// <param name="location">The origin the source group is received from.</param>
/// <param name="group">The desired source group.</param>
/// <exception cref="ArgumentException">Not all parameters were set.</exception>
public void Create( GroupLocation location, SourceGroup group )
{
    // Get rid of it
    Destroy();

    // Create new graph builder
    var graph = Activator.CreateInstance( Type.GetTypeFromCLSID( BDAEnvironment.GraphBuilderClassIdentifier ) );
    try
    {
        // Convert interface
        m_Graph = (IMediaFilter) graph;
    }
    catch
    {
        // Cleanup
        BDAEnvironment.Release( ref graph );

        // Forward
        throw;
    }

    // See if we should register the graph
    m_ExternalRegistration = BDASettings.RegisterBDAGRaph( m_Graph, false );

    // Attach to alternate interface
    var builder = (IGraphBuilder) m_Graph;

    // Check log
    var logFile = BDASettings.BDALogPath;
    if (logFile != null)
    {
        // Open path
        m_LogFile = new FileStream( logFile.FullName, FileMode.Create, FileAccess.Write, FileShare.Read );

        // Enable logging on graph builder
        builder.SetLogFile( m_LogFile.SafeFileHandle );
    }

    // Start with network provider
    NetworkProvider = AddFilter( "Network Provider", BDAEnvironment.GetNetworkProviderMoniker( DVBType ) );

    // Initialize provider
    Tune( location, group );

    // Always create the tuner
    if (TunerInformation != null)
        TunerFilter = AddFilter( "Tuner", TunerInformation );
    else
        throw new ArgumentException( Properties.Resources.Exception_MissingTuner, "Tuner" );

    // Optionally create capture
    if (CaptureInformation != null)
        CaptureFilter = AddFilter( "Capture", CaptureInformation );

    // Add additional filter
    foreach (var additionalFilter in AdditionalFilterInformations)
        if (additionalFilter == null)
            throw new ArgumentNullException( "AdditionalFilters" );
        else
            AdditionalFilters.Add( AddFilter( additionalFilter.DisplayName, additionalFilter ) );

    // Connect network provider to streaming instance
    Connect( NetworkProvider, CaptureFilter ?? TunerFilter );

    // Initialize provider
    // NOTE(review): second Tune() call after connecting — TODO confirm the re-tune is intentional.
    Tune( location, group );

    // Create the primary filter and add it
    AddFilter( "TS", TransportStreamAnalyser = new InputFilter() );

    // Connect device output for analysis
    Connect( CaptureFilter ?? TunerFilter, TransportStreamAnalyser );

    // Create the demultiplexer - needed to keep the infrastructure alive
    using (var demux = AddFilter( "TIF", BDAEnvironment.MicrosoftDemultiplexerMoniker ))
    {
        // Connect to the dedicated pin of our analyser
        TransportStreamAnalyser.DataManager.TIFConnector.Connect( demux, BDAEnvironment.TransportStreamMediaType1 );

        // Pins to remove
        var remove = new List<string>();

        // Prepare the demultiplexer pins
        demux.InspectAllPins( pin =>
            {
                // See if this is the SI pin
                bool isSectionPin = false;
                pin.InspectAllMediaTypes( type =>
                    {
                        // Check major
                        if (!type.MajorType.Equals( BDAEnvironment.DataFormatTypeSections ))
                            return true;

                        // Check minor
                        isSectionPin = type.SubType.Equals( BDAEnvironment.DataFormatSubtypeSI );

                        // Report
                        return !isSectionPin;
                    } );

                // Check the mode
                if (isSectionPin)
                {
                    // Connect
                    using (var comPin = ComIdentity.Create( pin ))
                        builder.Render( comPin.Interface );

                    // Load connection data
                    IntPtr tifIn = IntPtr.Zero;
                    if (pin.ConnectedTo( ref tifIn ) < 0)
                        throw new InvalidOperationException( Properties.Resources.Exception_TIF );

                    // Reconstruct
                    var tifPin = Marshal.GetObjectForIUnknown( tifIn );
                    try
                    {
                        // Request pin context
                        var info = new PinInfo();
                        ((IPin) tifPin).QueryPinInfo( ref info );

                        // Request from pin
                        m_TIF = info.GetAndDisposeFilter();
                    }
                    finally
                    {
                        // Cleanup
                        BDAEnvironment.Release( ref tifPin );
                    }
                }
                else if (pin.QueryDirection() == PinDirection.Output)
                {
                    // Prepare to kill
                    remove.Add( pin.QueryId() );
                }
            } );

        // Prepare to remove all unconnected pins
        if (remove.Count > 0)
            using (var demuxInstance = demux.MarshalToManaged())
            {
                // Change type
                var mpeg2 = (IMpeg2Demultiplexer) demuxInstance.Object;

                // Remove all
                foreach (var id in remove)
                    mpeg2.DeleteOutputPin( id );
            }
    }

    // Install the PMT watchdog
    TransportStreamAnalyser.DataManager.TSParser.PMTFound += ProcessPMT;
}
/// <summary>
/// Creates a BDA receiver graph and starts it.
/// NOTE(review): appears to be a reformatted duplicate of a near-identical
/// Create method elsewhere in this file — keep the two in sync or remove one.
/// </summary>
/// <param name="location">The origin the source group is received from.</param>
/// <param name="group">The desired source group.</param>
/// <exception cref="ArgumentException">Not all parameters were set.</exception>
public void Create(GroupLocation location, SourceGroup group)
{
    // Get rid of it
    Destroy();
    // Create new graph builder
    var graph = Activator.CreateInstance(Type.GetTypeFromCLSID(BDAEnvironment.GraphBuilderClassIdentifier));
    try
    {
        // Convert interface
        m_Graph = (IMediaFilter)graph;
    }
    catch
    {
        // Cleanup
        BDAEnvironment.Release(ref graph);
        // Forward
        throw;
    }
    // See if we should register the graph
    m_ExternalRegistration = BDASettings.RegisterBDAGRaph(m_Graph, false);
    // Attach to alternate interface
    var builder = (IGraphBuilder)m_Graph;
    // Check log
    var logFile = BDASettings.BDALogPath;
    if (logFile != null)
    {
        // Open path
        m_LogFile = new FileStream(logFile.FullName, FileMode.Create, FileAccess.Write, FileShare.Read);
        // Enable logging on graph builder
        builder.SetLogFile(m_LogFile.SafeFileHandle);
    }
    // Start with network provider
    NetworkProvider = AddFilter("Network Provider", BDAEnvironment.GetNetworkProviderMoniker(DVBType));
    // Initialize provider
    Tune(location, group);
    // Always create the tuner
    if (TunerInformation != null)
    {
        TunerFilter = AddFilter("Tuner", TunerInformation);
    }
    else
    {
        throw new ArgumentException(Properties.Resources.Exception_MissingTuner, "Tuner");
    }
    // Optionally create capture
    if (CaptureInformation != null)
    {
        CaptureFilter = AddFilter("Capture", CaptureInformation);
    }
    // Add additional filter
    foreach (var additionalFilter in AdditionalFilterInformations)
    {
        if (additionalFilter == null)
        {
            throw new ArgumentNullException("AdditionalFilters");
        }
        else
        {
            AdditionalFilters.Add(AddFilter(additionalFilter.DisplayName, additionalFilter));
        }
    }
    // Connect network provider to streaming instance
    Connect(NetworkProvider, CaptureFilter ?? TunerFilter);
    // Initialize provider
    // NOTE(review): second Tune() call after connecting — TODO confirm the re-tune is intentional.
    Tune(location, group);
    // Create the primary filter and add it
    AddFilter("TS", TransportStreamAnalyser = new InputFilter());
    // Connect device output for analysis
    Connect(CaptureFilter ?? TunerFilter, TransportStreamAnalyser);
    // Create the demultiplexer - needed to keep the infrastructure alive
    using (var demux = AddFilter("TIF", BDAEnvironment.MicrosoftDemultiplexerMoniker))
    {
        // Connect to the dedicated pin of our analyser
        TransportStreamAnalyser.DataManager.TIFConnector.Connect(demux, BDAEnvironment.TransportStreamMediaType1);
        // Pins to remove
        var remove = new List <string>();
        // Prepare the demultiplexer pins
        demux.InspectAllPins(pin =>
        {
            // See if this is the SI pin
            bool isSectionPin = false;
            pin.InspectAllMediaTypes(type =>
            {
                // Check major
                if (!type.MajorType.Equals(BDAEnvironment.DataFormatTypeSections))
                {
                    return(true);
                }
                // Check minor
                isSectionPin = type.SubType.Equals(BDAEnvironment.DataFormatSubtypeSI);
                // Report
                return(!isSectionPin);
            });
            // Check the mode
            if (isSectionPin)
            {
                // Connect
                using (var comPin = ComIdentity.Create(pin))
                    builder.Render(comPin.Interface);
                // Load connection data
                IntPtr tifIn = IntPtr.Zero;
                if (pin.ConnectedTo(ref tifIn) < 0)
                {
                    throw new InvalidOperationException(Properties.Resources.Exception_TIF);
                }
                // Reconstruct
                var tifPin = Marshal.GetObjectForIUnknown(tifIn);
                try
                {
                    // Request pin context
                    var info = new PinInfo();
                    ((IPin)tifPin).QueryPinInfo(ref info);
                    // Request from pin
                    m_TIF = info.GetAndDisposeFilter();
                }
                finally
                {
                    // Cleanup
                    BDAEnvironment.Release(ref tifPin);
                }
            }
            else if (pin.QueryDirection() == PinDirection.Output)
            {
                // Prepare to kill
                remove.Add(pin.QueryId());
            }
        });
        // Prepare to remove all unconnected pins
        if (remove.Count > 0)
        {
            using (var demuxInstance = demux.MarshalToManaged())
            {
                // Change type
                var mpeg2 = (IMpeg2Demultiplexer)demuxInstance.Object;
                // Remove all
                foreach (var id in remove)
                {
                    mpeg2.DeleteOutputPin(id);
                }
            }
        }
    }
    // Install the PMT watchdog
    TransportStreamAnalyser.DataManager.TSParser.PMTFound += ProcessPMT;
}
/// <summary>
/// Worker thread: builds a DirectShow graph that plays <c>fileName</c>, routes
/// decoded RGB24 frames through a sample grabber (delivered via the
/// <c>Grabber</c> callback), pumps graph events until end-of-stream or an
/// external stop request, then releases all COM objects and raises
/// <c>PlayingFinished</c>.
/// </summary>
private void WorkerThread()
{
    ReasonToFinishPlaying reason = ReasonToFinishPlaying.StoppedByUser;
    // Frame-callback sink registered with the sample grabber below.
    Grabber grabber = new Grabber(this);
    // Raw COM instances, tracked separately so 'finally' can release them.
    object obj = null;    // filter graph instance
    object obj2 = null;   // sample grabber instance
    IGraphBuilder graphBuilder = null;
    IBaseFilter filter = null;        // source filter created for 'fileName'
    IBaseFilter baseFilter = null;    // the sample grabber, seen as a filter
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEventEx = null;
    try
    {
        // Create the filter graph from its CLSID.
        Type typeFromCLSID = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        obj = Activator.CreateInstance(typeFromCLSID);
        graphBuilder = (IGraphBuilder)obj;
        // Let DirectShow pick an appropriate source filter for the file.
        graphBuilder.AddSourceFilter(fileName, "source", out filter);
        if (filter == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }
        // Create the sample grabber from its CLSID and add it to the graph.
        typeFromCLSID = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (typeFromCLSID == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        obj2 = Activator.CreateInstance(typeFromCLSID);
        sampleGrabber = (ISampleGrabber)obj2;
        baseFilter = (IBaseFilter)obj2;
        graphBuilder.AddFilter(baseFilter, "grabber");
        // Ask the grabber for 24-bit RGB video frames.
        AMMediaType aMMediaType = new AMMediaType();
        aMMediaType.MajorType = MediaType.Video;
        aMMediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(aMMediaType);
        // Probe each output pin of the source until one connects to the grabber.
        int num = 0;
        IPin inPin = Tools.GetInPin(baseFilter, 0);
        IPin pin = null;
        while (true)
        {
            pin = Tools.GetOutPin(filter, num);
            if (pin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }
            // Non-negative HRESULT means the connection succeeded.
            if (graphBuilder.Connect(pin, inPin) >= 0)
            {
                break;
            }
            Marshal.ReleaseComObject(pin);
            pin = null;
            num++;
        }
        Marshal.ReleaseComObject(pin);
        Marshal.ReleaseComObject(inPin);
        // Read the negotiated frame size from the connected media type (0 == S_OK).
        if (sampleGrabber.GetConnectedMediaType(aMMediaType) == 0)
        {
            VideoInfoHeader videoInfoHeader = (VideoInfoHeader)Marshal.PtrToStructure(aMMediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = videoInfoHeader.BmiHeader.Width;
            grabber.Height = videoInfoHeader.BmiHeader.Height;
            aMMediaType.Dispose();
        }
        // Render the grabber's output unless the caller wants to prevent freezing;
        // the resulting video window is kept hidden.
        if (!preventFreezing)
        {
            graphBuilder.Render(Tools.GetOutPin(baseFilter, 0));
            IVideoWindow videoWindow = (IVideoWindow)obj;
            videoWindow.put_AutoShow(autoShow: false);
            videoWindow = null;
        }
        // Callback-only delivery: no internal buffering, continuous grabbing.
        sampleGrabber.SetBufferSamples(bufferThem: false);
        sampleGrabber.SetOneShot(oneShot: false);
        // NOTE(review): second argument 1 — presumably selects the BufferCB callback; confirm.
        sampleGrabber.SetCallback(grabber, 1);
        // Remove the reference clock so the graph runs unthrottled when requested.
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)obj;
            mediaFilter.SetSyncSource(null);
        }
        mediaControl = (IMediaControl)obj;
        mediaEventEx = (IMediaEventEx)obj;
        mediaControl.Run();
        // Pump graph events until completion or until stopEvent is signalled.
        do
        {
            if (mediaEventEx != null && mediaEventEx.GetEvent(out DsEvCode lEventCode, out IntPtr lParam, out IntPtr lParam2, 0) >= 0)
            {
                mediaEventEx.FreeEventParams(lEventCode, lParam, lParam2);
                if (lEventCode == DsEvCode.Complete)
                {
                    reason = ReasonToFinishPlaying.EndOfStreamReached;
                    break;
                }
            }
        }
        while (!stopEvent.WaitOne(100, exitContext: false));
        mediaControl.Stop();
    }
    catch (Exception ex)
    {
        // Surface the failure to subscribers instead of killing the thread silently.
        if (this.VideoSourceError != null)
        {
            this.VideoSourceError(this, new VideoSourceErrorEventArgs(ex.Message));
        }
    }
    finally
    {
        // Drop interface references first, then release the underlying COM objects.
        graphBuilder = null;
        baseFilter = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEventEx = null;
        if (obj != null)
        {
            Marshal.ReleaseComObject(obj);
            obj = null;
        }
        if (filter != null)
        {
            Marshal.ReleaseComObject(filter);
            filter = null;
        }
        if (obj2 != null)
        {
            Marshal.ReleaseComObject(obj2);
            obj2 = null;
        }
    }
    if (this.PlayingFinished != null)
    {
        this.PlayingFinished(this, reason);
    }
}
/// <summary>
/// Worker thread: builds a DirectShow playback graph for <c>fileName</c>,
/// grabs decoded RGB24 frames through a sample grabber, pumps graph events
/// until end-of-stream or a stop request, then tears the graph down and
/// raises <c>PlayingFinished</c>.
/// </summary>
private void WorkerThread()
{
    ReasonToFinishPlaying reasonToStop = ReasonToFinishPlaying.StoppedByUser;
    // grabber — frame-callback sink registered with the sample grabber
    Grabber grabber = new Grabber(this);
    // raw COM objects, tracked separately so 'finally' can release them
    object graphObject = null;
    object grabberObject = null;
    // interfaces
    IGraphBuilder graph = null;
    IBaseFilter sourceBase = null;    // source filter created for 'fileName'
    IBaseFilter grabberBase = null;   // the sample grabber, seen as a filter
    ISampleGrabber sampleGrabber = null;
    IMediaControl mediaControl = null;
    IMediaEventEx mediaEvent = null;
    try
    {
        // get type for filter graph
        Type type = Type.GetTypeFromCLSID(Clsid.FilterGraph);
        if (type == null)
        {
            throw new ApplicationException("Failed creating filter graph");
        }
        // create filter graph
        graphObject = Activator.CreateInstance(type);
        graph = (IGraphBuilder)graphObject;
        // create source device's object — DirectShow chooses the filter
        graph.AddSourceFilter(fileName, "source", out sourceBase);
        if (sourceBase == null)
        {
            throw new ApplicationException("Failed creating source filter");
        }
        // get type for sample grabber
        type = Type.GetTypeFromCLSID(Clsid.SampleGrabber);
        if (type == null)
        {
            throw new ApplicationException("Failed creating sample grabber");
        }
        // create sample grabber
        grabberObject = Activator.CreateInstance(type);
        sampleGrabber = (ISampleGrabber)grabberObject;
        grabberBase = (IBaseFilter)grabberObject;
        // add grabber filters to graph
        graph.AddFilter(grabberBase, "grabber");
        // set media type — request 24-bit RGB video frames
        AMMediaType mediaType = new AMMediaType();
        mediaType.MajorType = MediaType.Video;
        mediaType.SubType = MediaSubType.RGB24;
        sampleGrabber.SetMediaType(mediaType);
        // connect pins
        int pinToTry = 0;
        IPin inPin = Tools.GetInPin(grabberBase, 0);
        IPin outPin = null;
        // find output pin acceptable by sample grabber — probe each source pin in turn
        while (true)
        {
            outPin = Tools.GetOutPin(sourceBase, pinToTry);
            if (outPin == null)
            {
                Marshal.ReleaseComObject(inPin);
                throw new ApplicationException("Did not find acceptable output video pin in the given source");
            }
            // negative HRESULT means this pin could not connect — try the next one
            if (graph.Connect(outPin, inPin) < 0)
            {
                Marshal.ReleaseComObject(outPin);
                outPin = null;
                pinToTry++;
            }
            else
            {
                break;
            }
        }
        Marshal.ReleaseComObject(outPin);
        Marshal.ReleaseComObject(inPin);
        // get media type — read negotiated frame size (0 == S_OK)
        if (sampleGrabber.GetConnectedMediaType(mediaType) == 0)
        {
            VideoInfoHeader vih = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.FormatPtr, typeof(VideoInfoHeader));
            grabber.Width = vih.BmiHeader.Width;
            grabber.Height = vih.BmiHeader.Height;
            mediaType.Dispose();
        }
        // let's do rendering, if we don't need to prevent freezing
        if (!preventFreezing)
        {
            // render pin
            graph.Render(Tools.GetOutPin(grabberBase, 0));
            // configure video window — keep it hidden
            IVideoWindow window = (IVideoWindow)graphObject;
            window.put_AutoShow(false);
            window = null;
        }
        // configure sample grabber: callback-only delivery, continuous grabbing
        sampleGrabber.SetBufferSamples(false);
        sampleGrabber.SetOneShot(false);
        // NOTE(review): second argument 1 — presumably selects the BufferCB callback; confirm.
        sampleGrabber.SetCallback(grabber, 1);
        // disable clock, if someone requested it — graph then runs unthrottled
        if (!referenceClockEnabled)
        {
            IMediaFilter mediaFilter = (IMediaFilter)graphObject;
            mediaFilter.SetSyncSource(null);
        }
        // get media control
        mediaControl = (IMediaControl)graphObject;
        // get media events' interface
        mediaEvent = (IMediaEventEx)graphObject;
        IntPtr p1, p2;
        DsEvCode code;
        // run
        mediaControl.Run();
        // pump graph events until completion or until stopEvent is signalled
        do
        {
            if (mediaEvent != null)
            {
                if (mediaEvent.GetEvent(out code, out p1, out p2, 0) >= 0)
                {
                    mediaEvent.FreeEventParams(code, p1, p2);
                    if (code == DsEvCode.Complete)
                    {
                        reasonToStop = ReasonToFinishPlaying.EndOfStreamReached;
                        break;
                    }
                }
            }
        }
        while (!stopEvent.WaitOne(100, false));
        mediaControl.Stop();
    }
    catch (Exception exception)
    {
        // provide information to clients instead of killing the thread silently
        if (VideoSourceError != null)
        {
            VideoSourceError(this, new VideoSourceErrorEventArgs(exception.Message));
        }
    }
    finally
    {
        // release all objects: drop interface references first, then the COM objects
        graph = null;
        grabberBase = null;
        sampleGrabber = null;
        mediaControl = null;
        mediaEvent = null;
        if (graphObject != null)
        {
            Marshal.ReleaseComObject(graphObject);
            graphObject = null;
        }
        if (sourceBase != null)
        {
            Marshal.ReleaseComObject(sourceBase);
            sourceBase = null;
        }
        if (grabberObject != null)
        {
            Marshal.ReleaseComObject(grabberObject);
            grabberObject = null;
        }
    }
    if (PlayingFinished != null)
    {
        PlayingFinished(this, reasonToStop);
    }
}
/// <summary>
/// Builds a DirectShow playback graph for the given file: inserts a sample
/// grabber requesting RGB32 frames, renders the file, reroutes the grabber's
/// output to a Null renderer (no visible window, faster than real rendering),
/// reads the negotiated frame size, configures rate/seek/control interfaces,
/// then leaves the graph primed (Run + Pause) but externally "stopped".
/// </summary>
/// <param name="filename">Path of the media file to open.</param>
/// <param name="playSpeed">Playback speed; the graph rate is set to <c>playSpeed / 20.0</c>.</param>
public CAviDS(string filename, double playSpeed)
{
    int hr;

    builder = (IGraphBuilder)new FilterGraph();

    #region [Sample Grabber]
    {
        // Direct cast instead of 'as': a cast failure surfaces here as an
        // InvalidCastException rather than a NullReferenceException later.
        grabber = (ISampleGrabber)new SampleGrabber();
        mediaType = new AMMediaType();
        mediaType.majorType = MediaType.Video;
        mediaType.subType = MediaSubType.RGB32;
        mediaType.formatType = FormatType.VideoInfo;
        hr = grabber.SetMediaType(mediaType);
        DsError.ThrowExceptionForHR(hr);
        hr = builder.AddFilter((IBaseFilter)grabber, "Sample Grabber");
        DsError.ThrowExceptionForHR(hr);
    }
    #endregion

    hr = builder.RenderFile(filename, null);
    DsError.ThrowExceptionForHR(hr);

    // Connect the grabber to a Null renderer: without this a video window
    // appears, and skipping real rendering also improves processing speed.
    CDirectShow.ConnectNullRendererFromSampleGrabber(builder, (IBaseFilter)grabber);
    // Dump the resulting graph for debugging.
    CDirectShow.tグラフを解析しデバッグ出力する(builder);

    if (builder is IVideoWindow videoWindow)
    {
        videoWindow.put_AutoShow(OABool.False);
    }

    #region [Video Info]
    {
        // Read the negotiated media type to learn the actual frame dimensions.
        hr = grabber.GetConnectedMediaType(mediaType);
        DsError.ThrowExceptionForHR(hr);
        videoInfo = (VideoInfoHeader)Marshal.PtrToStructure(mediaType.formatPtr, typeof(VideoInfoHeader));
        nWidth = videoInfo.BmiHeader.Width;
        nHeight = videoInfo.BmiHeader.Height;
    }
    #endregion

    #region [Seeker]
    {
        // Direct cast (was an unchecked 'as' that would NRE on GetDuration).
        seeker = (IMediaSeeking)builder;
        hr = seeker.GetDuration(out nMediaLength);
        DsError.ThrowExceptionForHR(hr);
        hr = seeker.SetRate(playSpeed / 20.0);
        DsError.ThrowExceptionForHR(hr);
    }
    #endregion

    #region [Control]
    {
        control = (IMediaControl)builder;
    }
    #endregion

    #region [Filter]
    {
        filter = (IMediaFilter)builder;
    }
    #endregion

    // Check the HRESULT like every other call in this block (was ignored).
    hr = grabber.SetBufferSamples(true);
    DsError.ThrowExceptionForHR(hr);

    this.Run();
    this.Pause();
    // Externally the clip appears stopped; do not expose this primed state as "paused".
    bPlaying = false;
    bPause = false;
}